Lines matching refs: ref (percpu_ref API, include/linux/percpu-refcount.h)
102 struct percpu_ref *ref; member
121 int __must_check percpu_ref_init(struct percpu_ref *ref,
124 void percpu_ref_exit(struct percpu_ref *ref);
125 void percpu_ref_switch_to_atomic(struct percpu_ref *ref,
127 void percpu_ref_switch_to_atomic_sync(struct percpu_ref *ref);
128 void percpu_ref_switch_to_percpu(struct percpu_ref *ref);
129 void percpu_ref_kill_and_confirm(struct percpu_ref *ref,
131 void percpu_ref_resurrect(struct percpu_ref *ref);
132 void percpu_ref_reinit(struct percpu_ref *ref);
133 bool percpu_ref_is_zero(struct percpu_ref *ref);
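The prototypes above are the setup and mode-control half of the API: in the full header, percpu_ref_init() also takes the release callback, a flags word and a gfp_t, and percpu_ref_exit() undoes the init. A minimal lifecycle sketch, assuming a hypothetical struct my_obj that embeds the ref and is freed from its release callback (my_obj, my_obj_release() and my_obj_alloc() are illustrative names, not part of the API):

        /* Sketch only: my_obj and its helpers are hypothetical. */
        #include <linux/percpu-refcount.h>
        #include <linux/slab.h>

        struct my_obj {
                struct percpu_ref ref;
                /* ... object payload ... */
        };

        static void my_obj_release(struct percpu_ref *ref)
        {
                struct my_obj *obj = container_of(ref, struct my_obj, ref);

                percpu_ref_exit(&obj->ref);     /* free the per-CPU counters */
                kfree(obj);
        }

        static struct my_obj *my_obj_alloc(void)
        {
                struct my_obj *obj = kzalloc(sizeof(*obj), GFP_KERNEL);

                if (!obj)
                        return NULL;
                /* starts in per-CPU mode, holding the initial reference */
                if (percpu_ref_init(&obj->ref, my_obj_release, 0, GFP_KERNEL)) {
                        kfree(obj);
                        return NULL;
                }
                return obj;
        }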
147 static inline void percpu_ref_kill(struct percpu_ref *ref) in percpu_ref_kill() argument
149 percpu_ref_kill_and_confirm(ref, NULL); in percpu_ref_kill()
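As the listing shows, percpu_ref_kill() is just percpu_ref_kill_and_confirm() with a NULL confirmation callback: it marks the ref dead (so percpu_ref_tryget_live() starts failing), switches it to atomic mode and drops the initial reference taken by percpu_ref_init(). A shutdown sketch continuing the hypothetical my_obj above:

        static void my_obj_shutdown(struct my_obj *obj)
        {
                /*
                 * After this, percpu_ref_tryget_live() fails for this ref;
                 * the object is freed by my_obj_release() once the last
                 * outstanding reference is dropped.
                 */
                percpu_ref_kill(&obj->ref);
        }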
158 static inline bool __ref_is_percpu(struct percpu_ref *ref, in __ref_is_percpu() argument
174 percpu_ptr = READ_ONCE(ref->percpu_count_ptr); in __ref_is_percpu()
198 static inline void percpu_ref_get_many(struct percpu_ref *ref, unsigned long nr) in percpu_ref_get_many() argument
204 if (__ref_is_percpu(ref, &percpu_count)) in percpu_ref_get_many()
207 atomic_long_add(nr, &ref->data->count); in percpu_ref_get_many()
220 static inline void percpu_ref_get(struct percpu_ref *ref) in percpu_ref_get() argument
222 percpu_ref_get_many(ref, 1); in percpu_ref_get()
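percpu_ref_get() is simply percpu_ref_get_many(ref, 1). Both require that the caller already holds a reference (or otherwise knows the count cannot reach zero concurrently); they never fail. A sketch of handing an extra reference to a new user of the hypothetical my_obj:

        /* Caller already holds a reference on obj; take another for the new user. */
        static struct my_obj *my_obj_get(struct my_obj *obj)
        {
                percpu_ref_get(&obj->ref);
                return obj;
        }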
235 static inline bool percpu_ref_tryget_many(struct percpu_ref *ref, in percpu_ref_tryget_many() argument
243 if (__ref_is_percpu(ref, &percpu_count)) { in percpu_ref_tryget_many()
247 ret = atomic_long_add_unless(&ref->data->count, nr, 0); in percpu_ref_tryget_many()
264 static inline bool percpu_ref_tryget(struct percpu_ref *ref) in percpu_ref_tryget() argument
266 return percpu_ref_tryget_many(ref, 1); in percpu_ref_tryget()
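percpu_ref_tryget() succeeds as long as the count has not yet hit zero, even after the ref has been killed; use percpu_ref_tryget_live() below when new users must be refused once shutdown has begun. For example (do_something() is a hypothetical stand-in for the caller's work):

        if (percpu_ref_tryget(&obj->ref)) {
                /* count was non-zero; this can succeed even while obj is dying */
                do_something(obj);              /* hypothetical user of obj */
                percpu_ref_put(&obj->ref);
        }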
275 static inline bool percpu_ref_tryget_live_rcu(struct percpu_ref *ref) in percpu_ref_tryget_live_rcu() argument
282 if (likely(__ref_is_percpu(ref, &percpu_count))) { in percpu_ref_tryget_live_rcu()
285 } else if (!(ref->percpu_count_ptr & __PERCPU_REF_DEAD)) { in percpu_ref_tryget_live_rcu()
286 ret = atomic_long_inc_not_zero(&ref->data->count); in percpu_ref_tryget_live_rcu()
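percpu_ref_tryget_live_rcu() is the flavour for callers that are already inside an RCU read-side critical section, for example while walking an RCU-protected table; unlike percpu_ref_tryget_live() it does not take rcu_read_lock() itself. A lookup sketch (obj_table and idx are hypothetical):

        struct my_obj *obj;

        rcu_read_lock();
        obj = rcu_dereference(obj_table[idx]);          /* hypothetical RCU lookup */
        if (obj && !percpu_ref_tryget_live_rcu(&obj->ref))
                obj = NULL;                             /* dying or already dead */
        rcu_read_unlock();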
306 static inline bool percpu_ref_tryget_live(struct percpu_ref *ref) in percpu_ref_tryget_live() argument
311 ret = percpu_ref_tryget_live_rcu(ref); in percpu_ref_tryget_live()
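percpu_ref_tryget_live() wraps the same check in its own rcu_read_lock()/rcu_read_unlock() pair, so it can be called wherever the ref structure itself is guaranteed to stay allocated; it fails once percpu_ref_kill() has run. A sketch using the hypothetical my_obj:

        static int my_obj_start_io(struct my_obj *obj)
        {
                if (!percpu_ref_tryget_live(&obj->ref))
                        return -ENODEV;         /* shutdown already started */
                /* ... issue the I/O ... */
                percpu_ref_put(&obj->ref);
                return 0;
        }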
326 static inline void percpu_ref_put_many(struct percpu_ref *ref, unsigned long nr) in percpu_ref_put_many() argument
332 if (__ref_is_percpu(ref, &percpu_count)) in percpu_ref_put_many()
334 else if (unlikely(atomic_long_sub_and_test(nr, &ref->data->count))) in percpu_ref_put_many()
335 ref->data->release(ref); in percpu_ref_put_many()
349 static inline void percpu_ref_put(struct percpu_ref *ref) in percpu_ref_put() argument
351 percpu_ref_put_many(ref, 1); in percpu_ref_put()
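Because the final percpu_ref_put() can happen from any context, teardown paths that need to wait for it typically have the release callback signal a completion instead of freeing directly. A variant of the earlier sketch, assuming a struct completion ref_done added to the hypothetical my_obj and init_completion()'d next to percpu_ref_init():

        /* needs <linux/completion.h>; ref_done is assumed to exist in my_obj */
        static void my_obj_release(struct percpu_ref *ref)
        {
                struct my_obj *obj = container_of(ref, struct my_obj, ref);

                complete(&obj->ref_done);       /* wake the teardown path */
        }

        static void my_obj_destroy(struct my_obj *obj)
        {
                percpu_ref_kill(&obj->ref);             /* drop the initial ref */
                wait_for_completion(&obj->ref_done);    /* last put has happened */
                percpu_ref_exit(&obj->ref);
                kfree(obj);
        }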
363 static inline bool percpu_ref_is_dying(struct percpu_ref *ref) in percpu_ref_is_dying() argument
365 return ref->percpu_count_ptr & __PERCPU_REF_DEAD; in percpu_ref_is_dying()
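percpu_ref_is_dying() only tests the __PERCPU_REF_DEAD flag, i.e. whether percpu_ref_kill() or percpu_ref_kill_and_confirm() has already been called; it says nothing about the count reaching zero, and the answer is only stable if the caller serializes against whoever performs the kill. A sketch using a hypothetical obj_lock shared with my_obj_shutdown():

        static int my_obj_queue_work(struct my_obj *obj)
        {
                mutex_lock(&obj_lock);          /* hypothetical lock, also held around my_obj_shutdown() */
                if (percpu_ref_is_dying(&obj->ref)) {
                        mutex_unlock(&obj_lock);
                        return -ESHUTDOWN;      /* shutdown already started */
                }
                /* ... safe to queue new work against obj here ... */
                mutex_unlock(&obj_lock);
                return 0;
        }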