Lines matching refs:data in lib/percpu-refcount.c
69 struct percpu_ref_data *data; in percpu_ref_init() local
76 data = kzalloc(sizeof(*ref->data), gfp); in percpu_ref_init()
77 if (!data) { in percpu_ref_init()
83 data->force_atomic = flags & PERCPU_REF_INIT_ATOMIC; in percpu_ref_init()
84 data->allow_reinit = flags & PERCPU_REF_ALLOW_REINIT; in percpu_ref_init()
88 data->allow_reinit = true; in percpu_ref_init()
98 atomic_long_set(&data->count, start_count); in percpu_ref_init()
100 data->release = release; in percpu_ref_init()
101 data->confirm_switch = NULL; in percpu_ref_init()
102 data->ref = ref; in percpu_ref_init()
103 ref->data = data; in percpu_ref_init()
114 WARN_ON_ONCE(ref->data && ref->data->confirm_switch); in __percpu_ref_exit()
132 struct percpu_ref_data *data = ref->data; in percpu_ref_exit() local
137 if (!data) in percpu_ref_exit()
141 ref->percpu_count_ptr |= atomic_long_read(&ref->data->count) << in percpu_ref_exit()
143 ref->data = NULL; in percpu_ref_exit()
146 kfree(data); in percpu_ref_exit()
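
The percpu_ref_init()/percpu_ref_exit() lines above allocate and later free the backing struct percpu_ref_data. A minimal lifecycle sketch follows; my_device, my_device_release() and the completion-based teardown are illustrative assumptions, not taken from the listing.

	#include <linux/completion.h>
	#include <linux/percpu-refcount.h>

	struct my_device {
		struct percpu_ref refcnt;	/* ref whose ->data is set at line 103 */
		struct completion released;
	};

	/* Stored as data->release (line 100); called once the count drops to
	 * zero after the ref has been killed. */
	static void my_device_release(struct percpu_ref *ref)
	{
		struct my_device *dev = container_of(ref, struct my_device, refcnt);

		complete(&dev->released);
	}

	static int my_device_setup(struct my_device *dev)
	{
		init_completion(&dev->released);
		/* flags = 0: start in percpu mode, no reinit allowed */
		return percpu_ref_init(&dev->refcnt, my_device_release, 0, GFP_KERNEL);
	}

	static void my_device_teardown(struct my_device *dev)
	{
		percpu_ref_kill(&dev->refcnt);		/* drop the initial reference */
		wait_for_completion(&dev->released);	/* all other users are gone */
		percpu_ref_exit(&dev->refcnt);		/* frees data (line 146) */
	}
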
152 struct percpu_ref_data *data = container_of(rcu, in percpu_ref_call_confirm_rcu() local
154 struct percpu_ref *ref = data->ref; in percpu_ref_call_confirm_rcu()
156 data->confirm_switch(ref); in percpu_ref_call_confirm_rcu()
157 data->confirm_switch = NULL; in percpu_ref_call_confirm_rcu()
160 if (!data->allow_reinit) in percpu_ref_call_confirm_rcu()
169 struct percpu_ref_data *data = container_of(rcu, in percpu_ref_switch_to_atomic_rcu() local
171 struct percpu_ref *ref = data->ref; in percpu_ref_switch_to_atomic_rcu()
181 atomic_long_read(&data->count), count); in percpu_ref_switch_to_atomic_rcu()
195 atomic_long_add((long)count - PERCPU_COUNT_BIAS, &data->count); in percpu_ref_switch_to_atomic_rcu()
197 if (WARN_ONCE(atomic_long_read(&data->count) <= 0, in percpu_ref_switch_to_atomic_rcu()
199 data->release, atomic_long_read(&data->count)) && in percpu_ref_switch_to_atomic_rcu()
202 mem_dump_obj(data); in percpu_ref_switch_to_atomic_rcu()
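
Lines 171-202 are where the per-CPU counters get folded back into data->count and PERCPU_COUNT_BIAS is subtracted out; the bias, added to the atomic count at init time, keeps data->count from hitting zero before all the per-CPU counts have been added in, even though individual per-CPU deltas can be negative (a get on one CPU paired with a put on another). A standalone toy of that arithmetic, using a made-up bias value and made-up per-CPU deltas:

	#include <stdio.h>

	int main(void)
	{
		const long BIAS = 1L << 32;		/* stand-in for PERCPU_COUNT_BIAS */
		long atomic_count = BIAS + 1;		/* bias + initial ref, as at init */
		long percpu_delta[4] = { 5, -3, 2, -1 };/* hypothetical per-CPU gets/puts */
		long sum = 0;

		for (int i = 0; i < 4; i++)
			sum += percpu_delta[i];		/* the per-CPU summing loop */

		/* Same step as atomic_long_add((long)count - PERCPU_COUNT_BIAS, &data->count) */
		atomic_count += sum - BIAS;

		printf("count after switch to atomic: %ld\n", atomic_count);	/* 4 */
		return 0;
	}
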
229 ref->data->confirm_switch = confirm_switch ?: in __percpu_ref_switch_to_atomic()
233 call_rcu_hurry(&ref->data->rcu, in __percpu_ref_switch_to_atomic()
247 if (WARN_ON_ONCE(!ref->data->allow_reinit)) in __percpu_ref_switch_to_percpu()
250 atomic_long_add(PERCPU_COUNT_BIAS, &ref->data->count); in __percpu_ref_switch_to_percpu()
268 struct percpu_ref_data *data = ref->data; in __percpu_ref_switch_mode() local
277 wait_event_lock_irq(percpu_ref_switch_waitq, !data->confirm_switch, in __percpu_ref_switch_mode()
280 if (data->force_atomic || percpu_ref_is_dying(ref)) in __percpu_ref_switch_mode()
313 ref->data->force_atomic = true; in percpu_ref_switch_to_atomic()
331 wait_event(percpu_ref_switch_waitq, !ref->data->confirm_switch); in percpu_ref_switch_to_atomic_sync()
359 ref->data->force_atomic = false; in percpu_ref_switch_to_percpu()
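
percpu_ref_switch_to_atomic_sync() and percpu_ref_switch_to_percpu() (lines 331 and 359) let a caller temporarily trade the per-CPU fast path for an exact count in data->count. Note the allow_reinit check at line 247: switching back to percpu mode only works if the ref was initialized with PERCPU_REF_ALLOW_REINIT (or started atomic). A hypothetical sketch under those assumptions:

	#include <linux/percpu-refcount.h>

	static int my_counter_init(struct percpu_ref *ref, percpu_ref_func_t *release)
	{
		/* PERCPU_REF_ALLOW_REINIT keeps data->allow_reinit true (line 84),
		 * so the later switch back to percpu mode is not rejected (line 247). */
		return percpu_ref_init(ref, release, PERCPU_REF_ALLOW_REINIT, GFP_KERNEL);
	}

	static void my_reconfigure(struct percpu_ref *ref)
	{
		/* Sets data->force_atomic (line 313) and waits for the RCU-deferred
		 * switch to finish, i.e. for data->confirm_switch to clear (line 331). */
		percpu_ref_switch_to_atomic_sync(ref);

		/* ... gets/puts now hit data->count; inspect or quiesce here ... */

		/* Clears data->force_atomic (line 359) and restores the fast path. */
		percpu_ref_switch_to_percpu(ref);
	}
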
392 ref->data->release); in percpu_ref_kill_and_confirm()
420 if (ref->data) in percpu_ref_is_zero()
421 count = atomic_long_read(&ref->data->count); in percpu_ref_is_zero()
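
The kill path at line 392 and the zero check at lines 420-421 combine into the usual drain pattern: kill the ref, then wait for data->count to reach zero before freeing. The my_pool structure, its waitqueue, and both callbacks below are illustrative assumptions:

	#include <linux/percpu-refcount.h>
	#include <linux/wait.h>

	struct my_pool {
		struct percpu_ref users;
		wait_queue_head_t drain_wq;
		bool draining;
	};

	/* confirm_kill callback: runs once the kill is visible on every CPU,
	 * i.e. after the RCU-deferred switch to atomic mode has completed. */
	static void my_pool_confirm_kill(struct percpu_ref *ref)
	{
		struct my_pool *pool = container_of(ref, struct my_pool, users);

		WRITE_ONCE(pool->draining, true);	/* new work can now be refused */
	}

	/* data->release: invoked when the atomic count finally drops to zero. */
	static void my_pool_release(struct percpu_ref *ref)
	{
		struct my_pool *pool = container_of(ref, struct my_pool, users);

		wake_up_all(&pool->drain_wq);
	}

	static void my_pool_shutdown(struct my_pool *pool)
	{
		percpu_ref_kill_and_confirm(&pool->users, my_pool_confirm_kill);
		/* percpu_ref_is_zero() reads data->count (lines 420-421); the ref is
		 * now dying and in atomic mode, so the value is exact. */
		wait_event(pool->drain_wq, percpu_ref_is_zero(&pool->users));
		percpu_ref_exit(&pool->users);
	}
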