Lines matching refs:ioc (references to the identifier ioc in the block layer's io_context management code)
29 static void get_io_context(struct io_context *ioc) in get_io_context() argument
31 BUG_ON(atomic_long_read(&ioc->refcount) <= 0); in get_io_context()
32 atomic_long_inc(&ioc->refcount); in get_io_context()
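The fragments above are the whole of get_io_context(): it takes an extra reference on an io_context the caller is already holding. A minimal sketch of how they fit together (braces and blank lines are reconstructed, not verbatim source):

/* Take an extra reference; the caller must already hold one. */
static void get_io_context(struct io_context *ioc)
{
	/* A non-positive refcount here means the caller's reference was bogus. */
	BUG_ON(atomic_long_read(&ioc->refcount) <= 0);
	atomic_long_inc(&ioc->refcount);
}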
59 static void ioc_exit_icqs(struct io_context *ioc) in ioc_exit_icqs() argument
63 spin_lock_irq(&ioc->lock); in ioc_exit_icqs()
64 hlist_for_each_entry(icq, &ioc->icq_list, ioc_node) in ioc_exit_icqs()
66 spin_unlock_irq(&ioc->lock); in ioc_exit_icqs()
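ioc_exit_icqs() walks every io_cq linked to the io_context under ioc->lock with interrupts disabled. A sketch of how the fragments fit together, assuming the per-entry work is done by a helper such as ioc_exit_icq(), which is not shown in this listing:

static void ioc_exit_icqs(struct io_context *ioc)
{
	struct io_cq *icq;

	spin_lock_irq(&ioc->lock);
	/* Notify each icq that its io_context is going away. */
	hlist_for_each_entry(icq, &ioc->icq_list, ioc_node)
		ioc_exit_icq(icq);
	spin_unlock_irq(&ioc->lock);
}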
75 struct io_context *ioc = icq->ioc; in ioc_destroy_icq() local
79 lockdep_assert_held(&ioc->lock); in ioc_destroy_icq()
85 radix_tree_delete(&ioc->icq_tree, icq->q->id); in ioc_destroy_icq()
94 if (rcu_access_pointer(ioc->icq_hint) == icq) in ioc_destroy_icq()
95 rcu_assign_pointer(ioc->icq_hint, NULL); in ioc_destroy_icq()
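ioc_destroy_icq() runs with ioc->lock held (the lockdep assertion above) and unlinks an io_cq from its owning io_context: the radix-tree entry keyed by the queue id is removed, and the RCU lookup hint is cleared if it still points at this icq. A sketch of just those unlink steps; exiting and freeing the icq are only indicated by a comment and are assumptions:

static void ioc_destroy_icq(struct io_cq *icq)
{
	struct io_context *ioc = icq->ioc;

	lockdep_assert_held(&ioc->lock);

	/* Unlink from the per-ioc radix tree, indexed by request_queue id. */
	radix_tree_delete(&ioc->icq_tree, icq->q->id);

	/* Drop the lookup hint if it points at the icq being destroyed;
	 * setting the hint elsewhere is allowed to race with this. */
	if (rcu_access_pointer(ioc->icq_hint) == icq)
		rcu_assign_pointer(ioc->icq_hint, NULL);

	/* ... exit the icq and free it (not shown in the listing) ... */
}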
114 struct io_context *ioc = container_of(work, struct io_context, in ioc_release_fn() local
116 spin_lock_irq(&ioc->lock); in ioc_release_fn()
118 while (!hlist_empty(&ioc->icq_list)) { in ioc_release_fn()
119 struct io_cq *icq = hlist_entry(ioc->icq_list.first, in ioc_release_fn()
131 spin_unlock(&ioc->lock); in ioc_release_fn()
133 spin_lock(&ioc->lock); in ioc_release_fn()
142 spin_unlock_irq(&ioc->lock); in ioc_release_fn()
144 kmem_cache_free(iocontext_cachep, ioc); in ioc_release_fn()
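ioc_release_fn() is the work item queued by ioc_delay_free(): under ioc->lock with interrupts disabled it drains icq_list, destroying one io_cq at a time, and finally returns the io_context to iocontext_cachep. The bare spin_unlock()/spin_lock() pair on ioc->lock in the middle of the fragments suggests the usual re-ordering dance when the queue lock cannot be taken in place; the trylock fallback in this sketch is an assumption, not verbatim source:

static void ioc_release_fn(struct work_struct *work)
{
	struct io_context *ioc = container_of(work, struct io_context,
					      release_work);

	spin_lock_irq(&ioc->lock);
	while (!hlist_empty(&ioc->icq_list)) {
		struct io_cq *icq = hlist_entry(ioc->icq_list.first,
						struct io_cq, ioc_node);
		struct request_queue *q = icq->q;

		if (spin_trylock(&q->queue_lock)) {
			ioc_destroy_icq(icq);
			spin_unlock(&q->queue_lock);
		} else {
			/* Drop ioc->lock and retake both locks in queue-then-ioc
			 * order to avoid deadlock, then destroy the icq. */
			spin_unlock(&ioc->lock);
			spin_lock(&q->queue_lock);
			spin_lock(&ioc->lock);
			ioc_destroy_icq(icq);
			spin_unlock(&q->queue_lock);
		}
	}
	spin_unlock_irq(&ioc->lock);

	kmem_cache_free(iocontext_cachep, ioc);
}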
151 static bool ioc_delay_free(struct io_context *ioc) in ioc_delay_free() argument
155 spin_lock_irqsave(&ioc->lock, flags); in ioc_delay_free()
156 if (!hlist_empty(&ioc->icq_list)) { in ioc_delay_free()
157 queue_work(system_power_efficient_wq, &ioc->release_work); in ioc_delay_free()
158 spin_unlock_irqrestore(&ioc->lock, flags); in ioc_delay_free()
161 spin_unlock_irqrestore(&ioc->lock, flags); in ioc_delay_free()
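ioc_delay_free() decides whether the final free can happen inline: if any icqs are still linked, teardown is punted to ioc_release_fn() on system_power_efficient_wq and the caller is told to back off; otherwise the io_context may be freed immediately. The return values in this sketch are inferred from how put_io_context() uses the result:

static bool ioc_delay_free(struct io_context *ioc)
{
	unsigned long flags;

	spin_lock_irqsave(&ioc->lock, flags);
	if (!hlist_empty(&ioc->icq_list)) {
		/* icqs still attached: defer teardown and free to the work item. */
		queue_work(system_power_efficient_wq, &ioc->release_work);
		spin_unlock_irqrestore(&ioc->lock, flags);
		return true;
	}
	spin_unlock_irqrestore(&ioc->lock, flags);
	return false;
}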
182 spin_lock(&icq->ioc->lock); in ioc_clear_queue()
184 spin_unlock(&icq->ioc->lock); in ioc_clear_queue()
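In ioc_clear_queue() the nesting is reversed: the queue lock is held on the outside and each icq's owning ioc->lock is taken around ioc_destroy_icq(). A sketch of the loop those two lines sit in; the list handling around them is an assumption:

void ioc_clear_queue(struct request_queue *q)
{
	spin_lock_irq(&q->queue_lock);
	while (!list_empty(&q->icq_list)) {
		struct io_cq *icq = list_first_entry(&q->icq_list,
						     struct io_cq, q_node);

		spin_lock(&icq->ioc->lock);	/* nested inside the queue lock */
		ioc_destroy_icq(icq);
		spin_unlock(&icq->ioc->lock);
	}
	spin_unlock_irq(&q->queue_lock);
}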
189 static inline void ioc_exit_icqs(struct io_context *ioc) in ioc_exit_icqs() argument
192 static inline bool ioc_delay_free(struct io_context *ioc) in ioc_delay_free() argument
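These two inline definitions are the stub variants used when icq support is compiled out: exiting icqs becomes a no-op and a delayed free is never needed. The empty body and the false return are assumptions based on that role:

static inline void ioc_exit_icqs(struct io_context *ioc)
{
}

static inline bool ioc_delay_free(struct io_context *ioc)
{
	return false;
}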
205 void put_io_context(struct io_context *ioc) in put_io_context() argument
207 BUG_ON(atomic_long_read(&ioc->refcount) <= 0); in put_io_context()
208 if (atomic_long_dec_and_test(&ioc->refcount) && !ioc_delay_free(ioc)) in put_io_context()
209 kmem_cache_free(iocontext_cachep, ioc); in put_io_context()
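put_io_context() drops one reference and frees the io_context only when that was the last reference and ioc_delay_free() did not queue deferred teardown. A sketch that matches the fragments one-for-one:

void put_io_context(struct io_context *ioc)
{
	BUG_ON(atomic_long_read(&ioc->refcount) <= 0);
	/* Free inline only if this was the last reference and nothing was
	 * punted to the release work by ioc_delay_free(). */
	if (atomic_long_dec_and_test(&ioc->refcount) && !ioc_delay_free(ioc))
		kmem_cache_free(iocontext_cachep, ioc);
}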
216 struct io_context *ioc; in exit_io_context() local
219 ioc = task->io_context; in exit_io_context()
223 if (atomic_dec_and_test(&ioc->active_ref)) { in exit_io_context()
224 ioc_exit_icqs(ioc); in exit_io_context()
225 put_io_context(ioc); in exit_io_context()
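exit_io_context() detaches the io_context from the exiting task and, once the last active user is gone, exits the icqs before dropping the reference. The task_lock()/task_unlock() bracketing and the NULL-ing of task->io_context in this sketch are assumptions; only the assignment, the active_ref drop and the two calls appear in the listing:

void exit_io_context(struct task_struct *task)
{
	struct io_context *ioc;

	task_lock(task);
	ioc = task->io_context;
	task->io_context = NULL;
	task_unlock(task);

	/* Last task actively using this context: tear down icqs, drop the ref. */
	if (atomic_dec_and_test(&ioc->active_ref)) {
		ioc_exit_icqs(ioc);
		put_io_context(ioc);
	}
}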
231 struct io_context *ioc; in alloc_io_context() local
233 ioc = kmem_cache_alloc_node(iocontext_cachep, gfp_flags | __GFP_ZERO, in alloc_io_context()
235 if (unlikely(!ioc)) in alloc_io_context()
238 atomic_long_set(&ioc->refcount, 1); in alloc_io_context()
239 atomic_set(&ioc->active_ref, 1); in alloc_io_context()
241 spin_lock_init(&ioc->lock); in alloc_io_context()
242 INIT_RADIX_TREE(&ioc->icq_tree, GFP_ATOMIC); in alloc_io_context()
243 INIT_HLIST_HEAD(&ioc->icq_list); in alloc_io_context()
244 INIT_WORK(&ioc->release_work, ioc_release_fn); in alloc_io_context()
246 ioc->ioprio = IOPRIO_DEFAULT; in alloc_io_context()
248 return ioc; in alloc_io_context()
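alloc_io_context() is the constructor: a zeroed allocation from iocontext_cachep on the requested node, both counters started at one, the icq bookkeeping (lock, radix tree, list head, release work) initialized, and the priority defaulted. A sketch assembled from the fragments; only the early NULL return is assumed:

static struct io_context *alloc_io_context(gfp_t gfp_flags, int node)
{
	struct io_context *ioc;

	ioc = kmem_cache_alloc_node(iocontext_cachep, gfp_flags | __GFP_ZERO,
				    node);
	if (unlikely(!ioc))
		return NULL;

	atomic_long_set(&ioc->refcount, 1);	/* lifetime reference */
	atomic_set(&ioc->active_ref, 1);	/* tasks actively using this context */
	spin_lock_init(&ioc->lock);
	INIT_RADIX_TREE(&ioc->icq_tree, GFP_ATOMIC);
	INIT_HLIST_HEAD(&ioc->icq_list);
	INIT_WORK(&ioc->release_work, ioc_release_fn);
	ioc->ioprio = IOPRIO_DEFAULT;

	return ioc;
}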
271 struct io_context *ioc; in set_task_ioprio() local
275 ioc = alloc_io_context(GFP_ATOMIC, NUMA_NO_NODE); in set_task_ioprio()
276 if (!ioc) in set_task_ioprio()
281 kmem_cache_free(iocontext_cachep, ioc); in set_task_ioprio()
285 kmem_cache_free(iocontext_cachep, ioc); in set_task_ioprio()
287 task->io_context = ioc; in set_task_ioprio()
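In set_task_ioprio(), the two kmem_cache_free() calls are the discard paths for a freshly allocated io_context: one is allocated with GFP_ATOMIC when the target task has none, then thrown away again if the task turns out to be exiting or if another context appeared in the meantime, and installed otherwise. A sketch of just that installation step; the PF_EXITING check, the locking around it and the rest of the function are assumptions:

	if (unlikely(!task->io_context)) {
		struct io_context *ioc;

		ioc = alloc_io_context(GFP_ATOMIC, NUMA_NO_NODE);
		if (!ioc)
			return -ENOMEM;

		if (task->flags & PF_EXITING)
			kmem_cache_free(iocontext_cachep, ioc);	/* task is going away */
		else if (task->io_context)
			kmem_cache_free(iocontext_cachep, ioc);	/* lost a race, keep the existing one */
		else
			task->io_context = ioc;
	}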
298 struct io_context *ioc = current->io_context; in __copy_io() local
304 atomic_inc(&ioc->active_ref); in __copy_io()
305 tsk->io_context = ioc; in __copy_io()
306 } else if (ioprio_valid(ioc->ioprio)) { in __copy_io()
310 tsk->io_context->ioprio = ioc->ioprio; in __copy_io()
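__copy_io() runs at fork time: with CLONE_IO the child shares the parent's io_context outright (active_ref is bumped), otherwise a child whose parent has a valid I/O priority gets a private context that inherits only the ioprio value. The allocation flags and the -ENOMEM return in this sketch are assumptions:

int __copy_io(unsigned long clone_flags, struct task_struct *tsk)
{
	struct io_context *ioc = current->io_context;

	if (clone_flags & CLONE_IO) {
		/* Share the parent's context. */
		atomic_inc(&ioc->active_ref);
		tsk->io_context = ioc;
	} else if (ioprio_valid(ioc->ioprio)) {
		/* Private context for the child, inheriting only the priority. */
		tsk->io_context = alloc_io_context(GFP_KERNEL, NUMA_NO_NODE);
		if (!tsk->io_context)
			return -ENOMEM;
		tsk->io_context->ioprio = ioc->ioprio;
	}

	return 0;
}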
326 struct io_context *ioc = current->io_context; in ioc_lookup_icq() local
338 icq = rcu_dereference(ioc->icq_hint); in ioc_lookup_icq()
342 icq = radix_tree_lookup(&ioc->icq_tree, q->id); in ioc_lookup_icq()
344 rcu_assign_pointer(ioc->icq_hint, icq); /* allowed to race */ in ioc_lookup_icq()
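ioc_lookup_icq() maps the current task's io_context plus a request_queue to the io_cq linking them: the RCU-protected icq_hint is tried first, and on a miss the radix tree keyed by q->id is consulted, refreshing the hint on success (the source comment notes that the hint update is allowed to race). The queue-match checks and the rcu_read_lock() bracketing in this sketch are assumptions:

static struct io_cq *ioc_lookup_icq(struct request_queue *q)
{
	struct io_context *ioc = current->io_context;
	struct io_cq *icq;

	rcu_read_lock();
	icq = rcu_dereference(ioc->icq_hint);		/* fast path: last-used icq */
	if (!icq || icq->q != q) {
		icq = radix_tree_lookup(&ioc->icq_tree, q->id);
		if (icq && icq->q == q)
			rcu_assign_pointer(ioc->icq_hint, icq);	/* allowed to race */
		else
			icq = NULL;
	}
	rcu_read_unlock();
	return icq;
}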
365 struct io_context *ioc = current->io_context; in ioc_create_icq() local
380 icq->ioc = ioc; in ioc_create_icq()
387 spin_lock(&ioc->lock); in ioc_create_icq()
389 if (likely(!radix_tree_insert(&ioc->icq_tree, q->id, icq))) { in ioc_create_icq()
390 hlist_add_head(&icq->ioc_node, &ioc->icq_list); in ioc_create_icq()
401 spin_unlock(&ioc->lock); in ioc_create_icq()
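ioc_create_icq() links a freshly allocated io_cq to both sides: the icq records its owning io_context, and under ioc->lock it is inserted into the radix tree at q->id and onto the ioc's icq_list. A sketch of only that linking step; the allocation, the queue-lock bracketing and the fallback when the radix-tree slot is already taken are assumptions or omitted:

	icq->ioc = ioc;
	icq->q = q;

	spin_lock_irq(&q->queue_lock);
	spin_lock(&ioc->lock);

	if (likely(!radix_tree_insert(&ioc->icq_tree, q->id, icq)))
		hlist_add_head(&icq->ioc_node, &ioc->icq_list);
	/* else: another icq was linked first; free this one and look that one up */

	spin_unlock(&ioc->lock);
	spin_unlock_irq(&q->queue_lock);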
409 struct io_context *ioc = current->io_context; in ioc_find_get_icq() local
412 if (unlikely(!ioc)) { in ioc_find_get_icq()
413 ioc = alloc_io_context(GFP_ATOMIC, q->node); in ioc_find_get_icq()
414 if (!ioc) in ioc_find_get_icq()
419 kmem_cache_free(iocontext_cachep, ioc); in ioc_find_get_icq()
420 ioc = current->io_context; in ioc_find_get_icq()
422 current->io_context = ioc; in ioc_find_get_icq()
425 get_io_context(ioc); in ioc_find_get_icq()
428 get_io_context(ioc); in ioc_find_get_icq()
438 put_io_context(ioc); in ioc_find_get_icq()
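ioc_find_get_icq() ties the pieces together: the current task gets an io_context if it has none (allocated for the queue's NUMA node, and discarded again if installing it races with another allocation), a reference is taken with get_io_context() in either branch, and if the subsequent icq lookup or creation fails that reference is dropped with put_io_context(). A sketch of the reference-counting flow; the task_lock() bracketing, any queue-lock use around the lookup and the exact lookup/create calls are assumptions:

struct io_cq *ioc_find_get_icq(struct request_queue *q)
{
	struct io_context *ioc = current->io_context;
	struct io_cq *icq;

	if (unlikely(!ioc)) {
		ioc = alloc_io_context(GFP_ATOMIC, q->node);
		if (!ioc)
			return NULL;
		if (current->io_context) {
			/* Raced with another allocator: keep the installed context. */
			kmem_cache_free(iocontext_cachep, ioc);
			ioc = current->io_context;
		} else {
			current->io_context = ioc;
		}
		get_io_context(ioc);
	} else {
		get_io_context(ioc);
	}

	icq = ioc_lookup_icq(q);
	if (!icq)
		icq = ioc_create_icq(q);
	if (!icq) {
		put_io_context(ioc);	/* undo the reference taken above */
		return NULL;
	}
	return icq;
}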