Lines Matching refs:ioc

26 void get_io_context(struct io_context *ioc)  in get_io_context()  argument
28 BUG_ON(atomic_long_read(&ioc->refcount) <= 0); in get_io_context()
29 atomic_long_inc(&ioc->refcount); in get_io_context()
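
The three lines above are the take-a-reference side of the io_context lifetime: the caller must already own a reference (hence the BUG_ON on a non-positive count) before bumping it. Below is a minimal user-space sketch of that pattern using C11 atomics instead of the kernel's atomic_long_t; struct ioctx and ioctx_get are hypothetical names, not the kernel API.

    #include <assert.h>
    #include <stdatomic.h>

    struct ioctx {                          /* stand-in for struct io_context */
            atomic_long refcount;           /* plain lifetime reference count */
    };

    /* Take an extra reference; only legal if the caller already holds one,
     * mirroring the BUG_ON() in get_io_context(). */
    static void ioctx_get(struct ioctx *ctx)
    {
            assert(atomic_load(&ctx->refcount) > 0);
            atomic_fetch_add(&ctx->refcount, 1);
    }
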
62 struct io_context *ioc = icq->ioc; in ioc_destroy_icq() local
66 lockdep_assert_held(&ioc->lock); in ioc_destroy_icq()
68 radix_tree_delete(&ioc->icq_tree, icq->q->id); in ioc_destroy_icq()
77 if (rcu_access_pointer(ioc->icq_hint) == icq) in ioc_destroy_icq()
78 rcu_assign_pointer(ioc->icq_hint, NULL); in ioc_destroy_icq()
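
ioc_destroy_icq unlinks one icq with ioc->lock held (line 66 asserts that): it deletes the radix-tree slot keyed by the queue id and, if the cached icq_hint still points at the entry being torn down, clears the hint so later lookups fall back to the tree. A rough user-space sketch of "invalidate the lookup hint when removing what it points at"; a flat slot array stands in for the radix tree and a plain mutex for ioc->lock, so no RCU is needed here (all names are hypothetical).

    #include <pthread.h>
    #include <stddef.h>

    #define MAX_SLOTS 64

    struct entry { int id; };

    struct cache {
            pthread_mutex_t lock;               /* plays the role of ioc->lock */
            struct entry *slot[MAX_SLOTS];      /* plays the role of icq_tree  */
            struct entry *hint;                 /* plays the role of icq_hint  */
    };

    /* Remove an entry; caller must hold c->lock, like ioc_destroy_icq(). */
    static void cache_remove_locked(struct cache *c, struct entry *e)
    {
            c->slot[e->id % MAX_SLOTS] = NULL;  /* radix_tree_delete() analogue */
            if (c->hint == e)                   /* drop a now-stale hint        */
                    c->hint = NULL;
    }
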
97 struct io_context *ioc = container_of(work, struct io_context, in ioc_release_fn() local
99 spin_lock_irq(&ioc->lock); in ioc_release_fn()
101 while (!hlist_empty(&ioc->icq_list)) { in ioc_release_fn()
102 struct io_cq *icq = hlist_entry(ioc->icq_list.first, in ioc_release_fn()
114 spin_unlock(&ioc->lock); in ioc_release_fn()
116 spin_lock(&ioc->lock); in ioc_release_fn()
130 spin_unlock_irq(&ioc->lock); in ioc_release_fn()
132 kmem_cache_free(iocontext_cachep, ioc); in ioc_release_fn()
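
ioc_release_fn is the deferred teardown path: it runs from release_work, walks icq_list under ioc->lock, destroys each icq while releasing and reacquiring the lock inside the loop (lines 114/116), and only then frees the io_context itself. A user-space analogue of that drain-then-free shape, with a mutex in place of the irq-disabling spinlock (names are hypothetical):

    #include <pthread.h>
    #include <stdlib.h>

    struct node { struct node *next; };

    struct ctx {
            pthread_mutex_t lock;
            struct node *list;                  /* stand-in for ioc->icq_list */
    };

    /* Deferred-release worker: drain the list, then free the context.
     * Mirrors the shape of ioc_release_fn(), not its exact locking rules. */
    static void release_fn(struct ctx *c)
    {
            pthread_mutex_lock(&c->lock);
            while (c->list) {
                    struct node *n = c->list;
                    c->list = n->next;
                    /* per-entry teardown may need other locks, so drop ours */
                    pthread_mutex_unlock(&c->lock);
                    free(n);
                    pthread_mutex_lock(&c->lock);
            }
            pthread_mutex_unlock(&c->lock);
            free(c);                            /* kmem_cache_free() analogue */
    }
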
142 void put_io_context(struct io_context *ioc) in put_io_context() argument
147 if (ioc == NULL) in put_io_context()
150 BUG_ON(atomic_long_read(&ioc->refcount) <= 0); in put_io_context()
156 if (atomic_long_dec_and_test(&ioc->refcount)) { in put_io_context()
157 spin_lock_irqsave(&ioc->lock, flags); in put_io_context()
158 if (!hlist_empty(&ioc->icq_list)) in put_io_context()
160 &ioc->release_work); in put_io_context()
163 spin_unlock_irqrestore(&ioc->lock, flags); in put_io_context()
167 kmem_cache_free(iocontext_cachep, ioc); in put_io_context()
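
put_io_context drops one reference; when the count reaches zero it frees the io_context right away unless icqs are still linked, in which case the heavy teardown is punted to release_work. A hedged C11 sketch of that dec-and-test-then-defer shape; schedule_release is a hypothetical stand-in for queueing the work item.

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdlib.h>

    struct ioctx {
            atomic_long refcount;
            bool have_entries;          /* stand-in for !hlist_empty(&ioc->icq_list) */
    };

    /* Stand-in for queue_work(); a real version would defer to a worker. */
    static void schedule_release(struct ioctx *ctx)
    {
            free(ctx);
    }

    static void ioctx_put(struct ioctx *ctx)
    {
            if (!ctx)
                    return;
            if (atomic_fetch_sub(&ctx->refcount, 1) == 1) { /* last reference gone */
                    if (ctx->have_entries)
                            schedule_release(ctx);          /* defer heavy teardown */
                    else
                            free(ctx);                      /* nothing linked: free now */
            }
    }
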
177 void put_io_context_active(struct io_context *ioc) in put_io_context_active() argument
181 if (!atomic_dec_and_test(&ioc->active_ref)) { in put_io_context_active()
182 put_io_context(ioc); in put_io_context_active()
186 spin_lock_irq(&ioc->lock); in put_io_context_active()
187 hlist_for_each_entry(icq, &ioc->icq_list, ioc_node) { in put_io_context_active()
193 spin_unlock_irq(&ioc->lock); in put_io_context_active()
195 put_io_context(ioc); in put_io_context_active()
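
put_io_context_active shows that the io_context carries two counters: active_ref counts users that may still issue I/O, while refcount only keeps the memory alive. When the last active user goes away, the function walks icq_list under ioc->lock (the loop body is not shown in this listing, but that is where per-icq exit work happens) and then drops a plain reference; a non-final active put just drops the reference. A rough sketch of that dual-counter pattern, with hypothetical names and stubbed helpers:

    #include <stdatomic.h>

    struct ioctx {
            atomic_long refcount;       /* keeps the object alive         */
            atomic_long active_ref;     /* counts users still issuing I/O */
    };

    static void ioctx_exit_entries(struct ioctx *ctx) { (void)ctx; /* per-icq exit */ }
    static void ioctx_put(struct ioctx *ctx)          { (void)ctx; /* see sketch above */ }

    static void ioctx_put_active(struct ioctx *ctx)
    {
            /* Only the last active user runs the per-entry exit work. */
            if (atomic_fetch_sub(&ctx->active_ref, 1) == 1)
                    ioctx_exit_entries(ctx);
            ioctx_put(ctx);             /* every active ref also held a plain ref */
    }
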
201 struct io_context *ioc; in exit_io_context() local
204 ioc = task->io_context; in exit_io_context()
208 atomic_dec(&ioc->nr_tasks); in exit_io_context()
209 put_io_context_active(ioc); in exit_io_context()
220 struct io_context *ioc = icq->ioc; in __ioc_clear_queue() local
222 spin_lock_irqsave(&ioc->lock, flags); in __ioc_clear_queue()
224 spin_unlock_irqrestore(&ioc->lock, flags); in __ioc_clear_queue()
228 spin_unlock_irqrestore(&ioc->lock, flags); in __ioc_clear_queue()
252 struct io_context *ioc; in create_task_io_context() local
255 ioc = kmem_cache_alloc_node(iocontext_cachep, gfp_flags | __GFP_ZERO, in create_task_io_context()
257 if (unlikely(!ioc)) in create_task_io_context()
261 atomic_long_set(&ioc->refcount, 1); in create_task_io_context()
262 atomic_set(&ioc->nr_tasks, 1); in create_task_io_context()
263 atomic_set(&ioc->active_ref, 1); in create_task_io_context()
264 spin_lock_init(&ioc->lock); in create_task_io_context()
265 INIT_RADIX_TREE(&ioc->icq_tree, GFP_ATOMIC); in create_task_io_context()
266 INIT_HLIST_HEAD(&ioc->icq_list); in create_task_io_context()
267 INIT_WORK(&ioc->release_work, ioc_release_fn); in create_task_io_context()
279 task->io_context = ioc; in create_task_io_context()
281 kmem_cache_free(iocontext_cachep, ioc); in create_task_io_context()
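
create_task_io_context allocates a zeroed io_context from its slab cache, sets refcount, nr_tasks and active_ref to 1, initialises the lock, radix tree, icq list and release work, and installs the result in task->io_context; if the context cannot be installed (the path ending at line 281), the freshly built object is freed again. A user-space sketch of that allocate, initialise, install-or-discard shape; calloc stands in for the zeroing allocation and struct task is hypothetical.

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdlib.h>

    struct ioctx {
            atomic_long refcount;
            atomic_long nr_tasks;
            atomic_long active_ref;
            pthread_mutex_t lock;
    };

    struct task { struct ioctx *ioctx; };

    static int task_create_ioctx(struct task *t)
    {
            struct ioctx *ctx = calloc(1, sizeof(*ctx));    /* zeroed, like __GFP_ZERO */
            if (!ctx)
                    return -1;
            atomic_store(&ctx->refcount, 1);
            atomic_store(&ctx->nr_tasks, 1);
            atomic_store(&ctx->active_ref, 1);
            pthread_mutex_init(&ctx->lock, NULL);

            if (!t->ioctx) {            /* install unless someone beat us to it */
                    t->ioctx = ctx;
                    return 0;
            }
            free(ctx);                  /* lost the race: discard the new one */
            return 0;
    }
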
306 struct io_context *ioc; in get_task_io_context() local
312 ioc = task->io_context; in get_task_io_context()
313 if (likely(ioc)) { in get_task_io_context()
314 get_io_context(ioc); in get_task_io_context()
316 return ioc; in get_task_io_context()
332 struct io_cq *ioc_lookup_icq(struct io_context *ioc, struct request_queue *q) in ioc_lookup_icq() argument
345 icq = rcu_dereference(ioc->icq_hint); in ioc_lookup_icq()
349 icq = radix_tree_lookup(&ioc->icq_tree, q->id); in ioc_lookup_icq()
351 rcu_assign_pointer(ioc->icq_hint, icq); /* allowed to race */ in ioc_lookup_icq()
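
ioc_lookup_icq tries the published icq_hint first; if the hint is missing or belongs to another queue it falls back to the radix tree keyed by q->id and refreshes the hint, a store the comment explicitly allows to race. A simplified single-threaded sketch of that hint-then-index lookup; the flat array replaces the radix tree and the hint is a plain pointer rather than an RCU-protected one (names are hypothetical).

    #define MAX_SLOTS 64

    struct entry { int qid; };

    struct cache {
            struct entry *slot[MAX_SLOTS];      /* keyed by queue id, like icq_tree */
            struct entry *hint;                 /* last successful lookup           */
    };

    static struct entry *cache_lookup(struct cache *c, int qid)
    {
            struct entry *e = c->hint;          /* fast path: cached hint */

            if (e && e->qid == qid)
                    return e;
            e = c->slot[qid % MAX_SLOTS];       /* slow path: the real index */
            if (e && e->qid == qid)
                    c->hint = e;                /* refresh the hint for next time */
            else
                    e = NULL;
            return e;
    }
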
372 struct io_cq *ioc_create_icq(struct io_context *ioc, struct request_queue *q, in ioc_create_icq() argument
389 icq->ioc = ioc; in ioc_create_icq()
396 spin_lock(&ioc->lock); in ioc_create_icq()
398 if (likely(!radix_tree_insert(&ioc->icq_tree, q->id, icq))) { in ioc_create_icq()
399 hlist_add_head(&icq->ioc_node, &ioc->icq_list); in ioc_create_icq()
405 icq = ioc_lookup_icq(ioc, q); in ioc_create_icq()
410 spin_unlock(&ioc->lock); in ioc_create_icq()
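
ioc_create_icq ties a new icq to the io_context (line 389) and inserts it into the radix tree and icq_list under ioc->lock; if the insert fails because another path created the icq first, the code simply looks up the one that is already there (line 405). A sketch of that insert-or-fall-back-to-lookup shape against the same flat-array stand-in used above; all names are hypothetical.

    #include <pthread.h>
    #include <stdlib.h>

    #define MAX_SLOTS 64

    struct entry { int qid; };

    struct cache {
            pthread_mutex_t lock;
            struct entry *slot[MAX_SLOTS];
    };

    /* Create-or-find: insert a fresh entry, but if the slot is already
     * populated (someone raced us), discard ours and return the winner. */
    static struct entry *cache_create(struct cache *c, int qid)
    {
            struct entry *e = calloc(1, sizeof(*e));
            if (!e)
                    return NULL;
            e->qid = qid;

            pthread_mutex_lock(&c->lock);
            if (!c->slot[qid % MAX_SLOTS]) {
                    c->slot[qid % MAX_SLOTS] = e;       /* radix_tree_insert() analogue */
            } else {
                    free(e);                            /* lost the race                */
                    e = c->slot[qid % MAX_SLOTS];       /* ioc_lookup_icq() analogue    */
            }
            pthread_mutex_unlock(&c->lock);
            return e;
    }
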