Lines matching refs: ioc (identifier cross-reference hits in block/blk-ioc.c, Linux kernel block layer; each entry shows the source line number, the matching code, and the enclosing function)
29 static void get_io_context(struct io_context *ioc) in get_io_context() argument
31 BUG_ON(atomic_long_read(&ioc->refcount) <= 0); in get_io_context()
32 atomic_long_inc(&ioc->refcount); in get_io_context()
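
The two hits above are the whole body of get_io_context(): taking a reference is an unconditional atomic increment, and the BUG_ON() catches callers trying to resurrect a context whose refcount already reached zero. A minimal reconstruction from the lines shown:

static void get_io_context(struct io_context *ioc)
{
        /* Taking a reference on a dead context would be a use-after-free. */
        BUG_ON(atomic_long_read(&ioc->refcount) <= 0);
        atomic_long_inc(&ioc->refcount);
}
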
59 static void ioc_exit_icqs(struct io_context *ioc) in ioc_exit_icqs() argument
63 spin_lock_irq(&ioc->lock); in ioc_exit_icqs()
64 hlist_for_each_entry(icq, &ioc->icq_list, ioc_node) in ioc_exit_icqs()
66 spin_unlock_irq(&ioc->lock); in ioc_exit_icqs()
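
ioc_exit_icqs() walks every io_cq linked on ioc->icq_list under ioc->lock with interrupts disabled. The loop body is not visible in the listing; the per-entry call below (ioc_exit_icq()) is an assumption consistent with the function's name:

static void ioc_exit_icqs(struct io_context *ioc)
{
        struct io_cq *icq;

        spin_lock_irq(&ioc->lock);
        hlist_for_each_entry(icq, &ioc->icq_list, ioc_node)
                ioc_exit_icq(icq);      /* assumed per-icq exit helper */
        spin_unlock_irq(&ioc->lock);
}
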
75 struct io_context *ioc = icq->ioc; in ioc_destroy_icq() local
79 lockdep_assert_held(&ioc->lock); in ioc_destroy_icq()
81 radix_tree_delete(&ioc->icq_tree, icq->q->id); in ioc_destroy_icq()
90 if (rcu_access_pointer(ioc->icq_hint) == icq) in ioc_destroy_icq()
91 rcu_assign_pointer(ioc->icq_hint, NULL); in ioc_destroy_icq()
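
ioc_destroy_icq() unlinks one icq from its owning context: the radix tree is keyed by the request queue's id, and the lookup hint is cleared only if it currently points at this icq. rcu_access_pointer() suffices for the comparison because the pointer is never dereferenced, and rcu_assign_pointer(..., NULL) keeps concurrent RCU readers safe. A sketch from the visible lines, with the list unlinking and deferred free assumed:

static void ioc_destroy_icq(struct io_cq *icq)
{
        struct io_context *ioc = icq->ioc;

        lockdep_assert_held(&ioc->lock);

        /* Unlink from the per-context radix tree, keyed by queue id. */
        radix_tree_delete(&ioc->icq_tree, icq->q->id);
        hlist_del_init(&icq->ioc_node);         /* assumed unlink steps */
        list_del_init(&icq->q_node);

        /* Drop the lookup hint if it points at the icq being destroyed. */
        if (rcu_access_pointer(ioc->icq_hint) == icq)
                rcu_assign_pointer(ioc->icq_hint, NULL);

        /* The full function also invokes the elevator's exit hook and
         * frees the icq via RCU (assumed, not visible above). */
}
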
110 struct io_context *ioc = container_of(work, struct io_context, in ioc_release_fn() local
112 spin_lock_irq(&ioc->lock); in ioc_release_fn()
114 while (!hlist_empty(&ioc->icq_list)) { in ioc_release_fn()
115 struct io_cq *icq = hlist_entry(ioc->icq_list.first, in ioc_release_fn()
127 spin_unlock(&ioc->lock); in ioc_release_fn()
129 spin_lock(&ioc->lock); in ioc_release_fn()
143 spin_unlock_irq(&ioc->lock); in ioc_release_fn()
145 kmem_cache_free(iocontext_cachep, ioc); in ioc_release_fn()
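
ioc_release_fn() is the deferred-release work item: it drains ioc->icq_list and then frees the context itself. Destroying an icq needs both the queue lock and ioc->lock, nested queue-then-context, which explains the unlock/relock dance visible above: if the queue lock cannot be taken opportunistically, ioc->lock is dropped and both locks are re-acquired in the correct order. A sketch; the trylock fast path and the RCU protection around the relock are assumptions consistent with the visible lock sequence:

static void ioc_release_fn(struct work_struct *work)
{
        struct io_context *ioc = container_of(work, struct io_context,
                                              release_work);
        spin_lock_irq(&ioc->lock);

        while (!hlist_empty(&ioc->icq_list)) {
                struct io_cq *icq = hlist_entry(ioc->icq_list.first,
                                                struct io_cq, ioc_node);
                struct request_queue *q = icq->q;

                if (spin_trylock(&q->queue_lock)) {
                        ioc_destroy_icq(icq);
                        spin_unlock(&q->queue_lock);
                } else {
                        /* Keep q and icq alive while no lock is held. */
                        rcu_read_lock();

                        /* Re-acquire in queue-then-context order. */
                        spin_unlock(&ioc->lock);
                        spin_lock(&q->queue_lock);
                        spin_lock(&ioc->lock);

                        ioc_destroy_icq(icq);

                        spin_unlock(&q->queue_lock);
                        rcu_read_unlock();
                }
        }

        spin_unlock_irq(&ioc->lock);

        kmem_cache_free(iocontext_cachep, ioc);
}
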
152 static bool ioc_delay_free(struct io_context *ioc) in ioc_delay_free() argument
156 spin_lock_irqsave(&ioc->lock, flags); in ioc_delay_free()
157 if (!hlist_empty(&ioc->icq_list)) { in ioc_delay_free()
158 queue_work(system_power_efficient_wq, &ioc->release_work); in ioc_delay_free()
159 spin_unlock_irqrestore(&ioc->lock, flags); in ioc_delay_free()
162 spin_unlock_irqrestore(&ioc->lock, flags); in ioc_delay_free()
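
ioc_delay_free() decides whether the context can be freed inline: if icqs are still linked, teardown is punted to the release work item on the power-efficient workqueue and the function reports that freeing was deferred. Only the return statements are assumed here, inferred from how put_io_context() uses the result:

static bool ioc_delay_free(struct io_context *ioc)
{
        unsigned long flags;

        spin_lock_irqsave(&ioc->lock, flags);
        if (!hlist_empty(&ioc->icq_list)) {
                /* icqs still linked: let ioc_release_fn() tear them down. */
                queue_work(system_power_efficient_wq, &ioc->release_work);
                spin_unlock_irqrestore(&ioc->lock, flags);
                return true;
        }
        spin_unlock_irqrestore(&ioc->lock, flags);
        return false;
}
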
185 spin_lock_irq(&icq->ioc->lock); in ioc_clear_queue()
188 spin_unlock_irq(&icq->ioc->lock); in ioc_clear_queue()
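
Only the lock/unlock pair inside a loop is visible for ioc_clear_queue(); it shows that when a queue is torn down, each icq is destroyed under its owning context's lock (the reverse direction of ioc_release_fn(), which starts from the context). The rest of this sketch, including splicing the queue's icq list off under the queue lock first, is assumed:

void ioc_clear_queue(struct request_queue *q)
{
        LIST_HEAD(icq_list);

        /* Detach every icq from the queue in one step (assumed). */
        spin_lock_irq(&q->queue_lock);
        list_splice_init(&q->icq_list, &icq_list);
        spin_unlock_irq(&q->queue_lock);

        while (!list_empty(&icq_list)) {
                struct io_cq *icq =
                        list_first_entry(&icq_list, struct io_cq, q_node);

                /* Destruction always happens under the owning ioc's lock. */
                spin_lock_irq(&icq->ioc->lock);
                ioc_destroy_icq(icq);
                spin_unlock_irq(&icq->ioc->lock);
        }
}
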
193 static inline void ioc_exit_icqs(struct io_context *ioc) in ioc_exit_icqs() argument
196 static inline bool ioc_delay_free(struct io_context *ioc) in ioc_delay_free() argument
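
The two inline variants above are stubs for builds where per-queue icqs are compiled out (presumably the !CONFIG_BLK_ICQ branch); their bodies are assumed to be empty and trivially false, so put_io_context() always frees inline:

/* Assumed stub bodies for the icq-less configuration: */
static inline void ioc_exit_icqs(struct io_context *ioc)
{
}

static inline bool ioc_delay_free(struct io_context *ioc)
{
        return false;
}
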
209 void put_io_context(struct io_context *ioc) in put_io_context() argument
211 BUG_ON(atomic_long_read(&ioc->refcount) <= 0); in put_io_context()
212 if (atomic_long_dec_and_test(&ioc->refcount) && !ioc_delay_free(ioc)) in put_io_context()
213 kmem_cache_free(iocontext_cachep, ioc); in put_io_context()
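
put_io_context() is the drop side of get_io_context(): on the final reference it frees the context immediately unless ioc_delay_free() had to defer to the workqueue. The && short-circuit matters: ioc_delay_free() only runs when the refcount actually hit zero. Reconstructed from the lines above:

void put_io_context(struct io_context *ioc)
{
        BUG_ON(atomic_long_read(&ioc->refcount) <= 0);
        if (atomic_long_dec_and_test(&ioc->refcount) && !ioc_delay_free(ioc))
                kmem_cache_free(iocontext_cachep, ioc);
}
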
220 struct io_context *ioc; in exit_io_context() local
223 ioc = task->io_context; in exit_io_context()
227 if (atomic_dec_and_test(&ioc->active_ref)) { in exit_io_context()
228 ioc_exit_icqs(ioc); in exit_io_context()
229 put_io_context(ioc); in exit_io_context()
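
exit_io_context() detaches the context from a dying task and drops its active reference; only when the last active user goes away are the icqs exited and the plain refcount dropped. The task_lock() pair and the NULL-ing of task->io_context are assumptions around the visible lines:

void exit_io_context(struct task_struct *task)
{
        struct io_context *ioc;

        task_lock(task);                /* assumed */
        ioc = task->io_context;
        task->io_context = NULL;        /* assumed */
        task_unlock(task);

        if (atomic_dec_and_test(&ioc->active_ref)) {
                ioc_exit_icqs(ioc);
                put_io_context(ioc);
        }
}
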
235 struct io_context *ioc; in alloc_io_context() local
237 ioc = kmem_cache_alloc_node(iocontext_cachep, gfp_flags | __GFP_ZERO, in alloc_io_context()
239 if (unlikely(!ioc)) in alloc_io_context()
242 atomic_long_set(&ioc->refcount, 1); in alloc_io_context()
243 atomic_set(&ioc->active_ref, 1); in alloc_io_context()
245 spin_lock_init(&ioc->lock); in alloc_io_context()
246 INIT_RADIX_TREE(&ioc->icq_tree, GFP_ATOMIC); in alloc_io_context()
247 INIT_HLIST_HEAD(&ioc->icq_list); in alloc_io_context()
248 INIT_WORK(&ioc->release_work, ioc_release_fn); in alloc_io_context()
250 ioc->ioprio = IOPRIO_DEFAULT; in alloc_io_context()
252 return ioc; in alloc_io_context()
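
alloc_io_context() is almost fully visible: a zeroed slab allocation, both counters initialized to 1 (one plain reference, one active user), the icq machinery set up, and the default I/O priority applied. The gap in the source line numbering around the icq fields suggests they sit behind a config option (presumably CONFIG_BLK_ICQ); the signature below is inferred from the call sites elsewhere in the listing:

static struct io_context *alloc_io_context(gfp_t gfp_flags, int node)
{
        struct io_context *ioc;

        ioc = kmem_cache_alloc_node(iocontext_cachep, gfp_flags | __GFP_ZERO,
                                    node);
        if (unlikely(!ioc))
                return NULL;

        atomic_long_set(&ioc->refcount, 1);
        atomic_set(&ioc->active_ref, 1);
        spin_lock_init(&ioc->lock);
        INIT_RADIX_TREE(&ioc->icq_tree, GFP_ATOMIC);
        INIT_HLIST_HEAD(&ioc->icq_list);
        INIT_WORK(&ioc->release_work, ioc_release_fn);
        ioc->ioprio = IOPRIO_DEFAULT;

        return ioc;
}
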
275 struct io_context *ioc; in set_task_ioprio() local
279 ioc = alloc_io_context(GFP_ATOMIC, NUMA_NO_NODE); in set_task_ioprio()
280 if (!ioc) in set_task_ioprio()
285 kmem_cache_free(iocontext_cachep, ioc); in set_task_ioprio()
289 kmem_cache_free(iocontext_cachep, ioc); in set_task_ioprio()
291 task->io_context = ioc; in set_task_ioprio()
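
set_task_ioprio() allocates with GFP_ATOMIC because it installs a context on another task under task_lock(). The two kmem_cache_free() call sites in the listing correspond to two failure-to-install cases: the target task started exiting while the lock was dropped for the allocation, or another path installed an io_context first. A condensed sketch of that slow path (the locking structure, goto label, and surrounding permission checks are assumed):

        /* Inside set_task_ioprio(), when @task has no io_context yet: */
        task_unlock(task);
        ioc = alloc_io_context(GFP_ATOMIC, NUMA_NO_NODE);
        if (!ioc)
                return -ENOMEM;

        task_lock(task);
        if (task->flags & PF_EXITING) {
                /* Task died while task_lock was dropped: discard. */
                kmem_cache_free(iocontext_cachep, ioc);
                goto out;
        }
        if (task->io_context)
                /* Lost the race to another installer: discard ours. */
                kmem_cache_free(iocontext_cachep, ioc);
        else
                task->io_context = ioc;

        task->io_context->ioprio = ioprio;
out:
        task_unlock(task);
        return 0;
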
302 struct io_context *ioc = current->io_context; in __copy_io() local
308 atomic_inc(&ioc->active_ref); in __copy_io()
309 tsk->io_context = ioc; in __copy_io()
310 } else if (ioprio_valid(ioc->ioprio)) { in __copy_io()
314 tsk->io_context->ioprio = ioc->ioprio; in __copy_io()
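
__copy_io() runs at fork time: with CLONE_IO the child shares the parent's context outright (bumping active_ref, not refcount), otherwise only a valid I/O priority is inherited into a freshly allocated context. The allocation call and error handling are assumed around the visible lines:

int __copy_io(unsigned long clone_flags, struct task_struct *tsk)
{
        struct io_context *ioc = current->io_context;

        if (clone_flags & CLONE_IO) {
                /* Share the whole context with the parent. */
                atomic_inc(&ioc->active_ref);
                tsk->io_context = ioc;
        } else if (ioprio_valid(ioc->ioprio)) {
                /* Inherit only the priority, in a fresh context (assumed). */
                tsk->io_context = alloc_io_context(GFP_KERNEL, NUMA_NO_NODE);
                if (!tsk->io_context)
                        return -ENOMEM;
                tsk->io_context->ioprio = ioc->ioprio;
        }

        return 0;
}
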
330 struct io_context *ioc = current->io_context; in ioc_lookup_icq() local
342 icq = rcu_dereference(ioc->icq_hint); in ioc_lookup_icq()
346 icq = radix_tree_lookup(&ioc->icq_tree, q->id); in ioc_lookup_icq()
348 rcu_assign_pointer(ioc->icq_hint, icq); /* allowed to race */ in ioc_lookup_icq()
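
ioc_lookup_icq() is a two-level lookup: first the RCU-protected one-entry hint, then the radix tree keyed by queue id, refreshing the hint on a tree hit. The in-tree comment "allowed to race" is the point: the hint is purely an optimization, so a lost update is harmless. The q-match checks and RCU bracketing in this sketch are assumptions consistent with the rcu_dereference()/rcu_assign_pointer() pair above:

struct io_cq *ioc_lookup_icq(struct request_queue *q)
{
        struct io_context *ioc = current->io_context;
        struct io_cq *icq;

        rcu_read_lock();
        icq = rcu_dereference(ioc->icq_hint);
        if (icq && icq->q == q)
                goto out;

        icq = radix_tree_lookup(&ioc->icq_tree, q->id);
        if (icq && icq->q == q)
                rcu_assign_pointer(ioc->icq_hint, icq); /* allowed to race */
        else
                icq = NULL;
out:
        rcu_read_unlock();
        return icq;
}
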
369 struct io_context *ioc = current->io_context; in ioc_create_icq() local
384 icq->ioc = ioc; in ioc_create_icq()
391 spin_lock(&ioc->lock); in ioc_create_icq()
393 if (likely(!radix_tree_insert(&ioc->icq_tree, q->id, icq))) { in ioc_create_icq()
394 hlist_add_head(&icq->ioc_node, &ioc->icq_list); in ioc_create_icq()
405 spin_unlock(&ioc->lock); in ioc_create_icq()
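
ioc_create_icq() allocates an icq, points it at both the current context and the queue, then links it into the radix tree and both lists while holding the queue lock and ioc->lock together (note the plain spin_lock() on ioc->lock: interrupts are already off from the outer queue lock). If radix_tree_insert() reports a collision, another task linked an icq first, so ours is discarded and theirs looked up. The elevator icq cache is an assumption; only the assignment, locking, insert, and list-add lines are visible:

static struct io_cq *ioc_create_icq(struct request_queue *q)
{
        struct io_context *ioc = current->io_context;
        struct elevator_type *et = q->elevator->type;   /* assumed */
        struct io_cq *icq;

        icq = kmem_cache_alloc_node(et->icq_cache, GFP_ATOMIC | __GFP_ZERO,
                                    q->node);
        if (!icq)
                return NULL;

        icq->ioc = ioc;
        icq->q = q;
        INIT_LIST_HEAD(&icq->q_node);
        INIT_HLIST_NODE(&icq->ioc_node);

        /* Link under both locks, nested queue-then-context. */
        spin_lock_irq(&q->queue_lock);
        spin_lock(&ioc->lock);

        if (likely(!radix_tree_insert(&ioc->icq_tree, q->id, icq))) {
                hlist_add_head(&icq->ioc_node, &ioc->icq_list);
                list_add(&icq->q_node, &q->icq_list);
        } else {
                /* Lost the race: free ours and return the winner's. */
                kmem_cache_free(et->icq_cache, icq);
                icq = ioc_lookup_icq(q);
        }

        spin_unlock(&ioc->lock);
        spin_unlock_irq(&q->queue_lock);
        return icq;
}
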
413 struct io_context *ioc = current->io_context; in ioc_find_get_icq() local
416 if (unlikely(!ioc)) { in ioc_find_get_icq()
417 ioc = alloc_io_context(GFP_ATOMIC, q->node); in ioc_find_get_icq()
418 if (!ioc) in ioc_find_get_icq()
423 kmem_cache_free(iocontext_cachep, ioc); in ioc_find_get_icq()
424 ioc = current->io_context; in ioc_find_get_icq()
426 current->io_context = ioc; in ioc_find_get_icq()
429 get_io_context(ioc); in ioc_find_get_icq()
432 get_io_context(ioc); in ioc_find_get_icq()
442 put_io_context(ioc); in ioc_find_get_icq()
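
ioc_find_get_icq() ties the file together: make sure current has an io_context (allocating one and gracefully losing the install race if needed), take a reference for the caller, then look up the icq for this queue and create it if missing, dropping the reference again on failure. The control flow between the visible lines is assumed, but it follows directly from the call sites above:

struct io_cq *ioc_find_get_icq(struct request_queue *q)
{
        struct io_context *ioc = current->io_context;
        struct io_cq *icq = NULL;

        if (unlikely(!ioc)) {
                ioc = alloc_io_context(GFP_ATOMIC, q->node);
                if (!ioc)
                        return NULL;

                task_lock(current);
                if (current->io_context) {
                        /* Raced with another installer: keep theirs. */
                        kmem_cache_free(iocontext_cachep, ioc);
                        ioc = current->io_context;
                } else {
                        current->io_context = ioc;
                }
                get_io_context(ioc);
                task_unlock(current);
        } else {
                get_io_context(ioc);
                spin_lock_irq(&q->queue_lock);  /* assumed lookup step */
                icq = ioc_lookup_icq(q);
                spin_unlock_irq(&q->queue_lock);
        }

        if (!icq) {
                icq = ioc_create_icq(q);
                if (!icq) {
                        put_io_context(ioc);
                        return NULL;
                }
        }
        return icq;
}
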