Lines matching refs:vbq in mm/vmalloc.c

2024 struct vmap_block_queue *vbq; in new_vmap_block() local
2070 vbq = raw_cpu_ptr(&vmap_block_queue); in new_vmap_block()
2071 spin_lock(&vbq->lock); in new_vmap_block()
2072 list_add_tail_rcu(&vb->free_list, &vbq->free); in new_vmap_block()
2073 spin_unlock(&vbq->lock); in new_vmap_block()
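
The new_vmap_block() hits above show a block being published on the current CPU's queue: the free list is modified under vbq->lock with list_add_tail_rcu() so lockless readers can keep walking it. Below is a minimal sketch of that pattern under simplified assumptions; the demo_* names are hypothetical stand-ins, and the real struct vmap_block_queue / struct vmap_block in mm/vmalloc.c carry considerably more state.

#include <linux/percpu.h>
#include <linux/spinlock.h>
#include <linux/list.h>
#include <linux/rculist.h>
#include <linux/xarray.h>

/* Simplified, hypothetical stand-in for struct vmap_block_queue. */
struct demo_block_queue {
	spinlock_t lock;		/* protects the free list */
	struct list_head free;		/* blocks with space left */
	struct xarray blocks;		/* every block owned by this CPU */
};

/* Simplified, hypothetical stand-in for struct vmap_block. */
struct demo_block {
	spinlock_t lock;		/* protects the counters below */
	unsigned long va_start;		/* base address of the block */
	unsigned int size;		/* block size, in pages */
	unsigned int free;		/* pages still available */
	struct list_head free_list;	/* entry on demo_block_queue::free */
	struct list_head purge_node;	/* entry on a local purge list */
};

static DEFINE_PER_CPU(struct demo_block_queue, demo_block_queue);

/* Publish a freshly created block on the current CPU's free list. */
static void demo_publish_block(struct demo_block *db)
{
	/* raw_cpu_ptr() mirrors the listing; migration handling is glossed over. */
	struct demo_block_queue *dbq = raw_cpu_ptr(&demo_block_queue);

	spin_lock(&dbq->lock);
	list_add_tail_rcu(&db->free_list, &dbq->free);
	spin_unlock(&dbq->lock);
}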
2096 struct vmap_block_queue *vbq, struct list_head *purge_list, in purge_fragmented_block() argument
2113 spin_lock(&vbq->lock); in purge_fragmented_block()
2115 spin_unlock(&vbq->lock); in purge_fragmented_block()
2134 struct vmap_block_queue *vbq = &per_cpu(vmap_block_queue, cpu); in purge_fragmented_blocks() local
2137 list_for_each_entry_rcu(vb, &vbq->free, free_list) { in purge_fragmented_blocks()
2146 purge_fragmented_block(vb, vbq, &purge, true); in purge_fragmented_blocks()
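
The purge_fragmented_block() / purge_fragmented_blocks() hits show the reverse movement: a candidate block is unlinked from the queue's free list under vbq->lock and collected on a caller-supplied purge list, while the per-CPU walk itself runs under rcu_read_lock(). A sketch of that shape follows, reusing the hypothetical demo_* types from the previous sketch; the real code bases the purge decision on the block's free and dirty counters, which is elided here.

#include <linux/rcupdate.h>

/*
 * Pull one block off its queue's free list and park it on a local
 * purge list. The policy for deciding *whether* to purge is elided;
 * the stand-in only takes fully idle blocks.
 */
static bool demo_purge_block(struct demo_block *db,
			     struct demo_block_queue *dbq,
			     struct list_head *purge_list)
{
	spin_lock(&db->lock);
	if (db->free != db->size) {
		spin_unlock(&db->lock);
		return false;
	}
	db->free = 0;			/* prevent further hand-outs */
	spin_unlock(&db->lock);

	spin_lock(&dbq->lock);
	list_del_rcu(&db->free_list);	/* readers may still see it briefly */
	spin_unlock(&dbq->lock);

	list_add_tail(&db->purge_node, purge_list);
	return true;
}

/* Walk one CPU's free list and collect purge candidates. */
static void demo_purge_cpu(int cpu)
{
	struct demo_block_queue *dbq = &per_cpu(demo_block_queue, cpu);
	struct demo_block *db;
	LIST_HEAD(purge);

	rcu_read_lock();
	list_for_each_entry_rcu(db, &dbq->free, free_list)
		demo_purge_block(db, dbq, &purge);
	rcu_read_unlock();

	/* Blocks on @purge must only be freed after an RCU grace period. */
}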
2163 struct vmap_block_queue *vbq; in vb_alloc() local
2181 vbq = raw_cpu_ptr(&vmap_block_queue); in vb_alloc()
2182 list_for_each_entry_rcu(vb, &vbq->free, free_list) { in vb_alloc()
2199 spin_lock(&vbq->lock); in vb_alloc()
2201 spin_unlock(&vbq->lock); in vb_alloc()
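
The vb_alloc() hits outline the allocation fast path: the current CPU's queue is fetched with raw_cpu_ptr(), the free list is scanned under rcu_read_lock(), and vbq->lock is taken only when an exhausted block has to leave the list. Here is a sketch of that path, again using the hypothetical demo_* types; the real vb_alloc() tracks allocations with a bitmap and order-sized chunks rather than the simple bump counter used here.

#include <linux/mm.h>	/* PAGE_SHIFT */

/* Carve nr_pages out of the first per-CPU block that has room. */
static void *demo_alloc_pages(unsigned int nr_pages)
{
	struct demo_block_queue *dbq;
	struct demo_block *db;
	void *addr = NULL;

	rcu_read_lock();
	dbq = raw_cpu_ptr(&demo_block_queue);
	list_for_each_entry_rcu(db, &dbq->free, free_list) {
		unsigned long pages_off;

		spin_lock(&db->lock);
		if (db->free < nr_pages) {
			spin_unlock(&db->lock);
			continue;
		}

		/* Bump-style carve-out from the tail of the block. */
		pages_off = db->size - db->free;
		addr = (void *)(db->va_start + (pages_off << PAGE_SHIFT));
		db->free -= nr_pages;

		/* An exhausted block leaves the free list under vbq->lock. */
		if (!db->free) {
			spin_lock(&dbq->lock);
			list_del_rcu(&db->free_list);
			spin_unlock(&dbq->lock);
		}
		spin_unlock(&db->lock);
		break;
	}
	rcu_read_unlock();

	return addr;
}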
2270 struct vmap_block_queue *vbq = &per_cpu(vmap_block_queue, cpu); in _vm_unmap_aliases() local
2275 xa_for_each(&vbq->vmap_blocks, idx, vb) { in _vm_unmap_aliases()
2283 if (!purge_fragmented_block(vb, vbq, &purge_list, false) && in _vm_unmap_aliases()
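
The _vm_unmap_aliases() hits show a broader walk: every block owned by every CPU is visited via the per-CPU xarray (xa_for_each over vbq->vmap_blocks), not just the blocks still sitting on the free list, and fragmented blocks can be fed to the same purge helper. A sketch of that walk's shape, with the per-block flush decision elided and the hypothetical demo_* types reused from the earlier sketches:

/* Visit every block on every CPU, regardless of free-list membership. */
static void demo_walk_all_blocks(struct list_head *purge_list)
{
	int cpu;

	for_each_possible_cpu(cpu) {
		struct demo_block_queue *dbq = &per_cpu(demo_block_queue, cpu);
		struct demo_block *db;
		unsigned long idx;

		rcu_read_lock();
		xa_for_each(&dbq->blocks, idx, db) {
			/*
			 * Per-block work (e.g. recording a dirty range to
			 * flush) would go here; candidate blocks can be
			 * handed to demo_purge_block() as well.
			 */
			demo_purge_block(db, dbq, purge_list);
		}
		rcu_read_unlock();
	}
}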
4466 struct vmap_block_queue *vbq; in vmalloc_init() local
4469 vbq = &per_cpu(vmap_block_queue, i); in vmalloc_init()
4470 spin_lock_init(&vbq->lock); in vmalloc_init()
4471 INIT_LIST_HEAD(&vbq->free); in vmalloc_init()
4475 xa_init(&vbq->vmap_blocks); in vmalloc_init()
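
Finally, the vmalloc_init() hits show the boot-time setup: each possible CPU's queue gets its spinlock, free list and xarray initialized before the allocator is ever used. The equivalent for the hypothetical demo_* types would look like this:

#include <linux/init.h>

/* One-time setup of every possible CPU's queue. */
static void __init demo_queue_init(void)
{
	int i;

	for_each_possible_cpu(i) {
		struct demo_block_queue *dbq = &per_cpu(demo_block_queue, i);

		spin_lock_init(&dbq->lock);
		INIT_LIST_HEAD(&dbq->free);
		xa_init(&dbq->blocks);
	}
}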