| /Linux-v6.1/lib/ |
| D | test_xarray.c |
      20  void xa_dump(const struct xarray *xa) { }   in xa_dump() argument
      23  #define XA_BUG_ON(xa, x) do { \   argument
      27  xa_dump(xa); \
      40  static void *xa_store_index(struct xarray *xa, unsigned long index, gfp_t gfp)   in xa_store_index() argument
      42  return xa_store(xa, index, xa_mk_index(index), gfp);   in xa_store_index()
      45  static void xa_insert_index(struct xarray *xa, unsigned long index)   in xa_insert_index() argument
      47  XA_BUG_ON(xa, xa_insert(xa, index, xa_mk_index(index),   in xa_insert_index()
      51  static void xa_alloc_index(struct xarray *xa, unsigned long index, gfp_t gfp)   in xa_alloc_index() argument
      55  XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(index), xa_limit_32b,   in xa_alloc_index()
      57  XA_BUG_ON(xa, id != index);   in xa_alloc_index()
      [all …]
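The helpers above wrap three different ways of putting an entry into an XArray. As a rough illustration (not taken from test_xarray.c; the array name `my_objects` and the function are hypothetical), a minimal sketch of how xa_store(), xa_insert() and xa_alloc() differ:

```c
#include <linux/xarray.h>

/* Hypothetical array; XA_FLAGS_ALLOC is required for xa_alloc(). */
static DEFINE_XARRAY_ALLOC(my_objects);

static int example(void *obj_a, void *obj_b, void *obj_c)
{
	u32 id;
	int err;

	/* xa_store() overwrites whatever was at index 5. */
	err = xa_err(xa_store(&my_objects, 5, obj_a, GFP_KERNEL));
	if (err)
		return err;

	/* xa_insert() fails with -EBUSY if index 6 is already occupied. */
	err = xa_insert(&my_objects, 6, obj_b, GFP_KERNEL);
	if (err)
		return err;

	/* xa_alloc() picks a free index in the given range and returns it in id. */
	err = xa_alloc(&my_objects, &id, obj_c, xa_limit_32b, GFP_KERNEL);
	if (err)
		return err;

	pr_info("allocated index %u\n", id);
	return 0;
}
```

In short: xa_store() replaces unconditionally, xa_insert() refuses to overwrite, and xa_alloc() chooses the index itself.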
|
| D | xarray.c |
      31  static inline unsigned int xa_lock_type(const struct xarray *xa)   in xa_lock_type() argument
      33  return (__force unsigned int)xa->xa_flags & 3;   in xa_lock_type()
      56  static inline bool xa_track_free(const struct xarray *xa)   in xa_track_free() argument
      58  return xa->xa_flags & XA_FLAGS_TRACK_FREE;   in xa_track_free()
      61  static inline bool xa_zero_busy(const struct xarray *xa)   in xa_zero_busy() argument
      63  return xa->xa_flags & XA_FLAGS_ZERO_BUSY;   in xa_zero_busy()
      66  static inline void xa_mark_set(struct xarray *xa, xa_mark_t mark)   in xa_mark_set() argument
      68  if (!(xa->xa_flags & XA_FLAGS_MARK(mark)))   in xa_mark_set()
      69  xa->xa_flags |= XA_FLAGS_MARK(mark);   in xa_mark_set()
      72  static inline void xa_mark_clear(struct xarray *xa, xa_mark_t mark)   in xa_mark_clear() argument
      [all …]
|
| D | idr.c |
      383  XA_STATE(xas, &ida->xa, min / IDA_BITMAP_BITS);   in ida_alloc_range()
      489  XA_STATE(xas, &ida->xa, id / IDA_BITMAP_BITS);   in ida_free()
      543  XA_STATE(xas, &ida->xa, 0);   in ida_destroy()
      594  struct xarray *xa = &ida->xa;   in ida_dump() local
      595  pr_debug("ida: %p node %p free %d\n", ida, xa->xa_head,   in ida_dump()
      596  xa->xa_flags >> ROOT_TAG_SHIFT);   in ida_dump()
      597  ida_dump_entry(xa->xa_head, 0);   in ida_dump()
|
| /Linux-v6.1/include/linux/ |
| D | xarray.h |
      359  void *xa_find(struct xarray *xa, unsigned long *index,
      361  void *xa_find_after(struct xarray *xa, unsigned long *index,
      378  static inline void xa_init_flags(struct xarray *xa, gfp_t flags)   in xa_init_flags() argument
      380  spin_lock_init(&xa->xa_lock);   in xa_init_flags()
      381  xa->xa_flags = flags;   in xa_init_flags()
      382  xa->xa_head = NULL;   in xa_init_flags()
      393  static inline void xa_init(struct xarray *xa)   in xa_init() argument
      395  xa_init_flags(xa, 0);   in xa_init()
      405  static inline bool xa_empty(const struct xarray *xa)   in xa_empty() argument
      407  return xa->xa_head == NULL;   in xa_empty()
      [all …]
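xarray.h also declares the xa_find()/xa_find_after() pair shown at lines 359 and 361. A minimal sketch of initializing an array and walking its present entries with them (the array name `lookup` and the dump function are invented for illustration):

```c
#include <linux/xarray.h>

static DEFINE_XARRAY(lookup);	/* compile-time equivalent of xa_init() */

/* Print every non-NULL entry; indices may be sparse. */
static void dump_entries(void)
{
	unsigned long index = 0;
	void *entry;

	if (xa_empty(&lookup))
		return;

	entry = xa_find(&lookup, &index, ULONG_MAX, XA_PRESENT);
	while (entry) {
		pr_info("index %lu -> %p\n", index, entry);
		entry = xa_find_after(&lookup, &index, ULONG_MAX, XA_PRESENT);
	}
}
```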
|
| D | idr.h |
      245  struct xarray xa;   member
      251  .xa = XARRAY_INIT(name, IDA_INIT_FLAGS) \
      314  xa_init_flags(&ida->xa, IDA_INIT_FLAGS);   in ida_init()
      327  return xa_empty(&ida->xa);   in ida_is_empty()
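The IDA is a thin ID allocator built on the embedded struct xarray shown above. A small sketch of typical use, assuming a hypothetical `session_ida` ID space:

```c
#include <linux/idr.h>

static DEFINE_IDA(session_ida);	/* hypothetical ID space */

static int new_session_id(void)
{
	/* Returns a free ID in [1, 1023], or a negative errno. */
	return ida_alloc_range(&session_ida, 1, 1023, GFP_KERNEL);
}

static void put_session_id(int id)
{
	ida_free(&session_ida, id);
}
```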
|
| /Linux-v6.1/net/core/ |
| D | xdp.c |
      51  const struct xdp_mem_allocator *xa = ptr;   in xdp_mem_id_cmp() local
      54  return xa->mem.id != mem_id;   in xdp_mem_id_cmp()
      71  struct xdp_mem_allocator *xa;   in __xdp_mem_allocator_rcu_free() local
      73  xa = container_of(rcu, struct xdp_mem_allocator, rcu);   in __xdp_mem_allocator_rcu_free()
      76  ida_simple_remove(&mem_id_pool, xa->mem.id);   in __xdp_mem_allocator_rcu_free()
      78  kfree(xa);   in __xdp_mem_allocator_rcu_free()
      81  static void mem_xa_remove(struct xdp_mem_allocator *xa)   in mem_xa_remove() argument
      83  trace_mem_disconnect(xa);   in mem_xa_remove()
      85  if (!rhashtable_remove_fast(mem_id_ht, &xa->node, mem_id_rht_params))   in mem_xa_remove()
      86  call_rcu(&xa->rcu, __xdp_mem_allocator_rcu_free);   in mem_xa_remove()
      [all …]
|
| /Linux-v6.1/tools/testing/radix-tree/ |
| D | multiorder.c |
      15  static int item_insert_order(struct xarray *xa, unsigned long index,   in item_insert_order() argument
      18  XA_STATE_ORDER(xas, xa, index, order);   in item_insert_order()
      34  void multiorder_iteration(struct xarray *xa)   in multiorder_iteration() argument
      36  XA_STATE(xas, xa, 0);   in multiorder_iteration()
      47  err = item_insert_order(xa, index[i], order[i]);   in multiorder_iteration()
      71  item_kill_tree(xa);   in multiorder_iteration()
      74  void multiorder_tagged_iteration(struct xarray *xa)   in multiorder_tagged_iteration() argument
      76  XA_STATE(xas, xa, 0);   in multiorder_tagged_iteration()
      90  assert(!item_insert_order(xa, index[i], order[i]));   in multiorder_tagged_iteration()
      92  assert(!xa_marked(xa, XA_MARK_1));   in multiorder_tagged_iteration()
      [all …]
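item_insert_order() above uses the advanced XA_STATE_ORDER/xas_* interface to store one entry that spans a power-of-two range of indices. A hedged sketch of that retry pattern (the function name `store_order` is made up; multi-index entries require CONFIG_XARRAY_MULTI):

```c
#include <linux/xarray.h>

/*
 * Store one entry covering 2^order consecutive indices starting at
 * index (index must be aligned to the order).
 */
static int store_order(struct xarray *xa, unsigned long index,
		       unsigned int order, void *entry)
{
	XA_STATE_ORDER(xas, xa, index, order);

	do {
		xas_lock(&xas);
		xas_store(&xas, entry);
		xas_unlock(&xas);
	} while (xas_nomem(&xas, GFP_KERNEL));	/* retry after allocating memory */

	return xas_error(&xas);
}
```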
|
| D | iteration_check_2.c |
      35  struct xarray *xa = arg;   in throbber() local
      43  xa_store(xa, i, xa_mk_value(i), GFP_KERNEL);   in throbber()
      44  xa_set_mark(xa, i, XA_MARK_0);   in throbber()
      47  xa_erase(xa, i);   in throbber()
|
| D | test.c |
      79  int item_delete_rcu(struct xarray *xa, unsigned long index)   in item_delete_rcu() argument
      81  struct item *item = xa_erase(xa, index);   in item_delete_rcu()
      173  int tag_tagged_items(struct xarray *xa, unsigned long start, unsigned long end,   in tag_tagged_items() argument
      176  XA_STATE(xas, xa, start);   in tag_tagged_items()
      255  void item_kill_tree(struct xarray *xa)   in item_kill_tree() argument
      257  XA_STATE(xas, xa, 0);   in item_kill_tree()
      267  assert(xa_empty(xa));   in item_kill_tree()
|
| D | iteration_check.c |
      21  void my_item_insert(struct xarray *xa, unsigned long index)   in my_item_insert() argument
      23  XA_STATE(xas, xa, index);   in my_item_insert()
|
| /Linux-v6.1/drivers/infiniband/core/ |
| D | restrack.c |
      35  xa_init_flags(&rt[i].xa, XA_FLAGS_ALLOC);   in rdma_restrack_init()
      70  struct xarray *xa = &dev->res[i].xa;   in rdma_restrack_clean() local
      72  if (!xa_empty(xa)) {   in rdma_restrack_clean()
      79  xa_for_each(xa, index, e) {   in rdma_restrack_clean()
      99  xa_destroy(xa);   in rdma_restrack_clean()
      116  XA_STATE(xas, &rt->xa, 0);   in rdma_restrack_count()
      119  xa_lock(&rt->xa);   in rdma_restrack_count()
      122  xa_unlock(&rt->xa);   in rdma_restrack_count()
      247  ret = xa_insert(&rt->xa, res->id, res, GFP_KERNEL);   in rdma_restrack_add()
      255  ret = xa_insert(&rt->xa, counter->id, res, GFP_KERNEL);   in rdma_restrack_add()
      [all …]
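restrack inserts entries at caller-chosen IDs and empties the array with xa_for_each() before xa_destroy(). A simplified sketch of the same pattern with a hypothetical `tracker` array (not the restrack code itself):

```c
#include <linux/xarray.h>

static DEFINE_XARRAY(tracker);	/* hypothetical resource tracker */

/* The index is chosen by the caller, e.g. a hardware object ID. */
static int track(unsigned long id, void *res)
{
	return xa_insert(&tracker, id, res, GFP_KERNEL);
}

static void tracker_cleanup(void)
{
	unsigned long index;
	void *res;

	/* xa_for_each() skips empty slots; entries may be erased inside the loop. */
	xa_for_each(&tracker, index, res)
		xa_erase(&tracker, index);

	xa_destroy(&tracker);
}
```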
|
| D | counters.c |
      290  xa_lock(&rt->xa);   in rdma_get_counter_auto_mode()
      291  xa_for_each(&rt->xa, id, res) {   in rdma_get_counter_auto_mode()
      305  xa_unlock(&rt->xa);   in rdma_get_counter_auto_mode()
      402  xa_lock(&rt->xa);   in get_running_counters_hwstat_sum()
      403  xa_for_each(&rt->xa, id, res) {   in get_running_counters_hwstat_sum()
      407  xa_unlock(&rt->xa);   in get_running_counters_hwstat_sum()
      417  xa_lock(&rt->xa);   in get_running_counters_hwstat_sum()
      421  xa_unlock(&rt->xa);   in get_running_counters_hwstat_sum()
|
| /Linux-v6.1/drivers/iommu/ |
| D | ioasid.c |
      54  struct xarray xa;   member
      72  .xa = XARRAY_INIT(ioasid_xa, XA_FLAGS_ALLOC),
      81  if (xa_alloc(&default_allocator.xa, &id, opaque, XA_LIMIT(min, max), GFP_ATOMIC)) {   in default_alloc()
      93  ioasid_data = xa_erase(&default_allocator.xa, ioasid);   in default_free()
      106  xa_init_flags(&ia_data->xa, XA_FLAGS_ALLOC);   in ioasid_alloc_allocator()
      160  if (xa_empty(&active_allocator->xa)) {   in ioasid_register_allocator()
      229  WARN_ON(!xa_empty(&pallocator->xa));   in ioasid_unregister_allocator()
      274  ioasid_data = xa_load(&active_allocator->xa, ioasid);   in ioasid_set_data()
      331  xa_alloc(&active_allocator->xa, &id, data, XA_LIMIT(id, id), GFP_ATOMIC)) {   in ioasid_alloc()
      357  ioasid_data = xa_load(&active_allocator->xa, ioasid);   in ioasid_free()
      [all …]
|
| /Linux-v6.1/drivers/gpu/drm/i915/ |
| D | i915_drm_client.c |
      32  struct xarray *xa = &clients->xarray;   in i915_drm_client_add() local
      39  xa_lock_irq(xa);   in i915_drm_client_add()
      40  ret = __xa_alloc_cyclic(xa, &client->id, client, xa_limit_32b,   in i915_drm_client_add()
      42  xa_unlock_irq(xa);   in i915_drm_client_add()
      63  struct xarray *xa = &client->clients->xarray;   in __i915_drm_client_free() local
      66  xa_lock_irqsave(xa, flags);   in __i915_drm_client_free()
      67  __xa_erase(xa, client->id);   in __i915_drm_client_free()
      68  xa_unlock_irqrestore(xa, flags);   in __i915_drm_client_free()
|
| /Linux-v6.1/include/trace/events/ |
| D | xdp.h |
      322  TP_PROTO(const struct xdp_mem_allocator *xa),
      324  TP_ARGS(xa),
      327  __field(const struct xdp_mem_allocator *, xa)
      334  __entry->xa = xa;
      335  __entry->mem_id = xa->mem.id;
      336  __entry->mem_type = xa->mem.type;
      337  __entry->allocator = xa->allocator;
      349  TP_PROTO(const struct xdp_mem_allocator *xa,
      352  TP_ARGS(xa, rxq),
      355  __field(const struct xdp_mem_allocator *, xa)
      [all …]
|
| /Linux-v6.1/drivers/infiniband/sw/rxe/ |
| D | rxe_pool.c |
      109  xa_init_flags(&pool->xa, XA_FLAGS_ALLOC);   in rxe_pool_init()
      116  WARN_ON(!xa_empty(&pool->xa));   in rxe_pool_cleanup()
      145  err = xa_alloc_cyclic(&pool->xa, &elem->index, NULL, pool->limit,   in rxe_alloc()
      184  err = xa_alloc_cyclic(&pool->xa, &elem->index, NULL, pool->limit,   in __rxe_add_to_pool()
      199  struct xarray *xa = &pool->xa;   in rxe_pool_get_index() local
      203  elem = xa_load(xa, index);   in rxe_pool_get_index()
      223  struct xarray *xa = &pool->xa;   in __rxe_cleanup() local
      234  xa_ret = xa_erase(xa, elem->index);   in __rxe_cleanup()
      300  xa_ret = xa_store(&elem->pool->xa, elem->index, elem, GFP_KERNEL);   in __rxe_finalize()
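rxe_pool.c hands out object indices with xa_alloc_cyclic(), which advances a cursor so that recently freed indices are not reused immediately. A rough sketch of the call (the `pool` array, the limits and the wrapper function are invented for illustration):

```c
#include <linux/xarray.h>

static DEFINE_XARRAY_ALLOC(pool);	/* hypothetical object pool */
static u32 pool_next;			/* remembers where the last search ended */

static int pool_add(void *obj, u32 *index)
{
	/*
	 * xa_alloc_cyclic() returns 0 on success, 1 if the allocation
	 * wrapped back to the lower limit, or a negative errno.
	 */
	int err = xa_alloc_cyclic(&pool, index, obj, XA_LIMIT(1, 4095),
				  &pool_next, GFP_KERNEL);

	return err < 0 ? err : 0;
}
```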
|
| /Linux-v6.1/io_uring/ |
| D | tctx.c |
      51  WARN_ON_ONCE(!xa_empty(&tctx->xa));   in __io_uring_free()
      84  xa_init(&tctx->xa);   in io_uring_alloc_task_context()
      115  if (!xa_load(&tctx->xa, (unsigned long)ctx)) {   in __io_uring_add_tctx_node()
      122  ret = xa_err(xa_store(&tctx->xa, (unsigned long)ctx,   in __io_uring_add_tctx_node()
      162  node = xa_erase(&tctx->xa, index);   in io_uring_del_tctx_node()
      184  xa_for_each(&tctx->xa, index, node) {   in io_uring_clean_tctx()
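io_uring keys this per-task XArray by the ctx pointer cast to unsigned long and turns xa_store() failures into an errno with xa_err(). A minimal sketch of that idiom with a hypothetical `ctx_map`:

```c
#include <linux/xarray.h>

static DEFINE_XARRAY(ctx_map);	/* hypothetical: data keyed by a pointer */

static int remember(void *key, void *data)
{
	/*
	 * Any unsigned long works as an index, including a kernel pointer.
	 * xa_store() returns the old entry or an xa_err()-encoded pointer,
	 * so xa_err() converts the result into 0 or a negative errno.
	 */
	return xa_err(xa_store(&ctx_map, (unsigned long)key, data, GFP_KERNEL));
}

static void *forget(void *key)
{
	return xa_erase(&ctx_map, (unsigned long)key);
}
```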
|
| /Linux-v6.1/arch/powerpc/boot/dts/fsl/ |
| D | p2020ds.dtsi |
      221  0x8800 0x0 0x0 0x2 &i8259 0xa 0x2
      225  0x8900 0x0 0x0 0x2 &i8259 0xa 0x2
      229  0x8a00 0x0 0x0 0x2 &i8259 0xa 0x2
      233  0x8b00 0x0 0x0 0x2 &i8259 0xa 0x2
      237  0x8c00 0x0 0x0 0x2 &i8259 0xa 0x2
      241  0x8d00 0x0 0x0 0x2 &i8259 0xa 0x2
      245  0x8e00 0x0 0x0 0x2 &i8259 0xa 0x2
      249  0x8f00 0x0 0x0 0x2 &i8259 0xa 0x2
|
| /Linux-v6.1/drivers/infiniband/hw/hns/ |
| D | hns_roce_srq.c |
      17  xa_lock(&srq_table->xa);   in hns_roce_srq_event()
      18  srq = xa_load(&srq_table->xa, srqn & (hr_dev->caps.num_srqs - 1));   in hns_roce_srq_event()
      21  xa_unlock(&srq_table->xa);   in hns_roce_srq_event()
      125  ret = xa_err(xa_store(&srq_table->xa, srq->srqn, srq, GFP_KERNEL));   in alloc_srqc()
      138  xa_erase(&srq_table->xa, srq->srqn);   in alloc_srqc()
      156  xa_erase(&srq_table->xa, srq->srqn);   in free_srqc()
      460  xa_init(&srq_table->xa);   in hns_roce_init_srq_table()
|
| /Linux-v6.1/mm/ |
| D | list_lru.c |
      56  struct list_lru_memcg *mlru = xa_load(&lru->xa, idx);   in list_lru_from_memcg_idx()
      312  xa_for_each(&lru->xa, index, mlru) {   in list_lru_walk_node()
      355  struct list_lru_memcg *mlru = xa_erase_irq(&lru->xa, src_idx);   in memcg_list_lru_free()
      370  xa_init_flags(&lru->xa, XA_FLAGS_LOCK_IRQ);   in memcg_init_list_lru()
      376  XA_STATE(xas, &lru->xa, 0);   in memcg_destroy_list_lru()
      469  return idx < 0 || xa_load(&lru->xa, idx);   in memcg_list_lru_allocated()
      481  XA_STATE(xas, &lru->xa, 0);   in memcg_list_lru_alloc()
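list_lru initializes its XArray with XA_FLAGS_LOCK_IRQ and uses the _irq accessors such as xa_erase_irq(). A small sketch of those variants with an invented `irq_map` array:

```c
#include <linux/xarray.h>

/* Hypothetical array that is also touched from interrupt context. */
static DEFINE_XARRAY_FLAGS(irq_map, XA_FLAGS_LOCK_IRQ);

static int irq_map_set(unsigned long index, void *entry)
{
	/* The _irq variants take xa_lock_irq() internally. */
	return xa_err(xa_store_irq(&irq_map, index, entry, GFP_ATOMIC));
}

static void *irq_map_clear(unsigned long index)
{
	return xa_erase_irq(&irq_map, index);
}
```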
|
| /Linux-v6.1/Documentation/devicetree/bindings/input/touchscreen/ |
| D | eeti.txt |
      5   - reg: I2C address of the chip. Should be set to <0xa>
      25  reg = <0xa>;
|
| /Linux-v6.1/Documentation/devicetree/bindings/pci/ |
| D | pci-msi.txt |
      67  reg = <0xa 0x1>;
      95  reg = <0xa 0x1>;
      124  reg = <0xa 0x1>;
      154  reg = <0xa 0x1>;
      184  reg = <0xa 0x1>;
|
| D | pci-iommu.txt |
      56  reg = <0xa 0x1>;
      83  reg = <0xa 0x1>;
      111  reg = <0xa 0x1>;
      139  reg = <0xa 0x1>;
|
| /Linux-v6.1/crypto/ |
| D | dh.c |
      19  MPI xa; /* Value is guaranteed to be set. */   member
      26  mpi_free(ctx->xa);   in dh_clear_ctx()
      39  return mpi_powm(val, base, ctx->xa, ctx->p);   in _compute_val()
      86  ctx->xa = mpi_read_raw_data(params.key, params.key_size);   in dh_set_secret()
      87  if (!ctx->xa)   in dh_set_secret()
      177  if (unlikely(!ctx->xa)) {   in dh_compute_value()
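For context on this hit: the `xa` member in dh.c is not an XArray but the Diffie-Hellman private exponent. The call at line 39 evaluates val = base^xa mod p with mpi_powm(); with base = g it yields the local public value, and with base = the peer's public value it yields the shared secret.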
|
| /Linux-v6.1/drivers/crypto/qat/qat_common/ |
| D | qat_asym_algs.c |
      85  dma_addr_t xa;   member
      89  dma_addr_t xa;   member
      105  char *xa;   member
      232  if (unlikely(!ctx->xa))   in qat_dh_compute_value()
      264  qat_req->in.dh.in.xa = ctx->dma_xa;   in qat_dh_compute_value()
      269  qat_req->in.dh.in_g2.xa = ctx->dma_xa;   in qat_dh_compute_value()
      274  qat_req->in.dh.in.xa = ctx->dma_xa;   in qat_dh_compute_value()
      433  if (ctx->xa) {   in qat_dh_clear_ctx()
      434  memset(ctx->xa, 0, ctx->p_size);   in qat_dh_clear_ctx()
      435  dma_free_coherent(dev, ctx->p_size, ctx->xa, ctx->dma_xa);   in qat_dh_clear_ctx()
      [all …]
|