Identifier search results for "nelems" in the Linux v5.4 source tree, grouped by
directory. The number at the start of each hit is the line number within the
listed file; the trailing note names the enclosing function and the kind of use
(argument, member, local) where the search tool reported it.

/Linux-v5.4/include/linux/

  dma-debug.h
      37  int nelems, int dir);
      62  int nelems, int direction);
      66  int nelems, int direction);
     106  int nelems, int dir)  in debug_dma_unmap_sg() argument
     146  int nelems, int direction)  in debug_dma_sync_sg_for_cpu() argument
     152  int nelems, int direction)  in debug_dma_sync_sg_for_device() argument

  pci-dma-compat.h
      94  int nelems, int direction)  in pci_dma_sync_sg_for_cpu() argument
      96  dma_sync_sg_for_cpu(&hwdev->dev, sg, nelems, (enum dma_data_direction)direction);  in pci_dma_sync_sg_for_cpu()
     101  int nelems, int direction)  in pci_dma_sync_sg_for_device() argument
     103  dma_sync_sg_for_device(&hwdev->dev, sg, nelems, (enum dma_data_direction)direction);  in pci_dma_sync_sg_for_device()

  dma-mapping.h
     409  int nelems, enum dma_data_direction dir)  in dma_sync_sg_for_cpu() argument
     415  dma_direct_sync_sg_for_cpu(dev, sg, nelems, dir);  in dma_sync_sg_for_cpu()
     417  ops->sync_sg_for_cpu(dev, sg, nelems, dir);  in dma_sync_sg_for_cpu()
     418  debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir);  in dma_sync_sg_for_cpu()
     423  int nelems, enum dma_data_direction dir)  in dma_sync_sg_for_device() argument
     429  dma_direct_sync_sg_for_device(dev, sg, nelems, dir);  in dma_sync_sg_for_device()
     431  ops->sync_sg_for_device(dev, sg, nelems, dir);  in dma_sync_sg_for_device()
     432  debug_dma_sync_sg_for_device(dev, sg, nelems, dir);  in dma_sync_sg_for_device()
     508  struct scatterlist *sg, int nelems, enum dma_data_direction dir)  in dma_sync_sg_for_cpu() argument
     512  struct scatterlist *sg, int nelems, enum dma_data_direction dir)  in dma_sync_sg_for_device() argument
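The dma-mapping.h hits above are the generic scatterlist sync entry points. In
these calls nelems is the number of scatterlist entries, and it must be the same
count that was originally passed to dma_map_sg(), not the (possibly smaller)
count dma_map_sg() returned. A minimal sketch of how a driver strings these
calls together; the function and variable names are hypothetical, not taken
from the files above:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/*
 * Map a scatterlist for a device-to-memory transfer, sync it back for the
 * CPU once the hardware is done, then unmap it.
 */
static int example_rx_dma(struct device *dev, struct scatterlist *sgl,
                          int nelems)
{
        int mapped;

        mapped = dma_map_sg(dev, sgl, nelems, DMA_FROM_DEVICE);
        if (!mapped)
                return -ENOMEM;

        /* ... program the hardware with 'mapped' entries and wait ... */

        /*
         * Sync and unmap take the original nelems passed to dma_map_sg(),
         * not the value dma_map_sg() returned.
         */
        dma_sync_sg_for_cpu(dev, sgl, nelems, DMA_FROM_DEVICE);

        /* ... the CPU may now read the received data ... */

        dma_unmap_sg(dev, sgl, nelems, DMA_FROM_DEVICE);
        return 0;
}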
  rhashtable.h
     186  return atomic_read(&ht->nelems) > (tbl->size / 4 * 3) &&  in rht_grow_above_75()
     199  return atomic_read(&ht->nelems) < (tbl->size * 3 / 10) &&  in rht_shrink_below_30()
     211  return atomic_read(&ht->nelems) > tbl->size &&  in rht_grow_above_100()
     223  return atomic_read(&ht->nelems) >= ht->max_elems;  in rht_grow_above_max()
     794  atomic_inc(&ht->nelems);  in __rhashtable_insert_fast()
    1063  atomic_dec(&ht->nelems);  in __rhashtable_remove_fast_one()

  rhashtable-types.h
      90  atomic_t nelems;  member
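In the rhashtable code, nelems is the table-wide element counter: incremented
on insert, decremented on removal, and compared against the bucket-table size
to decide when to grow or shrink. A small standalone sketch of the two
thresholds visible above (the real helpers carry additional conditions that the
truncated hits cut off at the trailing "&&"):

#include <stdbool.h>

/* Grow once the table is more than three-quarters full. */
static bool grow_above_75(unsigned int nelems, unsigned int tbl_size)
{
        return nelems > tbl_size / 4 * 3;
}

/* Shrink once occupancy drops below 30%. */
static bool shrink_below_30(unsigned int nelems, unsigned int tbl_size)
{
        return nelems < tbl_size * 3 / 10;
}

/*
 * With tbl_size = 1024: grow_above_75() fires once nelems exceeds 768, and
 * shrink_below_30() fires once nelems falls below 307 (1024 * 3 / 10).
 */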
/Linux-v5.4/arch/powerpc/kernel/

  dma-iommu.c
      91  int nelems, enum dma_data_direction direction,  in dma_iommu_map_sg() argument
      95  return dma_direct_map_sg(dev, sglist, nelems, direction, attrs);  in dma_iommu_map_sg()
      96  return ppc_iommu_map_sg(dev, get_iommu_table_base(dev), sglist, nelems,  in dma_iommu_map_sg()
     101  int nelems, enum dma_data_direction direction,  in dma_iommu_unmap_sg() argument
     105  ppc_iommu_unmap_sg(get_iommu_table_base(dev), sglist, nelems,  in dma_iommu_unmap_sg()
     108  dma_direct_unmap_sg(dev, sglist, nelems, direction, attrs);  in dma_iommu_unmap_sg()

  iommu.c
     419  struct scatterlist *sglist, int nelems,  in ppc_iommu_map_sg() argument
     432  if ((nelems == 0) || !tbl)  in ppc_iommu_map_sg()
     437  incount = nelems;  in ppc_iommu_map_sg()
     443  DBG("sg mapping %d elements:\n", nelems);  in ppc_iommu_map_sg()
     446  for_each_sg(sglist, s, nelems, i) {  in ppc_iommu_map_sg()
     545  for_each_sg(sglist, s, nelems, i) {  in ppc_iommu_map_sg()
     564  int nelems, enum dma_data_direction direction,  in ppc_iommu_unmap_sg() argument
     575  while (nelems--) {  in ppc_iommu_unmap_sg()
/Linux-v5.4/drivers/xen/

  swiotlb-xen.c
     466  xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,  in xen_swiotlb_unmap_sg() argument
     474  for_each_sg(sgl, sg, nelems, i)  in xen_swiotlb_unmap_sg()
     481  xen_swiotlb_map_sg(struct device *dev, struct scatterlist *sgl, int nelems,  in xen_swiotlb_map_sg() argument
     489  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_map_sg()
     497  return nelems;  in xen_swiotlb_map_sg()
     506  int nelems, enum dma_data_direction dir)  in xen_swiotlb_sync_sg_for_cpu() argument
     511  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_sync_sg_for_cpu()
     519  int nelems, enum dma_data_direction dir)  in xen_swiotlb_sync_sg_for_device() argument
     524  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_sync_sg_for_device()
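The for_each_sg() walk that appears here, and in most of the architecture
map/unmap/sync implementations below, iterates over nelems scatterlist entries
and follows chained lists transparently. A minimal sketch of the idiom; the
callback body and names are placeholders, not Xen or swiotlb code:

#include <linux/scatterlist.h>

static void example_walk_sg(struct scatterlist *sgl, int nelems)
{
        struct scatterlist *sg;
        int i;

        for_each_sg(sgl, sg, nelems, i) {
                /* DMA address and length of this entry (valid after mapping). */
                dma_addr_t addr = sg_dma_address(sg);
                unsigned int len = sg_dma_len(sg);

                /* ... sync, bounce or translate [addr, addr + len) here ... */
                (void)addr;
                (void)len;
        }
}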
/Linux-v5.4/arch/sparc/kernel/

  iommu.c
     433  int nelems, enum dma_data_direction direction,  in dma_4u_map_sg() argument
     450  if (nelems == 0 || !iommu)  in dma_4u_map_sg()
     468  incount = nelems;  in dma_4u_map_sg()
     478  for_each_sg(sglist, s, nelems, i) {  in dma_4u_map_sg()
     557  for_each_sg(sglist, s, nelems, i) {  in dma_4u_map_sg()
     609  int nelems, enum dma_data_direction direction,  in dma_4u_unmap_sg() argument
     627  while (nelems--) {  in dma_4u_unmap_sg()
     699  struct scatterlist *sglist, int nelems,  in dma_4u_sync_sg_for_cpu() argument
     731  for_each_sg(sglist, sg, nelems, i) {  in dma_4u_sync_sg_for_cpu()

  pci_sun4v.c
     468  int nelems, enum dma_data_direction direction,  in dma_4v_map_sg() argument
     487  if (nelems == 0 || !iommu)  in dma_4v_map_sg()
     500  incount = nelems;  in dma_4v_map_sg()
     522  for_each_sg(sglist, s, nelems, i) {  in dma_4v_map_sg()
     604  for_each_sg(sglist, s, nelems, i) {  in dma_4v_map_sg()
     626  int nelems, enum dma_data_direction direction,  in dma_4v_unmap_sg() argument
     647  while (nelems--) {  in dma_4v_unmap_sg()
/Linux-v5.4/arch/x86/kernel/

  amd_gart_64.c
     326  int nelems, struct scatterlist *sout,  in __dma_map_cont() argument
     337  for_each_sg(start, s, nelems, i) {  in __dma_map_cont()
     364  dma_map_cont(struct device *dev, struct scatterlist *start, int nelems,  in dma_map_cont() argument
     368  BUG_ON(nelems != 1);  in dma_map_cont()
     373  return __dma_map_cont(dev, start, nelems, sout, pages);  in dma_map_cont()

  pci-calgary_64.c
     326  int nelems,enum dma_data_direction dir,  in calgary_unmap_sg() argument
     336  for_each_sg(sglist, s, nelems, i) {  in calgary_unmap_sg()
     350  int nelems, enum dma_data_direction dir,  in calgary_map_sg() argument
     360  for_each_sg(sg, s, nelems, i) {  in calgary_map_sg()
     381  return nelems;  in calgary_map_sg()
     383  calgary_unmap_sg(dev, sg, nelems, dir, 0);  in calgary_map_sg()
     384  for_each_sg(sg, s, nelems, i) {  in calgary_map_sg()
/Linux-v5.4/kernel/dma/

  debug.c
    1400  int nelems, int dir)  in debug_dma_unmap_sg() argument
    1408  for_each_sg(sglist, s, nelems, i) {  in debug_dma_unmap_sg()
    1418  .sg_call_ents = nelems,  in debug_dma_unmap_sg()
    1576  int nelems, int direction)  in debug_dma_sync_sg_for_cpu() argument
    1584  for_each_sg(sg, s, nelems, i) {  in debug_dma_sync_sg_for_cpu()
    1594  .sg_call_ents = nelems,  in debug_dma_sync_sg_for_cpu()
    1609  int nelems, int direction)  in debug_dma_sync_sg_for_device() argument
    1617  for_each_sg(sg, s, nelems, i) {  in debug_dma_sync_sg_for_device()
    1627  .sg_call_ents = nelems,  in debug_dma_sync_sg_for_device()

  dummy.c
      22  int nelems, enum dma_data_direction dir,  in dma_dummy_map_sg() argument
/Linux-v5.4/tools/lib/bpf/

  btf.c
     233  __u32 nelems = 1;  in btf__resolve_size() local
     260  if (nelems && array->nelems > UINT32_MAX / nelems)  in btf__resolve_size()
     262  nelems *= array->nelems;  in btf__resolve_size()
     276  if (nelems && size > UINT32_MAX / nelems)  in btf__resolve_size()
     279  return nelems * size;  in btf__resolve_size()
    1873  h = hash_combine(h, info->nelems);  in btf_hash_array()
    1895  info1->nelems == info2->nelems;  in btf_equal_array()
    1908  return btf_array(t1)->nelems == btf_array(t2)->nelems;  in btf_compat_array()
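In btf__resolve_size(), nelems accumulates the element counts of (possibly
nested) array types and is finally multiplied by the element size, with both
steps guarded against 32-bit overflow. A standalone sketch of that arithmetic;
this is not the libbpf API itself, just the computation the hits above show:

#include <stdint.h>

/* Return nelems * elem_size, or -1 if the product would overflow __u32. */
static int64_t btf_array_bytes(uint32_t nelems, uint32_t elem_size)
{
        if (nelems && elem_size > UINT32_MAX / nelems)
                return -1;
        return (int64_t)nelems * elem_size;
}

/*
 * Example: an array of 1024 u64 elements resolves to 8192 bytes, while
 * 0x100000 elements of 0x2000 bytes each is rejected as an overflow.
 */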
/Linux-v5.4/include/uapi/linux/

  btf.h
     107  __u32 nelems;  member

/Linux-v5.4/tools/include/uapi/linux/

  btf.h
     107  __u32 nelems;  member
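Both UAPI copies of btf.h declare nelems inside struct btf_array, the metadata
record that follows a BTF_KIND_ARRAY type entry. For context, the struct is
reproduced below from memory of the v5.4 header, so treat the comments as
paraphrase rather than the exact source text:

struct btf_array {
        __u32   type;           /* type ID of the array's element type */
        __u32   index_type;     /* type ID of the array's index type */
        __u32   nelems;         /* number of elements */
};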
/Linux-v5.4/arch/powerpc/include/asm/

  iommu.h
     253  struct scatterlist *sglist, int nelems,
     259  int nelems,
/Linux-v5.4/lib/

  rhashtable.c
     381  unsigned int nelems = atomic_read(&ht->nelems);  in rhashtable_shrink() local
     384  if (nelems)  in rhashtable_shrink()
     385  size = roundup_pow_of_two(nelems * 3 / 2);  in rhashtable_shrink()
     573  atomic_inc(&ht->nelems);  in rhashtable_insert_one()
    1060  atomic_set(&ht->nelems, 0);  in rhashtable_init()
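rhashtable_shrink() sizes the replacement bucket table from the live element
count: 1.5 times nelems, rounded up to a power of two (the real function
additionally enforces the table's configured minimum size). A small sketch of
that calculation with the rounding open-coded; shrink_target_size() is a
hypothetical helper, not a kernel function:

/* New bucket-table size after a shrink: nelems * 3 / 2, rounded up to a
 * power of two. The kernel uses roundup_pow_of_two() for the rounding.
 */
static unsigned int shrink_target_size(unsigned int nelems)
{
        unsigned int want = nelems * 3 / 2;
        unsigned int size = 1;

        while (size < want)
                size <<= 1;
        return size;
}

/* shrink_target_size(100) == 256: 100 * 3 / 2 = 150, next power of two is 256. */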
/Linux-v5.4/net/netfilter/

  nft_dynset.c
      52  if (!atomic_add_unless(&set->nelems, 1, set->size))  in nft_dynset_new()
      74  atomic_dec(&set->nelems);  in nft_dynset_new()

  nft_set_rbtree.c
     391  atomic_dec(&set->nelems);  in nft_rbtree_gc()
     396  atomic_dec(&set->nelems);  in nft_rbtree_gc()
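Here nelems is an nftables set's element count, which doubles as a capacity
check: atomic_add_unless() bumps it only while it is still below the set's
maximum size, and it is decremented again when an insert is abandoned or
garbage collection removes entries. A generic sketch of that bounded-counter
pattern; the struct and helper names are illustrative, not the nft_set API:

#include <linux/atomic.h>
#include <linux/errno.h>

struct bounded_set {
        atomic_t        nelems;         /* current number of elements */
        unsigned int    size;           /* maximum number of elements */
};

/* Reserve room for one new element; fails once the set is full. */
static int bounded_set_reserve(struct bounded_set *set)
{
        if (!atomic_add_unless(&set->nelems, 1, set->size))
                return -ENOSPC;
        return 0;
}

/* Release the reservation if the insert is later abandoned. */
static void bounded_set_release(struct bounded_set *set)
{
        atomic_dec(&set->nelems);
}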
/Linux-v5.4/arch/powerpc/platforms/pseries/

  vio.c
     549  int nelems, enum dma_data_direction direction,  in vio_dma_iommu_map_sg() argument
     558  for_each_sg(sglist, sgl, nelems, count)  in vio_dma_iommu_map_sg()
     563  ret = ppc_iommu_map_sg(dev, tbl, sglist, nelems, dma_get_mask(dev),  in vio_dma_iommu_map_sg()
     582  struct scatterlist *sglist, int nelems,  in vio_dma_iommu_unmap_sg() argument
     592  for_each_sg(sglist, sgl, nelems, count)  in vio_dma_iommu_unmap_sg()
     595  ppc_iommu_unmap_sg(tbl, sglist, nelems, direction, attrs);  in vio_dma_iommu_unmap_sg()
/Linux-v5.4/drivers/iommu/

  intel-iommu.c
    3698  int nelems, enum dma_data_direction dir,  in intel_unmap_sg() argument
    3707  return dma_direct_unmap_sg(dev, sglist, nelems, dir, attrs);  in intel_unmap_sg()
    3709  for_each_sg(sglist, sg, nelems, i) {  in intel_unmap_sg()
    3718  static int intel_map_sg(struct device *dev, struct scatterlist *sglist, int nelems,  in intel_map_sg() argument
    3733  return dma_direct_map_sg(dev, sglist, nelems, dir, attrs);  in intel_map_sg()
    3741  for_each_sg(sglist, sg, nelems, i)  in intel_map_sg()
    3775  return nelems;  in intel_map_sg()
    3958  bounce_unmap_sg(struct device *dev, struct scatterlist *sglist, int nelems,  in bounce_unmap_sg() argument
    3964  for_each_sg(sglist, sg, nelems, i)  in bounce_unmap_sg()
    3970  bounce_map_sg(struct device *dev, struct scatterlist *sglist, int nelems,  in bounce_map_sg() argument
    [all …]

  dma-iommu.c
     678  struct scatterlist *sgl, int nelems,  in iommu_dma_sync_sg_for_cpu() argument
     687  for_each_sg(sgl, sg, nelems, i)  in iommu_dma_sync_sg_for_cpu()
     692  struct scatterlist *sgl, int nelems,  in iommu_dma_sync_sg_for_device() argument
     701  for_each_sg(sgl, sg, nelems, i)  in iommu_dma_sync_sg_for_device()
/Linux-v5.4/tools/bpf/bpftool/

  btf_dumper.c
      65  for (i = 0; i < arr->nelems; i++) {  in btf_dumper_array()
     456  BTF_PRINT_ARG("[%d]", array->nelems);  in __btf_dumper_type_only()