/Linux-v4.19/arch/arm64/mm/ |
D | dma-mapping.c |
    184  int nelems, enum dma_data_direction dir,  in __swiotlb_map_sg_attrs() argument
    190  ret = swiotlb_map_sg_attrs(dev, sgl, nelems, dir, attrs);  in __swiotlb_map_sg_attrs()
    201  struct scatterlist *sgl, int nelems,  in __swiotlb_unmap_sg_attrs() argument
    210  for_each_sg(sgl, sg, nelems, i)  in __swiotlb_unmap_sg_attrs()
    213  swiotlb_unmap_sg_attrs(dev, sgl, nelems, dir, attrs);  in __swiotlb_unmap_sg_attrs()
    235  struct scatterlist *sgl, int nelems,  in __swiotlb_sync_sg_for_cpu() argument
    242  for_each_sg(sgl, sg, nelems, i)  in __swiotlb_sync_sg_for_cpu()
    245  swiotlb_sync_sg_for_cpu(dev, sgl, nelems, dir);  in __swiotlb_sync_sg_for_cpu()
    249  struct scatterlist *sgl, int nelems,  in __swiotlb_sync_sg_for_device() argument
    255  swiotlb_sync_sg_for_device(dev, sgl, nelems, dir);  in __swiotlb_sync_sg_for_device()
    [all …]
|
/Linux-v4.19/include/linux/ |
D | dma-debug.h |
     49  int nelems, int dir);
     85  int nelems, int direction);
     89  int nelems, int direction);
    131  int nelems, int dir)  in debug_dma_unmap_sg() argument
    187  int nelems, int direction)  in debug_dma_sync_sg_for_cpu() argument
    193  int nelems, int direction)  in debug_dma_sync_sg_for_device() argument
|
D | swiotlb.h |
     84  swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems,
     90  int nelems, enum dma_data_direction dir,
     99  int nelems, enum dma_data_direction dir);
    107  int nelems, enum dma_data_direction dir);
|
D | pci-dma-compat.h |
     94  int nelems, int direction)  in pci_dma_sync_sg_for_cpu() argument
     96  dma_sync_sg_for_cpu(&hwdev->dev, sg, nelems, (enum dma_data_direction)direction);  in pci_dma_sync_sg_for_cpu()
    101  int nelems, int direction)  in pci_dma_sync_sg_for_device() argument
    103  dma_sync_sg_for_device(&hwdev->dev, sg, nelems, (enum dma_data_direction)direction);  in pci_dma_sync_sg_for_device()
|
D | rhashtable.h |
    159  return atomic_read(&ht->nelems) > (tbl->size / 4 * 3) &&  in rht_grow_above_75()
    172  return atomic_read(&ht->nelems) < (tbl->size * 3 / 10) &&  in rht_shrink_below_30()
    184  return atomic_read(&ht->nelems) > tbl->size &&  in rht_grow_above_100()
    196  return atomic_read(&ht->nelems) >= ht->max_elems;  in rht_grow_above_max()
    663  atomic_inc(&ht->nelems);  in __rhashtable_insert_fast()
    935  atomic_dec(&ht->nelems);  in __rhashtable_remove_fast_one()
|
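Note: the rht_grow_above_75()/rht_shrink_below_30() helpers above compare the table's atomic nelems counter against fixed fractions of the current bucket count (the trailing "&&" conditions, which also check table-size limits, are truncated in this listing). A minimal user-space sketch of the same load-factor arithmetic, using C11 atomics and hypothetical stand-in types rather than the kernel's structures:

    #include <stdatomic.h>
    #include <stdbool.h>

    /* Hypothetical stand-ins for struct rhashtable / struct bucket_table. */
    struct ht_counts  { atomic_uint nelems; };
    struct ht_buckets { unsigned int size; };

    /* Grow when more than 75% full: nelems > size / 4 * 3 (integer math). */
    static bool grow_above_75(struct ht_counts *ht, const struct ht_buckets *tbl)
    {
        return atomic_load(&ht->nelems) > (tbl->size / 4 * 3);
    }

    /* Shrink when less than 30% full: nelems < size * 3 / 10. */
    static bool shrink_below_30(struct ht_counts *ht, const struct ht_buckets *tbl)
    {
        return atomic_load(&ht->nelems) < (tbl->size * 3 / 10);
    }

The insert and remove fast paths bump the counter with atomic_inc()/atomic_dec(), which these checks then read.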
D | dma-mapping.h |
    406  int nelems, enum dma_data_direction dir)  in dma_sync_sg_for_cpu() argument
    412  ops->sync_sg_for_cpu(dev, sg, nelems, dir);  in dma_sync_sg_for_cpu()
    413  debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir);  in dma_sync_sg_for_cpu()
    418  int nelems, enum dma_data_direction dir)  in dma_sync_sg_for_device() argument
    424  ops->sync_sg_for_device(dev, sg, nelems, dir);  in dma_sync_sg_for_device()
    425  debug_dma_sync_sg_for_device(dev, sg, nelems, dir);  in dma_sync_sg_for_device()
|
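Note: dma_sync_sg_for_cpu()/dma_sync_sg_for_device() above dispatch to the bus's dma_map_ops and notify dma-debug, passing nelems through unchanged. A minimal sketch of the driver-side calling sequence (hypothetical example_rx() function, error handling trimmed); per the DMA API, the sync and unmap calls take the original nelems passed to dma_map_sg(), not its return value:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Hypothetical driver fragment: map an SG list for device writes,
     * then hand the buffers back to the CPU. */
    static int example_rx(struct device *dev, struct scatterlist *sgl, int nelems)
    {
        int mapped = dma_map_sg(dev, sgl, nelems, DMA_FROM_DEVICE);

        if (!mapped)
            return -ENOMEM;

        /* ... program the device with the 'mapped' entries and wait ... */

        /* Make the device's writes visible to the CPU before reading them. */
        dma_sync_sg_for_cpu(dev, sgl, nelems, DMA_FROM_DEVICE);

        /* ... CPU consumes the data ... */

        dma_unmap_sg(dev, sgl, nelems, DMA_FROM_DEVICE);
        return 0;
    }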
D | rhashtable-types.h |
     92  atomic_t nelems;  member
|
/Linux-v4.19/tools/lib/bpf/ |
D | btf.c |
    228  __u32 nelems = 1;  in btf__resolve_size() local
    248  if (nelems && array->nelems > UINT32_MAX / nelems)  in btf__resolve_size()
    250  nelems *= array->nelems;  in btf__resolve_size()
    263  if (nelems && size > UINT32_MAX / nelems)  in btf__resolve_size()
    266  return nelems * size;  in btf__resolve_size()
|
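Note: btf__resolve_size() above folds nested array dimensions into nelems and then multiplies by the element size, guarding each step against 32-bit overflow. A standalone sketch of that guard (hypothetical mul_u32_checked() helper, not part of libbpf):

    #include <stdint.h>

    /* Multiply two 32-bit counts, failing instead of silently wrapping:
     * for a != 0, a * b overflows exactly when b > UINT32_MAX / a. */
    static int mul_u32_checked(uint32_t a, uint32_t b, uint32_t *out)
    {
        if (a && b > UINT32_MAX / a)
            return -1;
        *out = a * b;
        return 0;
    }

btf__resolve_size() applies the same test twice: once per array level while accumulating nelems, and once more before returning nelems * size.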
/Linux-v4.19/arch/powerpc/kernel/ |
D | dma-iommu.c |
     61  int nelems, enum dma_data_direction direction,  in dma_iommu_map_sg() argument
     64  return ppc_iommu_map_sg(dev, get_iommu_table_base(dev), sglist, nelems,  in dma_iommu_map_sg()
     69  int nelems, enum dma_data_direction direction,  in dma_iommu_unmap_sg() argument
     72  ppc_iommu_unmap_sg(get_iommu_table_base(dev), sglist, nelems,  in dma_iommu_unmap_sg()
|
D | iommu.c |
    431  struct scatterlist *sglist, int nelems,  in ppc_iommu_map_sg() argument
    444  if ((nelems == 0) || !tbl)  in ppc_iommu_map_sg()
    449  incount = nelems;  in ppc_iommu_map_sg()
    455  DBG("sg mapping %d elements:\n", nelems);  in ppc_iommu_map_sg()
    458  for_each_sg(sglist, s, nelems, i) {  in ppc_iommu_map_sg()
    557  for_each_sg(sglist, s, nelems, i) {  in ppc_iommu_map_sg()
    576  int nelems, enum dma_data_direction direction,  in ppc_iommu_unmap_sg() argument
    587  while (nelems--) {  in ppc_iommu_unmap_sg()
|
/Linux-v4.19/drivers/xen/ |
D | swiotlb-xen.c |
    517  int nelems, enum dma_data_direction dir,  in xen_swiotlb_unmap_sg_attrs() argument
    525  for_each_sg(sgl, sg, nelems, i)  in xen_swiotlb_unmap_sg_attrs()
    548  int nelems, enum dma_data_direction dir,  in xen_swiotlb_map_sg_attrs() argument
    556  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_map_sg_attrs()
    601  return nelems;  in xen_swiotlb_map_sg_attrs()
    613  int nelems, enum dma_data_direction dir,  in xen_swiotlb_sync_sg() argument
    619  for_each_sg(sgl, sg, nelems, i)  in xen_swiotlb_sync_sg()
    626  int nelems, enum dma_data_direction dir)  in xen_swiotlb_sync_sg_for_cpu() argument
    628  xen_swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU);  in xen_swiotlb_sync_sg_for_cpu()
    633  int nelems, enum dma_data_direction dir)  in xen_swiotlb_sync_sg_for_device() argument
    [all …]
|
/Linux-v4.19/kernel/dma/ |
D | swiotlb.c |
    938  swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems,  in swiotlb_map_sg_attrs() argument
    946  for_each_sg(sgl, sg, nelems, i) {  in swiotlb_map_sg_attrs()
    969  return nelems;  in swiotlb_map_sg_attrs()
    978  int nelems, enum dma_data_direction dir,  in swiotlb_unmap_sg_attrs() argument
    986  for_each_sg(sgl, sg, nelems, i)  in swiotlb_unmap_sg_attrs()
   1000  int nelems, enum dma_data_direction dir,  in swiotlb_sync_sg() argument
   1006  for_each_sg(sgl, sg, nelems, i)  in swiotlb_sync_sg()
   1013  int nelems, enum dma_data_direction dir)  in swiotlb_sync_sg_for_cpu() argument
   1015  swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU);  in swiotlb_sync_sg_for_cpu()
   1020  int nelems, enum dma_data_direction dir)  in swiotlb_sync_sg_for_device() argument
    [all …]
|
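Note: swiotlb_map_sg_attrs() and the other ->map_sg() implementations in this listing (xen-swiotlb, calgary, the powerpc and sparc IOMMUs) follow the same contract: walk nelems scatterlist entries, fill in each entry's DMA address and length, and return the number of usable entries, or 0 after undoing any partial mappings on failure. A stripped-down skeleton of that shape; example_map_one(), example_unmap_sg() and EXAMPLE_MAPPING_ERROR are hypothetical placeholders, not kernel APIs:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    #define EXAMPLE_MAPPING_ERROR  (~(dma_addr_t)0)

    static dma_addr_t example_map_one(struct device *dev, struct scatterlist *sg,
                                      enum dma_data_direction dir);
    static void example_unmap_sg(struct device *dev, struct scatterlist *sgl,
                                 int nelems, enum dma_data_direction dir,
                                 unsigned long attrs);

    static int example_map_sg(struct device *dev, struct scatterlist *sgl,
                              int nelems, enum dma_data_direction dir,
                              unsigned long attrs)
    {
        struct scatterlist *sg;
        int i;

        for_each_sg(sgl, sg, nelems, i) {
            dma_addr_t addr = example_map_one(dev, sg, dir);

            if (addr == EXAMPLE_MAPPING_ERROR)
                goto unwind;
            sg_dma_address(sg) = addr;
            sg_dma_len(sg) = sg->length;
        }
        return nelems;

    unwind:
        /* Undo the i entries mapped so far; 0 tells the caller nothing is mapped. */
        example_unmap_sg(dev, sgl, i, dir, attrs);
        return 0;
    }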
D | debug.c |
   1472  int nelems, int dir)  in debug_dma_unmap_sg() argument
   1480  for_each_sg(sglist, s, nelems, i) {  in debug_dma_unmap_sg()
   1490  .sg_call_ents = nelems,  in debug_dma_unmap_sg()
   1692  int nelems, int direction)  in debug_dma_sync_sg_for_cpu() argument
   1700  for_each_sg(sg, s, nelems, i) {  in debug_dma_sync_sg_for_cpu()
   1710  .sg_call_ents = nelems,  in debug_dma_sync_sg_for_cpu()
   1725  int nelems, int direction)  in debug_dma_sync_sg_for_device() argument
   1733  for_each_sg(sg, s, nelems, i) {  in debug_dma_sync_sg_for_device()
   1743  .sg_call_ents = nelems,  in debug_dma_sync_sg_for_device()
|
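Note: the .sg_call_ents = nelems assignments above record how many entries the caller passed in, so CONFIG_DMA_API_DEBUG can compare that count against what was recorded at map time and warn about mismatched SG calls. A hypothetical fragment of the kind of driver bug this bookkeeping is meant to surface, where the sync is issued with dma_map_sg()'s return value instead of the original nelems:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static void example_buggy_sync(struct device *dev, struct scatterlist *sgl,
                                   int nelems)
    {
        int mapped = dma_map_sg(dev, sgl, nelems, DMA_FROM_DEVICE);

        if (!mapped)
            return;

        /* Bug: should pass the original nelems, not the mapped count. */
        dma_sync_sg_for_cpu(dev, sgl, mapped, DMA_FROM_DEVICE);
        dma_unmap_sg(dev, sgl, nelems, DMA_FROM_DEVICE);
    }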
/Linux-v4.19/arch/sparc/kernel/ |
D | iommu.c |
    434  int nelems, enum dma_data_direction direction,  in dma_4u_map_sg() argument
    451  if (nelems == 0 || !iommu)  in dma_4u_map_sg()
    469  incount = nelems;  in dma_4u_map_sg()
    479  for_each_sg(sglist, s, nelems, i) {  in dma_4u_map_sg()
    558  for_each_sg(sglist, s, nelems, i) {  in dma_4u_map_sg()
    610  int nelems, enum dma_data_direction direction,  in dma_4u_unmap_sg() argument
    628  while (nelems--) {  in dma_4u_unmap_sg()
    700  struct scatterlist *sglist, int nelems,  in dma_4u_sync_sg_for_cpu() argument
    732  for_each_sg(sglist, sg, nelems, i) {  in dma_4u_sync_sg_for_cpu()
|
D | pci_sun4v.c |
    466  int nelems, enum dma_data_direction direction,  in dma_4v_map_sg() argument
    485  if (nelems == 0 || !iommu)  in dma_4v_map_sg()
    498  incount = nelems;  in dma_4v_map_sg()
    520  for_each_sg(sglist, s, nelems, i) {  in dma_4v_map_sg()
    602  for_each_sg(sglist, s, nelems, i) {  in dma_4v_map_sg()
    624  int nelems, enum dma_data_direction direction,  in dma_4v_unmap_sg() argument
    645  while (nelems--) {  in dma_4v_unmap_sg()
|
/Linux-v4.19/arch/x86/kernel/ |
D | amd_gart_64.c |
    336  int nelems, struct scatterlist *sout,  in __dma_map_cont() argument
    347  for_each_sg(start, s, nelems, i) {  in __dma_map_cont()
    374  dma_map_cont(struct device *dev, struct scatterlist *start, int nelems,  in dma_map_cont() argument
    378  BUG_ON(nelems != 1);  in dma_map_cont()
    383  return __dma_map_cont(dev, start, nelems, sout, pages);  in dma_map_cont()
|
D | pci-calgary_64.c |
    345  int nelems,enum dma_data_direction dir,  in calgary_unmap_sg() argument
    355  for_each_sg(sglist, s, nelems, i) {  in calgary_unmap_sg()
    369  int nelems, enum dma_data_direction dir,  in calgary_map_sg() argument
    379  for_each_sg(sg, s, nelems, i) {  in calgary_map_sg()
    400  return nelems;  in calgary_map_sg()
    402  calgary_unmap_sg(dev, sg, nelems, dir, 0);  in calgary_map_sg()
    403  for_each_sg(sg, s, nelems, i) {  in calgary_map_sg()
|
/Linux-v4.19/tools/include/uapi/linux/ |
D | btf.h |
     99  __u32 nelems;  member
|
/Linux-v4.19/include/uapi/linux/ |
D | btf.h |
     99  __u32 nelems;  member
|
/Linux-v4.19/arch/powerpc/include/asm/ |
D | iommu.h |
    262  struct scatterlist *sglist, int nelems,
    268  int nelems,
|
/Linux-v4.19/lib/ |
D | rhashtable.c |
    383  unsigned int nelems = atomic_read(&ht->nelems);  in rhashtable_shrink() local
    386  if (nelems)  in rhashtable_shrink()
    387  size = roundup_pow_of_two(nelems * 3 / 2);  in rhashtable_shrink()
    568  atomic_inc(&ht->nelems);  in rhashtable_insert_one()
   1071  atomic_set(&ht->nelems, 0);  in rhashtable_init()
|
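Note: when shrinking, rhashtable_shrink() above sizes the new bucket table as roundup_pow_of_two(nelems * 3 / 2), i.e. the smallest power of two that keeps the table no more than about two-thirds full. Worked example with hypothetical numbers: for nelems = 40, 40 * 3 / 2 = 60, which rounds up to 64 buckets; the function then still clamps the result to the table's configured minimum size (not shown in the hits above).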
/Linux-v4.19/arch/ia64/sn/pci/ |
D | pci_dma.c |
    331  int nelems, enum dma_data_direction dir)  in sn_dma_sync_sg_for_cpu() argument
    337  int nelems, enum dma_data_direction dir)  in sn_dma_sync_sg_for_device() argument
|
/Linux-v4.19/net/netfilter/ |
D | nft_dynset.c |
     39  if (!atomic_add_unless(&set->nelems, 1, set->size))  in nft_dynset_new()
     61  atomic_dec(&set->nelems);  in nft_dynset_new()
|
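Note: nft_dynset_new() above reserves a slot with atomic_add_unless(&set->nelems, 1, set->size), which refuses the increment once the counter has reached the set's size limit, and rolls back with atomic_dec() if the element cannot be added after all. A user-space sketch of the same bounded-increment pattern with C11 atomics (hypothetical take_slot()/put_slot() helpers):

    #include <stdatomic.h>
    #include <stdbool.h>

    /* Atomically take a slot unless the counter has already reached 'limit'. */
    static bool take_slot(atomic_uint *nelems, unsigned int limit)
    {
        unsigned int old = atomic_load(nelems);

        do {
            if (old == limit)
                return false;          /* set is full, reject the new element */
        } while (!atomic_compare_exchange_weak(nelems, &old, old + 1));

        return true;
    }

    /* On a later failure the caller rolls back with a plain decrement,
     * matching the atomic_dec(&set->nelems) in nft_dynset_new(). */
    static void put_slot(atomic_uint *nelems)
    {
        atomic_fetch_sub(nelems, 1);
    }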
D | nft_set_rbtree.c |
    389  atomic_dec(&set->nelems);  in nft_rbtree_gc()
    394  atomic_dec(&set->nelems);  in nft_rbtree_gc()
|
/Linux-v4.19/arch/powerpc/platforms/pseries/ |
D | vio.c |
    554  int nelems, enum dma_data_direction direction,  in vio_dma_iommu_map_sg() argument
    564  for_each_sg(sglist, sgl, nelems, count)  in vio_dma_iommu_map_sg()
    572  ret = dma_iommu_ops.map_sg(dev, sglist, nelems, direction, attrs);  in vio_dma_iommu_map_sg()
    589  struct scatterlist *sglist, int nelems,  in vio_dma_iommu_unmap_sg() argument
    600  for_each_sg(sglist, sgl, nelems, count)  in vio_dma_iommu_unmap_sg()
    603  dma_iommu_ops.unmap_sg(dev, sglist, nelems, direction, attrs);  in vio_dma_iommu_unmap_sg()
|