/Linux-v4.19/drivers/gpu/drm/armada/ |
D | armada_gem.c | 391 if (sg_alloc_table(sgt, count, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
D | armada_gem.c | 414 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
D | armada_gem.c | 423 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
|
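The three armada_gem.c hits above sit in the driver's dma-buf map callback, armada_gem_prime_map_dma_buf(): it allocates either a multi-entry table (line 391) or a single-entry one (lines 414 and 423) and then maps it for the importing device. The following is only a rough sketch of that general shape, with illustrative names (example_map(), pages, count) rather than the armada code itself:

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/*
 * Illustrative dma-buf-map-style path: allocate the sg_table, add one
 * entry per backing page, then map it for the importing device.  Errors
 * unwind in reverse order with sg_free_table()/kfree().
 */
static struct sg_table *example_map(struct device *dev, struct page **pages,
				    unsigned int count)
{
	struct sg_table *sgt;
	struct scatterlist *sg;
	unsigned int i;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	if (sg_alloc_table(sgt, count, GFP_KERNEL))
		goto free_sgt;

	for_each_sg(sgt->sgl, sg, count, i)
		sg_set_page(sg, pages[i], PAGE_SIZE, 0);

	if (!dma_map_sg(dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL))
		goto free_table;

	return sgt;

free_table:
	sg_free_table(sgt);
free_sgt:
	kfree(sgt);
	return ERR_PTR(-ENOMEM);
}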
/Linux-v4.19/drivers/staging/android/ion/ |
D | ion_system_heap.c | 131 if (sg_alloc_table(table, i, GFP_KERNEL)) in ion_system_heap_allocate()
D | ion_system_heap.c | 333 ret = sg_alloc_table(table, 1, GFP_KERNEL); in ion_system_contig_heap_allocate()
|
D | ion_cma_heap.c | 66 ret = sg_alloc_table(table, 1, GFP_KERNEL); in ion_cma_allocate()
|
D | ion_carveout_heap.c | 62 ret = sg_alloc_table(table, 1, GFP_KERNEL); in ion_carveout_heap_allocate()
|
D | ion_chunk_heap.c | 48 ret = sg_alloc_table(table, num_chunks, GFP_KERNEL); in ion_chunk_heap_allocate()
|
/Linux-v4.19/lib/ |
D | scatterlist.c | 347 int sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask) in sg_alloc_table() function
D | scatterlist.c | 358 EXPORT_SYMBOL(sg_alloc_table);
D | scatterlist.c | 405 ret = sg_alloc_table(sgt, chunks, gfp_mask); in __sg_alloc_table_from_pages()
|
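Line 347 above is the definition itself; the prototype appears further down under include/linux/scatterlist.h. For orientation, a minimal sketch of the basic contract, not taken from lib/scatterlist.c: the caller hands in an uninitialised struct sg_table, sg_alloc_table() allocates nents scatterlist entries (chained internally as needed) and returns 0 on success, and sg_free_table() releases them again.

#include <linux/scatterlist.h>

static int example_alloc_release(void)
{
	struct sg_table table;
	int ret;

	/* Allocate 16 entries; a non-zero return means the allocation failed. */
	ret = sg_alloc_table(&table, 16, GFP_KERNEL);
	if (ret)
		return ret;

	/* ... populate the entries with sg_set_page()/sg_set_buf() ... */

	sg_free_table(&table);
	return 0;
}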
/Linux-v4.19/drivers/gpu/drm/i915/selftests/ |
D | huge_gem_object.c | 53 if (sg_alloc_table(pages, npages, GFP)) { in huge_get_pages()
|
D | mock_dmabuf.c | 39 err = sg_alloc_table(st, mock->npages, GFP_KERNEL); in mock_map_dma_buf()
|
D | scatterlist.c | 222 if (sg_alloc_table(&pt->st, max, in alloc_table()
|
D | huge_pages.c | 79 if (sg_alloc_table(st, obj->base.size >> PAGE_SHIFT, GFP)) { in get_huge_pages()
D | huge_pages.c | 203 if (sg_alloc_table(st, obj->base.size >> PAGE_SHIFT, GFP)) { in fake_get_huge_pages()
D | huge_pages.c | 256 if (sg_alloc_table(st, 1, GFP)) { in fake_get_huge_pages_single()
|
/Linux-v4.19/drivers/gpu/drm/i915/ |
D | i915_gem_internal.c | 85 if (sg_alloc_table(st, npages, GFP_KERNEL)) { in i915_gem_object_get_pages_internal()
|
D | i915_gem_dmabuf.c | 58 ret = sg_alloc_table(st, obj->mm.pages->nents, GFP_KERNEL); in i915_gem_map_dma_buf()
|
D | i915_gem_stolen.c | 506 if (sg_alloc_table(st, 1, GFP_KERNEL)) { in i915_pages_create_for_stolen()
|
/Linux-v4.19/drivers/gpu/drm/omapdrm/ |
D | omap_gem_dmabuf.c | 46 ret = sg_alloc_table(sg, 1, GFP_KERNEL); in omap_gem_map_dma_buf()
|
/Linux-v4.19/drivers/mmc/core/ |
D | sdio_ops.c | 157 if (sg_alloc_table(&sgtable, nents, GFP_KERNEL)) in mmc_io_rw_extended()
|
/Linux-v4.19/drivers/gpu/drm/udl/ |
D | udl_dmabuf.c | 109 ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL); in udl_map_dma_buf()
|
/Linux-v4.19/drivers/gpu/drm/tegra/ |
D | gem.c | 514 if (sg_alloc_table(sgt, bo->num_pages, GFP_KERNEL)) in tegra_gem_prime_map_dma_buf()
D | gem.c | 523 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) in tegra_gem_prime_map_dma_buf()
|
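The second tegra hit, like the single-entry calls in ion_cma_heap.c, ion_carveout_heap.c, i915_gem_stolen.c and dma_common_get_sgtable() elsewhere in this listing, describes a physically contiguous buffer with a single scatterlist entry. A hedged sketch of that pattern, with illustrative names only:

#include <linux/scatterlist.h>

/* Illustrative single-entry pattern: one scatterlist entry spans the
 * whole physically contiguous allocation. */
static int example_contig_sgt(struct sg_table *sgt, struct page *page,
			      unsigned int size)
{
	int ret = sg_alloc_table(sgt, 1, GFP_KERNEL);

	if (ret)
		return ret;

	sg_set_page(sgt->sgl, page, size, 0);
	return 0;
}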
/Linux-v4.19/include/linux/ |
D | scatterlist.h | 273 int sg_alloc_table(struct sg_table *, unsigned int, gfp_t);
|
/Linux-v4.19/kernel/dma/ |
D | mapping.c | 210 ret = sg_alloc_table(sgt, 1, GFP_KERNEL); in dma_common_get_sgtable()
|
/Linux-v4.19/drivers/infiniband/core/ |
D | umem.c | 166 ret = sg_alloc_table(&umem->sg_head, npages, GFP_KERNEL); in ib_umem_get()
|
/Linux-v4.19/net/ceph/ |
D | crypto.c | 180 ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS); in setup_sgtable()
|
/Linux-v4.19/drivers/media/common/videobuf2/ |
D | videobuf2-vmalloc.c | 229 ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL); in vb2_vmalloc_dmabuf_ops_attach()
|
/Linux-v4.19/drivers/crypto/qce/ |
D | ablkcipher.c | 100 ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp); in qce_ablkcipher_async_req_handle()
|
/Linux-v4.19/drivers/crypto/ccp/ |
D | ccp-crypto-aes-cmac.c | 113 ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp); in ccp_do_cmac_update()
|
/Linux-v4.19/drivers/gpu/drm/i915/gvt/ |
D | dmabuf.c | 58 ret = sg_alloc_table(st, fb_info->size, GFP_KERNEL); in vgpu_gem_get_pages()
|