| /Linux-v5.4/lib/ | 
| D | bucket_locks.c | 34 tlocks = kvmalloc_array(size, sizeof(spinlock_t), gfp);  in __alloc_bucket_spinlocks() |
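The call sites in this listing all follow the same pattern: an overflow-checked array allocation that tries kmalloc() first and falls back to vmalloc() for large GFP_KERNEL requests, released with kvfree(). A minimal sketch of that pattern (the helper name and the page-array use case are illustrative, not taken from any file listed here):

```c
#include <linux/mm.h>	/* kvmalloc_array(), kvfree() */

/*
 * Illustrative helper, not taken from any file in this listing:
 * allocate an array of page pointers.  kvmalloc_array() checks
 * npages * sizeof(*pages) for overflow, tries kmalloc() first and
 * falls back to vmalloc() for large GFP_KERNEL requests; the result
 * is released with kvfree() either way.
 */
static struct page **alloc_page_array(unsigned long npages)
{
	struct page **pages;

	pages = kvmalloc_array(npages, sizeof(*pages), GFP_KERNEL);
	if (!pages)
		return NULL;	/* NULL is also returned on multiplication overflow */

	return pages;		/* caller frees with kvfree(pages) */
}
```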
| /Linux-v5.4/drivers/gpu/drm/ttm/ | 
| D | ttm_tt.c | 87 ttm->pages = kvmalloc_array(ttm->num_pages, sizeof(void*),  in ttm_tt_alloc_page_directory() |
| D | ttm_tt.c | 96 ttm->ttm.pages = kvmalloc_array(ttm->ttm.num_pages,  in ttm_dma_tt_alloc_page_directory() |
| D | ttm_tt.c | 108 ttm->dma_address = kvmalloc_array(ttm->ttm.num_pages,  in ttm_sg_tt_alloc_page_directory() |
| /Linux-v5.4/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ | 
| D | mem.c | 194 if (!(mem->mem = kvmalloc_array(size, sizeof(*mem->mem), GFP_KERNEL)))  in nvkm_mem_new_host() |
| D | mem.c | 196 if (!(mem->dma = kvmalloc_array(size, sizeof(*mem->dma), GFP_KERNEL)))  in nvkm_mem_new_host() |
| /Linux-v5.4/drivers/gpu/drm/etnaviv/ | 
| D | etnaviv_gem_submit.c | 476 bos = kvmalloc_array(args->nr_bos, sizeof(*bos), GFP_KERNEL);  in etnaviv_ioctl_gem_submit() |
| D | etnaviv_gem_submit.c | 477 relocs = kvmalloc_array(args->nr_relocs, sizeof(*relocs), GFP_KERNEL);  in etnaviv_ioctl_gem_submit() |
| D | etnaviv_gem_submit.c | 478 pmrs = kvmalloc_array(args->nr_pmrs, sizeof(*pmrs), GFP_KERNEL);  in etnaviv_ioctl_gem_submit() |
| D | etnaviv_gem_submit.c | 479 stream = kvmalloc_array(1, args->stream_size, GFP_KERNEL);  in etnaviv_ioctl_gem_submit() |
| D | etnaviv_gem_prime.c | 123 etnaviv_obj->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);  in etnaviv_gem_prime_import_sg_table() |
| /Linux-v5.4/drivers/gpu/drm/nouveau/dispnv50/ | 
| D | lut.c | 41 in = kvmalloc_array(1024, sizeof(*in), GFP_KERNEL);  in nv50_lut_load() |
| /Linux-v5.4/drivers/gpu/drm/panfrost/ | 
| D | panfrost_drv.c | 127 job->implicit_fences = kvmalloc_array(job->bo_count,  in panfrost_lookup_bos() |
| D | panfrost_drv.c | 166 job->in_fences = kvmalloc_array(job->in_fence_count,  in panfrost_copy_in_sync() |
| D | panfrost_drv.c | 174 handles = kvmalloc_array(job->in_fence_count, sizeof(u32), GFP_KERNEL);  in panfrost_copy_in_sync() |
| D | panfrost_mmu.c | 465 bo->sgts = kvmalloc_array(bo->base.base.size / SZ_2M,  in panfrost_mmu_map_fault_addr() |
| D | panfrost_mmu.c | 473 pages = kvmalloc_array(bo->base.base.size >> PAGE_SHIFT,  in panfrost_mmu_map_fault_addr() |
| /Linux-v5.4/tools/virtio/ringtest/ | 
| D | ptr_ring.c | 61 #define kvmalloc_array kmalloc_array  macro |
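The ringtest entry above is the odd one out: tools/virtio/ringtest builds the ring code in user space, so kvmalloc_array is simply aliased to kmalloc_array there. A sketch of how such a user-space shim can look (only the kvmalloc_array/kmalloc_array alias is taken from ptr_ring.c; the other mappings are assumptions):

```c
#include <stdlib.h>

/*
 * User-space stand-ins for the kernel allocator API.  Outside the
 * kernel there is no kmalloc/vmalloc split, so both map onto libc;
 * calloc() also provides the n * size overflow check.  Only the
 * kvmalloc_array -> kmalloc_array alias comes from ptr_ring.c line 61,
 * the remaining defines are assumed for this sketch.
 */
#define kmalloc_array(n, size, gfp)	calloc((n), (size))
#define kvmalloc_array			kmalloc_array
#define kvfree				free
```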
| /Linux-v5.4/drivers/gpu/drm/exynos/ | 
| D | exynos_drm_gem.c | 58 exynos_gem->pages = kvmalloc_array(nr_pages, sizeof(struct page *),  in exynos_drm_alloc_buf() |
| D | exynos_drm_gem.c | 490 exynos_gem->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);  in exynos_drm_gem_prime_import_sg_table() |
| /Linux-v5.4/drivers/block/ | 
| D | null_blk_zoned.c | 27 dev->zones = kvmalloc_array(dev->nr_zones, sizeof(struct blk_zone),  in null_zone_init() |
| /Linux-v5.4/drivers/gpu/drm/ | 
| D | drm_gem.c | 571 pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);  in drm_gem_get_pages() |
| D | drm_gem.c | 708 objs = kvmalloc_array(count, sizeof(struct drm_gem_object *),  in drm_gem_objects_lookup() |
| D | drm_gem.c | 713 handles = kvmalloc_array(count, sizeof(u32), GFP_KERNEL);  in drm_gem_objects_lookup() |
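Many of the DRM call sites in this listing (drm_gem.c, panfrost, v3d, vc4, virtio) share a two-step pattern: allocate a handle array sized by a user-supplied count, then copy it in from user space before looking the objects up. A hedged sketch of that pattern (function and parameter names are illustrative, not those of any driver listed):

```c
#include <linux/err.h>
#include <linux/kernel.h>	/* u64_to_user_ptr() */
#include <linux/mm.h>		/* kvmalloc_array(), kvfree() */
#include <linux/uaccess.h>

/*
 * Illustrative only, not the code of any driver above: copy a
 * user-supplied array of GEM handles into kernel memory.  The count
 * comes straight from the ioctl argument, so the allocation must be
 * overflow-checked and may be large, which is what kvmalloc_array()
 * provides.
 */
static u32 *copy_in_handles(u64 user_handles, u32 count)
{
	u32 *handles;

	handles = kvmalloc_array(count, sizeof(u32), GFP_KERNEL);
	if (!handles)
		return ERR_PTR(-ENOMEM);

	if (copy_from_user(handles, u64_to_user_ptr(user_handles),
			   count * sizeof(u32))) {
		kvfree(handles);
		return ERR_PTR(-EFAULT);
	}

	return handles;		/* caller releases with kvfree() */
}
```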
| /Linux-v5.4/drivers/gpu/drm/virtio/ | 
| D | virtgpu_ioctl.c | 163 bo_handles = kvmalloc_array(exbuf->num_bo_handles,  in virtio_gpu_execbuffer_ioctl() |
| D | virtgpu_ioctl.c | 165 buflist = kvmalloc_array(exbuf->num_bo_handles,  in virtio_gpu_execbuffer_ioctl() |
| /Linux-v5.4/drivers/gpu/drm/xen/ | 
| D | xen_drm_front_gem.c | 48 xen_obj->pages = kvmalloc_array(xen_obj->num_pages,  in gem_alloc_pages_array() |
| /Linux-v5.4/drivers/gpu/drm/udl/ | 
| D | udl_dmabuf.c | 203 obj->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);  in udl_prime_create() |
| /Linux-v5.4/drivers/staging/media/ipu3/ | 
| D | ipu3-dmamap.c | 42 pages = kvmalloc_array(count, sizeof(*pages), GFP_KERNEL);  in imgu_dmamap_alloc_buffer() |
| /Linux-v5.4/drivers/gpu/drm/i915/gem/ | 
| D | i915_gem_userptr.c | 483 pvec = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);  in __i915_gem_userptr_get_pages_worker() |
| D | i915_gem_userptr.c | 617 pvec = kvmalloc_array(num_pages, sizeof(struct page *),  in i915_gem_userptr_get_pages() |
| D | i915_gem_execbuffer.c | 1600 relocs = kvmalloc_array(size, 1, GFP_KERNEL);  in eb_copy_relocations() |
| D | i915_gem_execbuffer.c | 2376 fences = kvmalloc_array(nfences, sizeof(*fences),  in get_fence_array() |
| D | i915_gem_execbuffer.c | 2785 exec_list = kvmalloc_array(count, sizeof(*exec_list),  in i915_gem_execbuffer_ioctl() |
| D | i915_gem_execbuffer.c | 2787 exec2_list = kvmalloc_array(count + 1, eb_element_size(),  in i915_gem_execbuffer_ioctl() |
| D | i915_gem_execbuffer.c | 2863 exec2_list = kvmalloc_array(count + 1, eb_element_size(),  in i915_gem_execbuffer2_ioctl() |
| /Linux-v5.4/drivers/gpu/drm/v3d/ | 
| D | v3d_gem.c | 307 job->bo = kvmalloc_array(job->bo_count,  in v3d_lookup_bos() |
| D | v3d_gem.c | 315 handles = kvmalloc_array(job->bo_count, sizeof(u32), GFP_KERNEL);  in v3d_lookup_bos() |
| /Linux-v5.4/drivers/gpu/drm/amd/amdgpu/ | 
| D | amdgpu_bo_list.c | 231 info = kvmalloc_array(in->bo_number, info_size, GFP_KERNEL);  in amdgpu_bo_create_list_entry_array() |
| D | amdgpu_vram_mgr.c | 308 nodes = kvmalloc_array((uint32_t)num_nodes, sizeof(*nodes),  in amdgpu_vram_mgr_new() |
| /Linux-v5.4/drivers/gpu/drm/i915/gt/ | 
| D | selftest_timeline.c | 512 timelines = kvmalloc_array(NUM_TIMELINES * I915_NUM_ENGINES,  in live_hwsp_engine() |
| D | selftest_timeline.c | 588 timelines = kvmalloc_array(NUM_TIMELINES * I915_NUM_ENGINES,  in live_hwsp_alternate() |
| /Linux-v5.4/drivers/gpu/drm/vc4/ | 
| D | vc4_gem.c | 747 exec->bo = kvmalloc_array(exec->bo_count,  in vc4_cl_lookup_bos() |
| D | vc4_gem.c | 755 handles = kvmalloc_array(exec->bo_count, sizeof(uint32_t), GFP_KERNEL);  in vc4_cl_lookup_bos() |
| D | vc4_gem.c | 856 temp = kvmalloc_array(temp_size, 1, GFP_KERNEL);  in vc4_get_bcl() |
| /Linux-v5.4/drivers/gpu/drm/radeon/ | 
| D | radeon_cs.c | 96 p->relocs = kvmalloc_array(p->nrelocs, sizeof(struct radeon_bo_list),  in radeon_cs_parser_relocs() |
| D | radeon_cs.c | 351 p->chunks[i].kdata = kvmalloc_array(size, sizeof(uint32_t), GFP_KERNEL);  in radeon_cs_parser_init() |
| /Linux-v5.4/block/ | 
| D | blk-zoned.c | 344 zones = kvmalloc_array(rep.nr_zones, sizeof(struct blk_zone),  in blkdev_report_zones_ioctl() |