
Searched refs:dma_list in Linux v4.19 (results 1–17 of 17), sorted by relevance

/Linux-v4.19/drivers/infiniband/hw/mthca/
mthca_allocator.c
201 u64 *dma_list = NULL; in mthca_buf_alloc() local
224 dma_list = kmalloc_array(npages, sizeof(*dma_list), in mthca_buf_alloc()
226 if (!dma_list) in mthca_buf_alloc()
230 dma_list[i] = t + i * (1 << shift); in mthca_buf_alloc()
236 dma_list = kmalloc_array(npages, sizeof(*dma_list), in mthca_buf_alloc()
238 if (!dma_list) in mthca_buf_alloc()
257 dma_list[i] = t; in mthca_buf_alloc()
265 dma_list, shift, npages, in mthca_buf_alloc()
273 kfree(dma_list); in mthca_buf_alloc()
281 kfree(dma_list); in mthca_buf_alloc()
mthca_eq.c
471 u64 *dma_list = NULL; in mthca_create_eq() local
490 dma_list = kmalloc_array(npages, sizeof(*dma_list), GFP_KERNEL); in mthca_create_eq()
491 if (!dma_list) in mthca_create_eq()
505 dma_list[i] = t; in mthca_create_eq()
519 dma_list, PAGE_SHIFT, npages, in mthca_create_eq()
551 kfree(dma_list); in mthca_create_eq()
582 kfree(dma_list); in mthca_create_eq()
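
Both mthca hits follow the same shape: a temporary u64 array is sized with kmalloc_array(), filled with one DMA bus address per page, handed to the firmware MTT-write command, and kfree()d on both the success and error paths. A minimal sketch of that pattern, assuming the caller already owns a contiguous DMA region starting at bus address t (build_dma_list is an illustrative name, not the driver's):

#include <linux/slab.h>
#include <linux/types.h>

/*
 * Sketch of the mthca dma_list pattern: one u64 bus address per
 * page, 1 << shift bytes apart, in a throwaway array the caller
 * kfree()s once the addresses have been handed to hardware.
 */
static u64 *build_dma_list(dma_addr_t t, int npages, int shift)
{
        u64 *dma_list;
        int i;

        dma_list = kmalloc_array(npages, sizeof(*dma_list), GFP_KERNEL);
        if (!dma_list)
                return NULL;

        for (i = 0; i < npages; ++i)
                dma_list[i] = t + i * (1ULL << shift);

        return dma_list;
}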
/Linux-v4.19/arch/powerpc/platforms/cell/spufs/
spu_utils.h
51 struct dma_list_elem dma_list[15] __attribute__ ((aligned(8))); variable
105 dma_list[i].size = 16384; in build_dma_list()
106 dma_list[i].ea_low = ea_low; in build_dma_list()
spu_save.c
64 unsigned int list = (unsigned int)&dma_list[0]; in save_upper_240kb()
65 unsigned int size = sizeof(dma_list); in save_upper_240kb()
spu_restore.c
64 unsigned int list = (unsigned int)&dma_list[0]; in restore_upper_240kb()
65 unsigned int size = sizeof(dma_list); in restore_upper_240kb()
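
The spufs hits show an MFC DMA list built in place: spu_utils.h declares a 15-element, 8-byte-aligned array of dma_list_elem, build_dma_list() fills each element with a 16 KB chunk (15 x 16 KB covers the "upper 240kb" the save/restore functions name), and spu_save.c/spu_restore.c kick a single list transfer over the whole array. A sketch, assuming the two-word element layout implied by the size/ea_low assignments above:

/* MFC DMA list element: transfer size plus the low 32 bits of the
 * effective address (layout assumed from the hits above). */
struct dma_list_elem {
        unsigned int size;
        unsigned int ea_low;
};

static struct dma_list_elem dma_list[15] __attribute__((aligned(8)));

/* Describe the upper 240 KB of backing store as 15 x 16 KB chunks,
 * so one MFC list command can move all of it. */
static void build_dma_list(unsigned int ea_low)
{
        int i;

        for (i = 0; i < 15; i++) {
                dma_list[i].size = 16384;
                dma_list[i].ea_low = ea_low;
                ea_low += 16384;
        }
}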
/Linux-v4.19/drivers/misc/genwqe/
card_utils.c
236 static void genwqe_unmap_pages(struct genwqe_dev *cd, dma_addr_t *dma_list, in genwqe_unmap_pages() argument
242 for (i = 0; (i < num_pages) && (dma_list[i] != 0x0); i++) { in genwqe_unmap_pages()
243 pci_unmap_page(pci_dev, dma_list[i], in genwqe_unmap_pages()
245 dma_list[i] = 0x0; in genwqe_unmap_pages()
251 dma_addr_t *dma_list) in genwqe_map_pages() argument
260 dma_list[i] = 0x0; in genwqe_map_pages()
273 dma_list[i] = daddr; in genwqe_map_pages()
278 genwqe_unmap_pages(cd, dma_list, num_pages); in genwqe_map_pages()
381 dma_addr_t *dma_list) in genwqe_setup_sgl() argument
416 daddr = dma_list[p] + map_offs; in genwqe_setup_sgl()
[all …]
card_base.h
179 dma_addr_t *dma_list; /* list of dma addresses per page */ member
380 dma_addr_t *dma_list);
card_dev.c
961 &m->dma_list[page_offs]); in ddcb_cmd_fixups()
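
The genwqe hits pair a mapping routine with its unwind: genwqe_map_pages() zeroes every dma_list slot up front, maps pages one by one, and records each bus address; genwqe_unmap_pages() walks the list only until the first zero entry, unmapping and re-zeroing as it goes, so a partially filled list can always be torn down safely. A sketch of that pairing with hypothetical names, using the legacy pci_map_page()/pci_unmap_page() API the driver calls:

#include <linux/pci.h>
#include <linux/mm.h>
#include <linux/errno.h>

/* Undo mappings; the first zero entry marks where mapping stopped. */
static void unmap_pages(struct pci_dev *pci_dev, dma_addr_t *dma_list,
                        int num_pages)
{
        int i;

        for (i = 0; (i < num_pages) && (dma_list[i] != 0x0); i++) {
                pci_unmap_page(pci_dev, dma_list[i], PAGE_SIZE,
                               PCI_DMA_BIDIRECTIONAL);
                dma_list[i] = 0x0;
        }
}

static int map_pages(struct pci_dev *pci_dev, struct page **pages,
                     int num_pages, dma_addr_t *dma_list)
{
        int i;

        for (i = 0; i < num_pages; i++)
                dma_list[i] = 0x0;

        for (i = 0; i < num_pages; i++) {
                dma_addr_t daddr = pci_map_page(pci_dev, pages[i], 0,
                                                PAGE_SIZE,
                                                PCI_DMA_BIDIRECTIONAL);
                if (pci_dma_mapping_error(pci_dev, daddr)) {
                        unmap_pages(pci_dev, dma_list, num_pages);
                        return -EFAULT;
                }
                dma_list[i] = daddr;
        }
        return 0;
}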
/Linux-v4.19/drivers/infiniband/core/
umem_odp.c
310 odp_data->dma_list = in ib_alloc_odp_umem()
311 vzalloc(array_size(pages, sizeof(*odp_data->dma_list))); in ib_alloc_odp_umem()
312 if (!odp_data->dma_list) { in ib_alloc_odp_umem()
399 umem->odp_data->dma_list = in ib_umem_odp_get()
400 vzalloc(array_size(sizeof(*umem->odp_data->dma_list), in ib_umem_odp_get()
402 if (!umem->odp_data->dma_list) { in ib_umem_odp_get()
461 vfree(umem->odp_data->dma_list); in ib_umem_odp_get()
532 vfree(umem->odp_data->dma_list); in ib_umem_odp_release()
578 if (!(umem->odp_data->dma_list[page_index])) { in ib_umem_odp_map_dma_single_page()
587 umem->odp_data->dma_list[page_index] = dma_addr | access_mask; in ib_umem_odp_map_dma_single_page()
[all …]
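
The ODP hits show the other common shape: a long-lived per-page array sized with array_size() and allocated with vzalloc(), where a zero entry means "not mapped" and a mapped entry packs access bits into the low bits of the page-aligned DMA address. A sketch, with ODP_READ_BIT/ODP_WRITE_BIT as illustrative flag names:

#include <linux/vmalloc.h>
#include <linux/overflow.h>
#include <linux/bits.h>
#include <linux/types.h>

/* Illustrative flag names; a page-aligned address leaves the low
 * bits free for per-page access state. */
#define ODP_READ_BIT    BIT(0)
#define ODP_WRITE_BIT   BIT(1)

static dma_addr_t *alloc_page_dma_list(size_t npages)
{
        /* vzalloc() returns a zeroed array: every page starts unmapped. */
        return vzalloc(array_size(npages, sizeof(dma_addr_t)));
}

static void note_mapped_page(dma_addr_t *dma_list, size_t idx,
                             dma_addr_t dma_addr, u64 access_mask)
{
        if (!dma_list[idx])             /* zero == not yet mapped */
                dma_list[idx] = dma_addr | access_mask;
}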
/Linux-v4.19/drivers/vfio/
vfio_iommu_type1.c
65 struct rb_root dma_list; member
126 struct rb_node *node = iommu->dma_list.rb_node; in vfio_find_dma()
144 struct rb_node **link = &iommu->dma_list.rb_node, *parent = NULL; in vfio_link_dma()
158 rb_insert_color(&new->node, &iommu->dma_list); in vfio_link_dma()
163 rb_erase(&old->node, &iommu->dma_list); in vfio_unlink_dma()
1190 n = rb_first(&iommu->dma_list); in vfio_iommu_replay()
1486 while ((node = rb_first(&iommu->dma_list))) in vfio_iommu_unmap_unpin_all()
1494 n = rb_first(&iommu->dma_list); in vfio_iommu_unmap_unpin_reaccount()
1517 n = rb_first(&iommu->dma_list); in vfio_sanity_check_pfn_list()
1614 iommu->dma_list = RB_ROOT; in vfio_iommu_type1_open()
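
Here dma_list is not an array at all but an rb_root: vfio_find_dma() descends the tree comparing IOVA ranges, vfio_link_dma()/vfio_unlink_dma() insert and erase nodes, and the teardown paths drain the tree with rb_first(). A sketch of the range lookup, assuming a vfio_dma node that carries its iova and size (field names follow the comparisons implied by the hits above):

#include <linux/rbtree.h>
#include <linux/types.h>

struct vfio_dma {
        struct rb_node node;
        dma_addr_t iova;
        size_t size;
};

/* Find any mapping whose [iova, iova + size) range overlaps
 * [start, start + size). */
static struct vfio_dma *find_dma(struct rb_root *dma_list,
                                 dma_addr_t start, size_t size)
{
        struct rb_node *node = dma_list->rb_node;

        while (node) {
                struct vfio_dma *dma = rb_entry(node, struct vfio_dma, node);

                if (start + size <= dma->iova)
                        node = node->rb_left;
                else if (start >= dma->iova + dma->size)
                        node = node->rb_right;
                else
                        return dma;     /* ranges overlap */
        }
        return NULL;
}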
/Linux-v4.19/drivers/vme/
vme.c
948 struct vme_dma_list *dma_list; in vme_new_dma_list() local
955 dma_list = kmalloc(sizeof(*dma_list), GFP_KERNEL); in vme_new_dma_list()
956 if (!dma_list) in vme_new_dma_list()
959 INIT_LIST_HEAD(&dma_list->entries); in vme_new_dma_list()
960 dma_list->parent = list_entry(resource->entry, in vme_new_dma_list()
963 mutex_init(&dma_list->mtx); in vme_new_dma_list()
965 return dma_list; in vme_new_dma_list()
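
vme_new_dma_list() is the classic heap-allocated container pattern: kmalloc() the struct, INIT_LIST_HEAD() its (initially empty) entry list, mutex_init() its lock, and hand it back fully initialized. A sketch with the parent-resource bookkeeping elided:

#include <linux/slab.h>
#include <linux/list.h>
#include <linux/mutex.h>

struct dma_list_ctx {
        struct list_head entries;       /* the queued transfers */
        struct mutex mtx;               /* serializes list users */
};

static struct dma_list_ctx *new_dma_list(void)
{
        struct dma_list_ctx *dma_list;

        dma_list = kmalloc(sizeof(*dma_list), GFP_KERNEL);
        if (!dma_list)
                return NULL;

        INIT_LIST_HEAD(&dma_list->entries);
        mutex_init(&dma_list->mtx);
        return dma_list;
}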
/Linux-v4.19/drivers/block/rsxx/
dma.c
686 struct list_head dma_list[RSXX_MAX_TARGETS]; in rsxx_dma_queue_bio() local
704 INIT_LIST_HEAD(&dma_list[i]); in rsxx_dma_queue_bio()
715 st = rsxx_queue_discard(card, &dma_list[tgt], laddr, in rsxx_dma_queue_bio()
737 st = rsxx_queue_dma(card, &dma_list[tgt], in rsxx_dma_queue_bio()
755 if (!list_empty(&dma_list[i])) { in rsxx_dma_queue_bio()
758 list_splice_tail(&dma_list[i], &card->ctrl[i].queue); in rsxx_dma_queue_bio()
770 rsxx_cleanup_dma_queue(&card->ctrl[i], &dma_list[i], in rsxx_dma_queue_bio()
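
The rsxx hits show per-target fan-out: a stack array of list heads is initialized up front, each DMA descriptor is queued on the list for its target, and every non-empty list is then spliced onto its controller's queue in one O(1) list_splice_tail(); on error the same per-target lists feed the cleanup path. A sketch of the splice step (MAX_TARGETS and the queue layout are illustrative, standing in for RSXX_MAX_TARGETS and card->ctrl[i].queue):

#include <linux/list.h>

#define MAX_TARGETS 8   /* illustrative; the driver's value may differ */

static void queue_bio_sketch(struct list_head *ctrl_queues)
{
        struct list_head dma_list[MAX_TARGETS];
        int i;

        for (i = 0; i < MAX_TARGETS; i++)
                INIT_LIST_HEAD(&dma_list[i]);

        /* ... descriptors are queued onto dma_list[tgt] here ... */

        for (i = 0; i < MAX_TARGETS; i++)
                if (!list_empty(&dma_list[i]))
                        list_splice_tail(&dma_list[i], &ctrl_queues[i]);
}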
/Linux-v4.19/include/rdma/
ib_umem_odp.h
58 dma_addr_t *dma_list; member
/Linux-v4.19/drivers/net/ethernet/mellanox/mlx4/
eq.c
975 u64 *dma_list = NULL; in mlx4_create_eq() local
996 dma_list = kmalloc_array(npages, sizeof(*dma_list), GFP_KERNEL); in mlx4_create_eq()
997 if (!dma_list) in mlx4_create_eq()
1013 dma_list[i] = t; in mlx4_create_eq()
1033 err = mlx4_write_mtt(dev, &eq->mtt, 0, npages, dma_list); in mlx4_create_eq()
1053 kfree(dma_list); in mlx4_create_eq()
1083 kfree(dma_list); in mlx4_create_eq()
/Linux-v4.19/drivers/infiniband/hw/mlx5/
mem.c
162 dma_addr_t pa = umem->odp_data->dma_list[offset + i]; in __mlx5_ib_populate_pas()
odp.c
211 if (umem->odp_data->dma_list[idx] & in mlx5_ib_invalidate_range()
/Linux-v4.19/sound/pci/
maestro3.c
760 struct m3_list dma_list; member
1758 s->index_list[2] = &chip->dma_list; in snd_m3_substream_open()
2185 chip->dma_list.curlen = 0; in snd_m3_assp_init()
2186 chip->dma_list.mem_addr = KDATA_DMA_XFER0; in snd_m3_assp_init()
2187 chip->dma_list.max = MAX_VIRTUAL_DMA_CHANNELS; in snd_m3_assp_init()
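
In maestro3, dma_list tracks a list that lives in DSP memory rather than host memory: mem_addr is the on-chip address of the transfer list, curlen the number of entries currently in it, and max its capacity; snd_m3_assp_init() resets all three before any substream registers itself via index_list. A sketch of that initialization, with illustrative parameter names (the struct fields follow the hits above):

/* Bookkeeping for a list kept in DSP memory: where it lives,
 * how many entries it holds, and how many it may hold. */
struct m3_list {
        int curlen;
        int mem_addr;
        int max;
};

static void init_dsp_list(struct m3_list *dma_list,
                          int dsp_addr, int max_entries)
{
        dma_list->curlen = 0;
        dma_list->mem_addr = dsp_addr;
        dma_list->max = max_entries;
}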