
Searched refs:dma_device (Results 1 – 25 of 125) sorted by relevance


/Linux-v5.4/drivers/dma/
mxs-dma.c
143 struct dma_device dma_device; member
387 dev_dbg(mxs_dma->dma_device.dev, in mxs_dma_int_handler()
421 mxs_chan->ccw = dma_alloc_coherent(mxs_dma->dma_device.dev, in mxs_dma_alloc_chan_resources()
451 dma_free_coherent(mxs_dma->dma_device.dev, CCW_BLOCK_SIZE, in mxs_dma_alloc_chan_resources()
466 dma_free_coherent(mxs_dma->dma_device.dev, CCW_BLOCK_SIZE, in mxs_dma_free_chan_resources()
511 dev_err(mxs_dma->dma_device.dev, in mxs_dma_prep_slave_sg()
556 dev_err(mxs_dma->dma_device.dev, "maximum bytes for sg entry exceeded: %d > %d\n", in mxs_dma_prep_slave_sg()
611 dev_err(mxs_dma->dma_device.dev, in mxs_dma_prep_dma_cyclic()
618 dev_err(mxs_dma->dma_device.dev, in mxs_dma_prep_dma_cyclic()
748 dma_cap_mask_t mask = mxs_dma->dma_device.cap_mask; in mxs_dma_xlate()
[all …]
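The mxs-dma hits above allocate per-channel descriptor memory against dma_device.dev. A minimal sketch of that coherent-allocation pattern (the helper names and size handling are placeholders, not taken from the driver):

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>

    /* Allocate a block of coherent memory owned by the DMA engine's backing
     * device, mirroring the dma_alloc_coherent()/dma_free_coherent() calls
     * in mxs_dma_alloc_chan_resources() above. */
    static void *alloc_desc_block(struct dma_device *dd, size_t size, dma_addr_t *phys)
    {
            return dma_alloc_coherent(dd->dev, size, phys, GFP_KERNEL);
    }

    static void free_desc_block(struct dma_device *dd, size_t size, void *cpu, dma_addr_t phys)
    {
            dma_free_coherent(dd->dev, size, cpu, phys);
    }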
st_fdma.c
176 if (fdev->dma_device.dev->of_node != dma_spec->np) in st_fdma_of_xlate()
185 chan = dma_get_any_slave_channel(&fdev->dma_device); in st_fdma_of_xlate()
792 INIT_LIST_HEAD(&fdev->dma_device.channels); in st_fdma_probe()
798 vchan_init(&fchan->vchan, &fdev->dma_device); in st_fdma_probe()
804 dma_cap_set(DMA_SLAVE, fdev->dma_device.cap_mask); in st_fdma_probe()
805 dma_cap_set(DMA_CYCLIC, fdev->dma_device.cap_mask); in st_fdma_probe()
806 dma_cap_set(DMA_MEMCPY, fdev->dma_device.cap_mask); in st_fdma_probe()
808 fdev->dma_device.dev = &pdev->dev; in st_fdma_probe()
809 fdev->dma_device.device_alloc_chan_resources = st_fdma_alloc_chan_res; in st_fdma_probe()
810 fdev->dma_device.device_free_chan_resources = st_fdma_free_chan_res; in st_fdma_probe()
[all …]
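A minimal sketch of the provider-side setup the st_fdma_probe() hits show: the driver embeds a struct dma_device, fills in its capabilities, backing device and channel callbacks, then registers with the dmaengine core. All my_* names are placeholders, not part of the driver.

    #include <linux/dmaengine.h>
    #include <linux/platform_device.h>

    struct my_dma_dev {
            struct dma_device dma_device;   /* embedded, as in st_fdma.h:133 */
    };

    static int my_alloc_chan_resources(struct dma_chan *chan) { return 0; }
    static void my_free_chan_resources(struct dma_chan *chan) { }

    static int my_probe(struct platform_device *pdev)
    {
            struct my_dma_dev *fdev;
            struct dma_device *dd;

            fdev = devm_kzalloc(&pdev->dev, sizeof(*fdev), GFP_KERNEL);
            if (!fdev)
                    return -ENOMEM;
            dd = &fdev->dma_device;

            INIT_LIST_HEAD(&dd->channels);          /* channel list, as above */
            dma_cap_set(DMA_SLAVE, dd->cap_mask);   /* advertised capabilities */
            dma_cap_set(DMA_CYCLIC, dd->cap_mask);
            dd->dev = &pdev->dev;
            dd->device_alloc_chan_resources = my_alloc_chan_resources;
            dd->device_free_chan_resources = my_free_chan_resources;

            return dma_async_device_register(dd);   /* hand over to the dmaengine core */
    }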
imx-dma.c
177 struct dma_device dma_device; member
1052 return dma_request_channel(imxdma->dma_device.cap_mask, in imxdma_xlate()
1135 INIT_LIST_HEAD(&imxdma->dma_device.channels); in imxdma_probe()
1137 dma_cap_set(DMA_SLAVE, imxdma->dma_device.cap_mask); in imxdma_probe()
1138 dma_cap_set(DMA_CYCLIC, imxdma->dma_device.cap_mask); in imxdma_probe()
1139 dma_cap_set(DMA_MEMCPY, imxdma->dma_device.cap_mask); in imxdma_probe()
1140 dma_cap_set(DMA_INTERLEAVE, imxdma->dma_device.cap_mask); in imxdma_probe()
1174 imxdmac->chan.device = &imxdma->dma_device; in imxdma_probe()
1180 &imxdma->dma_device.channels); in imxdma_probe()
1183 imxdma->dma_device.dev = &pdev->dev; in imxdma_probe()
[all …]
dmaengine.c
170 __dma_device_satisfies_mask(struct dma_device *device, in __dma_device_satisfies_mask()
357 struct dma_device *device; in dma_issue_pending_all()
394 struct dma_device *device; in min_chan()
435 struct dma_device *device; in dma_channel_rebalance()
465 struct dma_device *device; in dma_get_slave_caps()
500 struct dma_device *dev, in private_candidate()
536 static struct dma_chan *find_candidate(struct dma_device *device, in find_candidate()
585 struct dma_device *device = chan->device; in dma_get_slave_channel()
608 struct dma_chan *dma_get_any_slave_channel(struct dma_device *device) in dma_get_any_slave_channel()
640 struct dma_device *device, *_d; in __dma_request_channel()
[all …]
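The dmaengine.c hits are the core's channel-allocation path. A consumer-side sketch of how that path is reached (the channel name "tx" is a placeholder):

    #include <linux/dmaengine.h>

    static int my_request_channel(struct device *dev)
    {
            struct dma_slave_caps caps;
            struct dma_chan *chan;
            int ret;

            /* Resolved via DT/ACPI, or the core's find_candidate() scan above. */
            chan = dma_request_chan(dev, "tx");
            if (IS_ERR(chan))
                    return PTR_ERR(chan);

            ret = dma_get_slave_caps(chan, &caps);  /* dma_get_slave_caps() listed above */
            if (!ret)
                    dev_info(dev, "max burst: %u\n", caps.max_burst);

            dma_release_channel(chan);
            return ret;
    }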
imx-sdma.c
433 struct dma_device dma_device; member
1956 dma_cap_mask_t mask = sdma->dma_device.cap_mask; in sdma_xlate()
2061 dma_cap_set(DMA_SLAVE, sdma->dma_device.cap_mask); in sdma_probe()
2062 dma_cap_set(DMA_CYCLIC, sdma->dma_device.cap_mask); in sdma_probe()
2063 dma_cap_set(DMA_MEMCPY, sdma->dma_device.cap_mask); in sdma_probe()
2065 INIT_LIST_HEAD(&sdma->dma_device.channels); in sdma_probe()
2082 vchan_init(&sdmac->vc, &sdma->dma_device); in sdma_probe()
2098 sdma->dma_device.dev = &pdev->dev; in sdma_probe()
2100 sdma->dma_device.device_alloc_chan_resources = sdma_alloc_chan_resources; in sdma_probe()
2101 sdma->dma_device.device_free_chan_resources = sdma_free_chan_resources; in sdma_probe()
[all …]
dma-jz4780.c
148 struct dma_device dma_device; member
179 dma_device); in jz4780_dma_chan_parent()
799 dma_cap_mask_t mask = jzdma->dma_device.cap_mask; in jz4780_of_dma_xlate()
810 dev_err(jzdma->dma_device.dev, in jz4780_of_dma_xlate()
818 dev_err(jzdma->dma_device.dev, in jz4780_of_dma_xlate()
840 struct dma_device *dd; in jz4780_dma_probe()
914 dd = &jzdma->dma_device; in jz4780_dma_probe()
st_fdma.h
133 struct dma_device dma_device; member
idma64.h
184 struct dma_device dma;
193 static inline struct idma64 *to_idma64(struct dma_device *ddev) in to_idma64()
fsl-edma-common.h
155 struct dma_device dma_dev;
246 void fsl_edma_cleanup_vchan(struct dma_device *dmadev);
at_hdmac_regs.h
326 struct dma_device dma_common;
344 static inline struct at_dma *to_at_dma(struct dma_device *ddev) in to_at_dma()
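idma64.h, at_hdmac_regs.h (and hsu.h further down) all recover their driver-private state from the embedded struct dma_device with the same container_of() idiom; a generic sketch with placeholder names:

    #include <linux/kernel.h>
    #include <linux/dmaengine.h>

    struct my_dma {
            struct dma_device dma;  /* embedded engine descriptor */
            void __iomem *regs;     /* placeholder driver state */
    };

    /* Given the dma_device the core hands back (e.g. chan->device),
     * get back to the enclosing driver structure. */
    static inline struct my_dma *to_my_dma(struct dma_device *ddev)
    {
            return container_of(ddev, struct my_dma, dma);
    }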
/Linux-v5.4/include/linux/
dmaengine.h
254 struct dma_device *device;
722 struct dma_device { struct
1042 static inline bool is_dma_copy_aligned(struct dma_device *dev, size_t off1, in is_dma_copy_aligned()
1048 static inline bool is_dma_xor_aligned(struct dma_device *dev, size_t off1, in is_dma_xor_aligned()
1054 static inline bool is_dma_pq_aligned(struct dma_device *dev, size_t off1, in is_dma_pq_aligned()
1060 static inline bool is_dma_fill_aligned(struct dma_device *dev, size_t off1, in is_dma_fill_aligned()
1067 dma_set_maxpq(struct dma_device *dma, int maxpq, int has_pq_continue) in dma_set_maxpq()
1086 static inline bool dma_dev_has_pq_continue(struct dma_device *dma) in dma_dev_has_pq_continue()
1091 static inline unsigned short dma_dev_to_maxpq(struct dma_device *dma) in dma_dev_to_maxpq()
1109 static inline int dma_maxpq(struct dma_device *dma, enum dma_ctrl_flags flags) in dma_maxpq()
[all …]
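The is_dma_*_aligned() helpers above let offload code check an engine's alignment constraints before using it. A small usage sketch (the wrapping predicate is hypothetical):

    #include <linux/dmaengine.h>

    /* Returns true when the channel's engine can handle a memcpy with these
     * offsets and length, i.e. when is_dma_copy_aligned() (above) is satisfied;
     * callers would fall back to a CPU copy otherwise. */
    static bool can_offload_copy(struct dma_chan *chan, size_t dst_off,
                                 size_t src_off, size_t len)
    {
            struct dma_device *dev = chan ? chan->device : NULL;

            return dev && is_dma_copy_aligned(dev, src_off, dst_off, len);
    }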
shdma-base.h
107 struct dma_device dma_dev;
/Linux-v5.4/drivers/dma/ioat/
sysfs.c
19 struct dma_device *dma = c->device; in cap_show()
33 struct dma_device *dma = c->device; in version_show()
77 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_kobject_add()
98 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_kobject_del()
init.c
305 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_dma_self_test()
496 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_probe()
552 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_dma_remove()
573 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_enumerate_channels()
769 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_init_channel()
803 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_xor_val_self_test()
1065 struct dma_device *dma; in ioat_intr_quirk()
1093 struct dma_device *dma; in ioat3_dma_probe()
/Linux-v5.4/drivers/dma/hsu/
hsu.h
111 struct dma_device dma;
118 static inline struct hsu_dma *to_hsu_dma(struct dma_device *ddev) in to_hsu_dma()
/Linux-v5.4/drivers/dma/dw-edma/
dw-edma-core.h
109 struct dma_device wr_edma;
112 struct dma_device rd_edma;
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.c
51 struct device *dma_device; in mlx5_fpga_conn_map_buf() local
57 dma_device = &conn->fdev->mdev->pdev->dev; in mlx5_fpga_conn_map_buf()
58 buf->sg[0].dma_addr = dma_map_single(dma_device, buf->sg[0].data, in mlx5_fpga_conn_map_buf()
60 err = dma_mapping_error(dma_device, buf->sg[0].dma_addr); in mlx5_fpga_conn_map_buf()
70 buf->sg[1].dma_addr = dma_map_single(dma_device, buf->sg[1].data, in mlx5_fpga_conn_map_buf()
72 err = dma_mapping_error(dma_device, buf->sg[1].dma_addr); in mlx5_fpga_conn_map_buf()
75 dma_unmap_single(dma_device, buf->sg[0].dma_addr, in mlx5_fpga_conn_map_buf()
87 struct device *dma_device; in mlx5_fpga_conn_unmap_buf() local
89 dma_device = &conn->fdev->mdev->pdev->dev; in mlx5_fpga_conn_unmap_buf()
91 dma_unmap_single(dma_device, buf->sg[1].dma_addr, in mlx5_fpga_conn_unmap_buf()
[all …]
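mlx5_fpga_conn_map_buf() uses the streaming DMA API against the PCI device's struct device (held in the dma_device local above). A minimal sketch of that map/check/unmap pattern, with placeholder helper names and an assumed DMA_TO_DEVICE direction:

    #include <linux/dma-mapping.h>

    /* Map a driver buffer for device access and check the mapping, mirroring
     * the dma_map_single()/dma_mapping_error() calls above. */
    static int map_one_buf(struct device *dma_device, void *data, size_t size,
                           dma_addr_t *out)
    {
            dma_addr_t addr = dma_map_single(dma_device, data, size, DMA_TO_DEVICE);

            if (dma_mapping_error(dma_device, addr))
                    return -ENOMEM;
            *out = addr;
            return 0;
    }

    static void unmap_one_buf(struct device *dma_device, dma_addr_t addr, size_t size)
    {
            dma_unmap_single(dma_device, addr, size, DMA_TO_DEVICE);
    }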
/Linux-v5.4/crypto/async_tx/
async_tx.c
69 struct dma_device *device = chan->device; in async_tx_channel_switch()
224 struct dma_device *device; in async_trigger_callback()
async_xor.c
26 struct dma_device *dma = chan->device; in do_async_xor()
166 struct dma_device *device = chan ? chan->device : NULL; in async_xor()
261 struct dma_device *device = chan ? chan->device : NULL; in async_xor_val()
async_pq.c
42 struct dma_device *dma = chan->device; in do_async_gen_syndrome()
170 struct dma_device *device = chan ? chan->device : NULL; in async_gen_syndrome()
286 struct dma_device *device = chan ? chan->device : NULL; in async_syndrome_val()
async_memcpy.c
38 struct dma_device *device = chan ? chan->device : NULL; in async_memcpy()
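The async_tx hits pick a struct dma_device off the channel to decide between engine offload and a synchronous fallback. A caller-side sketch of that API (flags and offsets chosen only for illustration):

    #include <linux/async_tx.h>

    /* Copy one page, offloaded to a DMA engine when async_memcpy() can find a
     * capable channel, performed by the CPU otherwise. */
    static void copy_page_async(struct page *dst, struct page *src)
    {
            struct async_submit_ctl submit;
            struct dma_async_tx_descriptor *tx;

            init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, NULL);
            tx = async_memcpy(dst, src, 0, 0, PAGE_SIZE, &submit);
            async_tx_issue_pending(tx);     /* kick the channel; harmless if tx is NULL */
    }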
/Linux-v5.4/drivers/infiniband/hw/hns/
hns_roce_db.c
72 struct device *dma_device) in hns_roce_alloc_db_pgdir() argument
84 pgdir->page = dma_alloc_coherent(dma_device, PAGE_SIZE, in hns_roce_alloc_db_pgdir()
/Linux-v5.4/include/linux/platform_data/
dma-iop32x.h
37 struct dma_device common;
/Linux-v5.4/include/rdma/
ib_verbs.h
2587 struct device *dma_device; member
3931 return dma_mapping_error(dev->dma_device, dma_addr); in ib_dma_mapping_error()
3945 return dma_map_single(dev->dma_device, cpu_addr, size, direction); in ib_dma_map_single()
3959 dma_unmap_single(dev->dma_device, addr, size, direction); in ib_dma_unmap_single()
3976 return dma_map_page(dev->dma_device, page, offset, size, direction); in ib_dma_map_page()
3990 dma_unmap_page(dev->dma_device, addr, size, direction); in ib_dma_unmap_page()
4004 return dma_map_sg(dev->dma_device, sg, nents, direction); in ib_dma_map_sg()
4018 dma_unmap_sg(dev->dma_device, sg, nents, direction); in ib_dma_unmap_sg()
4026 return dma_map_sg_attrs(dev->dma_device, sg, nents, direction, in ib_dma_map_sg_attrs()
4035 dma_unmap_sg_attrs(dev->dma_device, sg, nents, direction, dma_attrs); in ib_dma_unmap_sg_attrs()
[all …]
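The ib_dma_* wrappers above simply forward to the regular DMA API on ib_device->dma_device. A ULP-side usage sketch with placeholder helper names:

    #include <rdma/ib_verbs.h>

    /* Map a kernel buffer for sends on an IB device; the wrapper forwards to
     * dma_map_single() on dev->dma_device, as the ib_verbs.h hits show. */
    static int my_map_send_buf(struct ib_device *ibdev, void *buf, size_t len, u64 *dma_addr)
    {
            *dma_addr = ib_dma_map_single(ibdev, buf, len, DMA_TO_DEVICE);
            if (ib_dma_mapping_error(ibdev, *dma_addr))
                    return -ENOMEM;
            return 0;
    }

    static void my_unmap_send_buf(struct ib_device *ibdev, u64 dma_addr, size_t len)
    {
            ib_dma_unmap_single(ibdev, dma_addr, len, DMA_TO_DEVICE);
    }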
/Linux-v5.4/drivers/dma/ppc4xx/
adma.h
73 struct dma_device common;
