
Searched refs:dma_async_tx_descriptor (Results 1 – 25 of 257) sorted by relevance


/Linux-v5.4/include/linux/
dmaengine.h:496 struct dma_async_tx_descriptor { struct
501 dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx); argument
502 int (*desc_free)(struct dma_async_tx_descriptor *tx); argument
508 struct dma_async_tx_descriptor *next; argument
509 struct dma_async_tx_descriptor *parent; argument
515 static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx, in dma_set_unmap() argument
526 static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx, in dma_set_unmap()
540 static inline void dma_descriptor_unmap(struct dma_async_tx_descriptor *tx) in dma_descriptor_unmap()
549 static inline void txd_lock(struct dma_async_tx_descriptor *txd) in txd_lock()
552 static inline void txd_unlock(struct dma_async_tx_descriptor *txd) in txd_unlock()
[all …]
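For context on how a consumer typically uses one of these descriptors, here is a minimal sketch built only on the public slave-DMA helpers declared in dmaengine.h (dmaengine_prep_slave_single, dmaengine_submit, dma_async_issue_pending); the channel, buffer and my_dma_done() callback are illustrative placeholders, not code from the indexed files.

/* Hedged sketch: drive one DMA_DEV_TO_MEM transfer via a dma_async_tx_descriptor. */
#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>

static void my_dma_done(void *param)
{
	complete(param);			/* wake whoever queued the transfer */
}

static int my_start_rx(struct dma_chan *chan, dma_addr_t buf, size_t len,
		       struct completion *done)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	desc = dmaengine_prep_slave_single(chan, buf, len, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = my_dma_done;		/* fields of dma_async_tx_descriptor */
	desc->callback_param = done;

	cookie = dmaengine_submit(desc);	/* invokes desc->tx_submit() */
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);		/* start the queued work */
	return 0;
}

In a real driver the channel would first be configured with dmaengine_slave_config() and the buffer mapped for DMA; the sketch omits both.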
async_tx.h:71 struct dma_async_tx_descriptor *depend_tx;
88 static inline void async_tx_issue_pending(struct dma_async_tx_descriptor *tx) in async_tx_issue_pending()
112 static inline void async_tx_issue_pending(struct dma_async_tx_descriptor *tx) in async_tx_issue_pending()
147 struct dma_async_tx_descriptor *tx, in init_async_submit()
158 void async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,
161 struct dma_async_tx_descriptor *
165 struct dma_async_tx_descriptor *
170 struct dma_async_tx_descriptor *
175 struct dma_async_tx_descriptor *async_trigger_callback(struct async_submit_ctl *submit);
177 struct dma_async_tx_descriptor *
[all …]
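The async_tx.h declarations above form the offload API used by md/raid; a minimal sketch of chaining two operations through it follows. The pages, length and copy_done() callback are assumed placeholders, not code from the indexed files.

/* Hedged sketch: dependency chaining with the async_tx API. */
#include <linux/async_tx.h>
#include <linux/printk.h>

static void copy_done(void *param)
{
	pr_info("offloaded copy chain finished\n");
}

static void my_copy_then_notify(struct page *dst, struct page *src, size_t len)
{
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;

	/* first operation: no dependency, no callback yet */
	init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, NULL);
	tx = async_memcpy(dst, src, 0, 0, len, &submit);

	/* run copy_done() only after the copy above has completed */
	init_async_submit(&submit, ASYNC_TX_ACK, tx, copy_done, NULL, NULL);
	tx = async_trigger_callback(&submit);

	async_tx_quiesce(&tx);			/* wait for the whole chain */
}

If no offload channel is available, the same calls fall back to synchronous CPU operations, which is why callers pass descriptors between steps instead of blocking on each one.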
/Linux-v5.4/crypto/async_tx/
async_tx.c:46 struct dma_async_tx_descriptor *depend_tx = submit->depend_tx; in __async_tx_find_channel()
65 async_tx_channel_switch(struct dma_async_tx_descriptor *depend_tx, in async_tx_channel_switch()
66 struct dma_async_tx_descriptor *tx) in async_tx_channel_switch()
70 struct dma_async_tx_descriptor *intr_tx = (void *) ~0; in async_tx_channel_switch()
143 async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx, in async_tx_submit()
146 struct dma_async_tx_descriptor *depend_tx = submit->depend_tx; in async_tx_submit()
220 struct dma_async_tx_descriptor *
225 struct dma_async_tx_descriptor *tx; in async_trigger_callback()
226 struct dma_async_tx_descriptor *depend_tx = submit->depend_tx; in async_trigger_callback()
263 void async_tx_quiesce(struct dma_async_tx_descriptor **tx) in async_tx_quiesce()
async_raid6_recov.c:17 static struct dma_async_tx_descriptor *
35 struct dma_async_tx_descriptor *tx; in async_sum_product()
82 static struct dma_async_tx_descriptor *
99 struct dma_async_tx_descriptor *tx; in async_mult()
145 static struct dma_async_tx_descriptor *
149 struct dma_async_tx_descriptor *tx = NULL; in __2data_recov_4()
184 static struct dma_async_tx_descriptor *
188 struct dma_async_tx_descriptor *tx = NULL; in __2data_recov_5()
258 static struct dma_async_tx_descriptor *
262 struct dma_async_tx_descriptor *tx = NULL; in __2data_recov_n()
[all …]
async_xor.c:22 static __async_inline struct dma_async_tx_descriptor *
27 struct dma_async_tx_descriptor *tx = NULL; in do_async_xor()
159 struct dma_async_tx_descriptor *
175 struct dma_async_tx_descriptor *tx; in async_xor()
255 struct dma_async_tx_descriptor *
262 struct dma_async_tx_descriptor *tx = NULL; in async_xor_val()
async_pq.c:34 static __async_inline struct dma_async_tx_descriptor *
41 struct dma_async_tx_descriptor *tx = NULL; in do_async_gen_syndrome()
162 struct dma_async_tx_descriptor *
183 struct dma_async_tx_descriptor *tx; in async_gen_syndrome()
280 struct dma_async_tx_descriptor *
287 struct dma_async_tx_descriptor *tx; in async_syndrome_val()
async_memcpy.c:31 struct dma_async_tx_descriptor *
39 struct dma_async_tx_descriptor *tx = NULL; in async_memcpy()
/Linux-v5.4/drivers/dma/ioat/
dma.h:184 struct dma_async_tx_descriptor txd;
218 struct dma_async_tx_descriptor *tx, int id) in __dump_desc_dbg()
357 struct dma_async_tx_descriptor *
360 struct dma_async_tx_descriptor *
362 struct dma_async_tx_descriptor *
365 struct dma_async_tx_descriptor *
369 struct dma_async_tx_descriptor *
373 struct dma_async_tx_descriptor *
377 struct dma_async_tx_descriptor *
380 struct dma_async_tx_descriptor *
prep.c:100 struct dma_async_tx_descriptor *
151 static struct dma_async_tx_descriptor *
244 struct dma_async_tx_descriptor *
256 struct dma_async_tx_descriptor *
338 static struct dma_async_tx_descriptor *
462 static struct dma_async_tx_descriptor *
573 struct dma_async_tx_descriptor *
618 struct dma_async_tx_descriptor *
646 struct dma_async_tx_descriptor *
672 struct dma_async_tx_descriptor *
[all …]
/Linux-v5.4/drivers/dma/
dmaengine.h:29 static inline dma_cookie_t dma_cookie_assign(struct dma_async_tx_descriptor *tx) in dma_cookie_assign()
52 static inline void dma_cookie_complete(struct dma_async_tx_descriptor *tx) in dma_cookie_complete()
106 dmaengine_desc_get_callback(struct dma_async_tx_descriptor *tx, in dmaengine_desc_get_callback()
152 dmaengine_desc_get_callback_invoke(struct dma_async_tx_descriptor *tx, in dmaengine_desc_get_callback_invoke()
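The helpers above (dma_cookie_assign, dma_cookie_complete, dmaengine_desc_get_callback_invoke) live in the driver-private drivers/dma/dmaengine.h; a rough sketch of how a provider might use them follows. struct my_chan, struct my_desc, my_tx_submit() and my_desc_complete() are hypothetical names, not taken from the indexed drivers.

/* Hedged sketch: cookie lifecycle inside a hypothetical DMA provider. */
#include <linux/list.h>
#include <linux/spinlock.h>
#include "dmaengine.h"			/* drivers/dma/ private helpers */

struct my_chan {			/* hypothetical channel wrapper */
	struct dma_chan chan;
	spinlock_t lock;
	struct list_head pending;
};

struct my_desc {			/* hypothetical driver descriptor */
	struct dma_async_tx_descriptor txd;
	struct list_head node;
};

static dma_cookie_t my_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct my_desc *d = container_of(tx, struct my_desc, txd);
	struct my_chan *c = container_of(tx->chan, struct my_chan, chan);
	dma_cookie_t cookie;
	unsigned long flags;

	spin_lock_irqsave(&c->lock, flags);
	cookie = dma_cookie_assign(tx);		/* hand out the next cookie */
	list_add_tail(&d->node, &c->pending);	/* queue for the hardware */
	spin_unlock_irqrestore(&c->lock, flags);

	return cookie;
}

/* called from the driver's completion path, typically under the channel lock */
static void my_desc_complete(struct my_desc *d)
{
	dma_cookie_complete(&d->txd);			/* mark the cookie as done */
	dmaengine_desc_get_callback_invoke(&d->txd, NULL);	/* client callback */
}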
virt-dma.h:16 struct dma_async_tx_descriptor tx;
47 extern dma_cookie_t vchan_tx_submit(struct dma_async_tx_descriptor *);
48 extern int vchan_tx_desc_free(struct dma_async_tx_descriptor *);
56 static inline struct dma_async_tx_descriptor *vchan_tx_prep(struct virt_dma_chan *vc, in vchan_tx_prep()
virt-dma.c:14 static struct virt_dma_desc *to_virt_desc(struct dma_async_tx_descriptor *tx) in to_virt_desc()
19 dma_cookie_t vchan_tx_submit(struct dma_async_tx_descriptor *tx) in vchan_tx_submit()
49 int vchan_tx_desc_free(struct dma_async_tx_descriptor *tx) in vchan_tx_desc_free()
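virt-dma layers a generic descriptor list on top of dma_async_tx_descriptor; a short sketch of a prep callback in a driver built on it follows. struct my_vdesc, my_prep_memcpy() and the GFP_NOWAIT allocation are illustrative assumptions, while to_virt_chan() and vchan_tx_prep() are the helpers declared in virt-dma.h.

/* Hedged sketch: returning a descriptor through vchan_tx_prep(). */
#include <linux/slab.h>
#include "virt-dma.h"			/* drivers/dma/ virt-dma helpers */

struct my_vdesc {			/* hypothetical driver descriptor */
	struct virt_dma_desc vd;	/* embeds struct dma_async_tx_descriptor tx */
	dma_addr_t src, dst;
	size_t len;
};

static struct dma_async_tx_descriptor *
my_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
	       size_t len, unsigned long flags)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);
	struct my_vdesc *d = kzalloc(sizeof(*d), GFP_NOWAIT);

	if (!d)
		return NULL;
	d->src = src;
	d->dst = dst;
	d->len = len;

	/* sets up d->vd.tx (tx_submit = vchan_tx_submit) and returns it */
	return vchan_tx_prep(vc, &d->vd, flags);
}

vchan_tx_submit() later assigns the cookie and moves the descriptor onto the channel's submitted list, which is what the vchan_tx_submit() hit above shows.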
mic_x100_dma.c:80 struct dma_async_tx_descriptor *tx; in mic_dma_cleanup()
238 static dma_cookie_t mic_dma_tx_submit_unlock(struct dma_async_tx_descriptor *tx) in mic_dma_tx_submit_unlock()
257 static inline struct dma_async_tx_descriptor *
261 struct dma_async_tx_descriptor *tx = &ch->tx_array[idx]; in allocate_tx()
269 static struct dma_async_tx_descriptor *
300 static struct dma_async_tx_descriptor *
320 static struct dma_async_tx_descriptor *
499 struct dma_async_tx_descriptor *tx; in mic_dma_drain_chan()
mv_xor_v2.c:179 struct dma_async_tx_descriptor async_tx;
299 mv_xor_v2_tx_submit(struct dma_async_tx_descriptor *tx) in mv_xor_v2_tx_submit()
373 static struct dma_async_tx_descriptor *
426 static struct dma_async_tx_descriptor *
485 static struct dma_async_tx_descriptor *
iop-adma.c:55 struct dma_async_tx_descriptor *tx = &desc->async_tx; in iop_adma_run_tx_complete_actions()
361 iop_adma_tx_submit(struct dma_async_tx_descriptor *tx) in iop_adma_tx_submit()
483 static struct dma_async_tx_descriptor *
505 static struct dma_async_tx_descriptor *
536 static struct dma_async_tx_descriptor *
571 static struct dma_async_tx_descriptor *
606 static struct dma_async_tx_descriptor *
670 static struct dma_async_tx_descriptor *
838 struct dma_async_tx_descriptor *tx; in iop_adma_memcpy_self_test()
914 struct dma_async_tx_descriptor *tx; in iop_adma_xor_val_self_test()
[all …]
fsl_raid.c:86 static dma_cookie_t fsl_re_tx_submit(struct dma_async_tx_descriptor *tx) in fsl_re_tx_submit()
316 static struct dma_async_tx_descriptor *fsl_re_prep_dma_genq( in fsl_re_prep_dma_genq()
391 static struct dma_async_tx_descriptor *fsl_re_prep_dma_xor( in fsl_re_prep_dma_xor()
403 static struct dma_async_tx_descriptor *fsl_re_prep_dma_pq( in fsl_re_prep_dma_pq()
430 struct dma_async_tx_descriptor *tx; in fsl_re_prep_dma_pq()
526 static struct dma_async_tx_descriptor *fsl_re_prep_dma_memcpy( in fsl_re_prep_dma_memcpy()
imx-dma.c:122 struct dma_async_tx_descriptor desc;
156 struct dma_async_tx_descriptor desc;
744 static dma_cookie_t imxdma_tx_submit(struct dma_async_tx_descriptor *tx) in imxdma_tx_submit()
773 memset(&desc->desc, 0, sizeof(struct dma_async_tx_descriptor)); in imxdma_alloc_chan_resources()
815 static struct dma_async_tx_descriptor *imxdma_prep_slave_sg( in imxdma_prep_slave_sg()
866 static struct dma_async_tx_descriptor *imxdma_prep_dma_cyclic( in imxdma_prep_dma_cyclic()
924 static struct dma_async_tx_descriptor *imxdma_prep_dma_memcpy( in imxdma_prep_dma_memcpy()
955 static struct dma_async_tx_descriptor *imxdma_prep_dma_interleaved( in imxdma_prep_dma_interleaved()
mmp_pdma.c:85 struct dma_async_tx_descriptor async_tx;
93 struct dma_async_tx_descriptor desc;
341 static dma_cookie_t mmp_pdma_tx_submit(struct dma_async_tx_descriptor *tx) in mmp_pdma_tx_submit()
444 static struct dma_async_tx_descriptor *
525 static struct dma_async_tx_descriptor *
605 static struct dma_async_tx_descriptor *
930 struct dma_async_tx_descriptor *txd = &desc->async_tx; in dma_do_tasklet()
/Linux-v5.4/drivers/spi/
spi-dw-mid.c:141 static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws, in dw_spi_dma_prepare_tx()
145 struct dma_async_tx_descriptor *txdesc; in dw_spi_dma_prepare_tx()
187 static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws, in dw_spi_dma_prepare_rx()
191 struct dma_async_tx_descriptor *rxdesc; in dw_spi_dma_prepare_rx()
242 struct dma_async_tx_descriptor *txdesc, *rxdesc; in mid_spi_dma_transfer()
/Linux-v5.4/include/linux/platform_data/
dma-ste-dma40.h:178 dma_async_tx_descriptor *stedma40_slave_mem(struct dma_chan *chan, in stedma40_slave_mem()
199 dma_async_tx_descriptor *stedma40_slave_mem(struct dma_chan *chan, in stedma40_slave_mem()
/Linux-v5.4/drivers/tty/serial/
samsung.h:66 struct dma_async_tx_descriptor *tx_desc;
67 struct dma_async_tx_descriptor *rx_desc;
/Linux-v5.4/drivers/dma/sh/
shdma-base.c:70 static dma_cookie_t shdma_tx_submit(struct dma_async_tx_descriptor *tx) in shdma_tx_submit()
337 struct dma_async_tx_descriptor *tx = &desc->async_tx; in __ld_cleanup()
560 static struct dma_async_tx_descriptor *shdma_prep_sg(struct shdma_chan *schan, in shdma_prep_sg()
639 static struct dma_async_tx_descriptor *shdma_prep_memcpy( in shdma_prep_memcpy()
661 static struct dma_async_tx_descriptor *shdma_prep_slave_sg( in shdma_prep_slave_sg()
691 static struct dma_async_tx_descriptor *shdma_prep_dma_cyclic( in shdma_prep_dma_cyclic()
698 struct dma_async_tx_descriptor *desc; in shdma_prep_dma_cyclic()
884 struct dma_async_tx_descriptor *tx = &sdesc->async_tx; in shdma_reset()
/Linux-v5.4/include/linux/dma/
mxs-dma.h:16 static inline struct dma_async_tx_descriptor *mxs_dmaengine_prep_pio( in mxs_dmaengine_prep_pio()
/Linux-v5.4/drivers/md/
raid5-log.h:37 extern struct dma_async_tx_descriptor *
39 struct dma_async_tx_descriptor *tx);
/Linux-v5.4/drivers/mmc/host/
mxs-mmc.c:214 static struct dma_async_tx_descriptor *mxs_mmc_prep_dma( in mxs_mmc_prep_dma()
218 struct dma_async_tx_descriptor *desc; in mxs_mmc_prep_dma()
253 struct dma_async_tx_descriptor *desc; in mxs_mmc_bc()
287 struct dma_async_tx_descriptor *desc; in mxs_mmc_ac()
348 struct dma_async_tx_descriptor *desc; in mxs_mmc_adtc()
