Lines matching refs: async_tx

41 	container_of(tx, struct iop_adma_desc_slot, async_tx)
64 struct dma_async_tx_descriptor *tx = &desc->async_tx; in iop_adma_run_tx_complete_actions()
94 if (!async_tx_test_ack(&desc->async_tx)) in iop_adma_clean_slot()
129 iter->async_tx.cookie, iter->idx, busy, in __iop_adma_slot_cleanup()
130 iter->async_tx.phys, iop_desc_get_next_desc(iter), in __iop_adma_slot_cleanup()
131 async_tx_test_ack(&iter->async_tx)); in __iop_adma_slot_cleanup()
133 prefetch(&_iter->async_tx); in __iop_adma_slot_cleanup()
146 if (iter->async_tx.phys == current_desc) { in __iop_adma_slot_cleanup()
225 if (iter->xor_check_result && iter->async_tx.cookie) in __iop_adma_slot_cleanup()
288 prefetch(&_iter->async_tx); in iop_adma_alloc_slots()
320 iter->async_tx.phys, slots_per_op); in iop_adma_alloc_slots()
324 async_tx_ack(&iter->async_tx); in iop_adma_alloc_slots()
328 iter->async_tx.cookie = 0; in iop_adma_alloc_slots()
341 alloc_tail->async_tx.cookie = -EBUSY; in iop_adma_alloc_slots()
393 next_dma = grp_start->async_tx.phys; in iop_adma_tx_submit()
410 __func__, sw_desc->async_tx.cookie, sw_desc->idx); in iop_adma_tx_submit()
454 dma_async_tx_descriptor_init(&slot->async_tx, chan); in iop_adma_alloc_chan_resources()
455 slot->async_tx.tx_submit = iop_adma_tx_submit; in iop_adma_alloc_chan_resources()
460 slot->async_tx.phys = in iop_adma_alloc_chan_resources()
509 sw_desc->async_tx.flags = flags; in iop_adma_prep_dma_interrupt()
513 return sw_desc ? &sw_desc->async_tx : NULL; in iop_adma_prep_dma_interrupt()
540 sw_desc->async_tx.flags = flags; in iop_adma_prep_dma_memcpy()
544 return sw_desc ? &sw_desc->async_tx : NULL; in iop_adma_prep_dma_memcpy()
572 sw_desc->async_tx.flags = flags; in iop_adma_prep_dma_xor()
579 return sw_desc ? &sw_desc->async_tx : NULL; in iop_adma_prep_dma_xor()
607 sw_desc->async_tx.flags = flags; in iop_adma_prep_dma_xor_val()
614 return sw_desc ? &sw_desc->async_tx : NULL; in iop_adma_prep_dma_xor_val()
659 sw_desc->async_tx.flags = flags; in iop_adma_prep_dma_pq()
678 return sw_desc ? &sw_desc->async_tx : NULL; in iop_adma_prep_dma_pq()
713 sw_desc->async_tx.flags = flags; in iop_adma_prep_dma_pq_val()
722 return sw_desc ? &sw_desc->async_tx : NULL; in iop_adma_prep_dma_pq_val()
1461 async_tx_ack(&sw_desc->async_tx); in iop_chan_start_null_memcpy()
1467 cookie = dma_cookie_assign(&sw_desc->async_tx); in iop_chan_start_null_memcpy()
1484 iop_chan_set_next_descriptor(iop_chan, sw_desc->async_tx.phys); in iop_chan_start_null_memcpy()
1513 async_tx_ack(&sw_desc->async_tx); in iop_chan_start_null_xor()
1520 cookie = dma_cookie_assign(&sw_desc->async_tx); in iop_chan_start_null_xor()
1537 iop_chan_set_next_descriptor(iop_chan, sw_desc->async_tx.phys); in iop_chan_start_null_xor()
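The matches above all revolve around one dmaengine convention: the driver embeds a struct dma_async_tx_descriptor inside its own descriptor slot, recovers the slot with container_of() (line 41), initialises the embedded descriptor per slot with dma_async_tx_descriptor_init() and installs its own tx_submit hook (lines 454-455), which assigns a cookie via dma_cookie_assign() on submission (lines 1467, 1520). The sketch below is a minimal illustration of that pattern only; struct my_adma_slot, my_tx_submit() and my_slot_init() are hypothetical names rather than identifiers from iop-adma, and a real driver does considerably more around these calls (descriptor chaining, flag handling, ack bookkeeping).

	/*
	 * Minimal sketch of the embedded async_tx pattern shown in the listing.
	 * "my_adma_slot", "my_tx_submit" and "my_slot_init" are illustrative names.
	 */
	#include <linux/dmaengine.h>

	#include "dmaengine.h"	/* driver-internal helpers such as dma_cookie_assign() */

	struct my_adma_slot {
		struct dma_async_tx_descriptor async_tx;	/* embedded generic descriptor */
		dma_addr_t phys;				/* hardware descriptor address */
	};

	/* recover the driver slot from the generic descriptor, as at line 41 */
	#define tx_to_my_adma_slot(tx) \
		container_of(tx, struct my_adma_slot, async_tx)

	/* .tx_submit hook: assign a cookie for the submitted descriptor */
	static dma_cookie_t my_tx_submit(struct dma_async_tx_descriptor *tx)
	{
		struct my_adma_slot *slot = tx_to_my_adma_slot(tx);

		/* a real driver also links the slot into its hardware chain here */
		return dma_cookie_assign(&slot->async_tx);
	}

	/* per-slot setup, done once when channel resources are allocated */
	static void my_slot_init(struct my_adma_slot *slot, struct dma_chan *chan)
	{
		dma_async_tx_descriptor_init(&slot->async_tx, chan);
		slot->async_tx.tx_submit = my_tx_submit;
	}

The prep_dma_* matches (lines 509-722) fit the same scheme: each prep routine stores the caller's flags in sw_desc->async_tx.flags and returns &sw_desc->async_tx, and that same embedded descriptor is what the framework later hands back to the driver's tx_submit hook.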