Lines matching refs:async_tx

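These hits all appear to come from the Marvell XOR engine driver, drivers/dma/mv_xor.c in the Linux kernel, where each driver-private descriptor slot (struct mv_xor_desc_slot) embeds the generic struct dma_async_tx_descriptor as its async_tx member. The notes and sketches below reconstruct, under that assumption, what each cluster of hits is doing; driver-private helper and field names are either quoted from the hits or recalled from the driver and may not match the exact source revision at hand.
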
43 	container_of(tx, struct mv_xor_desc_slot, async_tx)
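The hit at line 43 is the body of the driver's conversion helper: when the dmaengine core hands back the embedded generic descriptor, container_of() recovers the enclosing slot. A minimal sketch of the pattern (the macro is spelled to_mv_xor_slot() in the driver; the slot layout is abbreviated here):

    #include <linux/kernel.h>       /* container_of() */
    #include <linux/dmaengine.h>

    struct mv_xor_desc_slot {
            /* ... hardware descriptor pointer, list linkage, ... */
            struct dma_async_tx_descriptor async_tx;
    };

    /* Recover the enclosing slot from its embedded generic descriptor. */
    #define to_mv_xor_slot(tx) \
            container_of(tx, struct mv_xor_desc_slot, async_tx)
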
185 mv_chan_set_next_descriptor(mv_chan, sw_desc->async_tx.phys); in mv_chan_start_new_chain()
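mv_chan_start_new_chain() points the engine at a fresh chain: async_tx.phys carries the bus (DMA) address of the slot's hardware descriptor, which is written into the channel's next-descriptor register before the channel is activated. Roughly:

    /* Hand the engine the bus address of the first hardware descriptor. */
    mv_chan_set_next_descriptor(mv_chan, sw_desc->async_tx.phys);
    /* ... the driver then unmasks interrupts and activates the channel. */
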
196 BUG_ON(desc->async_tx.cookie < 0); in mv_desc_run_tx_complete_actions()
198 if (desc->async_tx.cookie > 0) { in mv_desc_run_tx_complete_actions()
199 cookie = desc->async_tx.cookie; in mv_desc_run_tx_complete_actions()
201 dma_descriptor_unmap(&desc->async_tx); in mv_desc_run_tx_complete_actions()
205 dmaengine_desc_get_callback_invoke(&desc->async_tx, NULL); in mv_desc_run_tx_complete_actions()
209 dma_run_dependencies(&desc->async_tx); in mv_desc_run_tx_complete_actions()
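Lines 196–209 are the completion path. A positive cookie marks a client-visible transaction, so the driver unmaps the transfer, invokes the client's callback, and then releases any operations that depended on this one via dma_run_dependencies(). Stitching the hits together gives approximately:

    static dma_cookie_t
    mv_desc_run_tx_complete_actions(struct mv_xor_desc_slot *desc,
                                    struct mv_xor_chan *mv_chan,
                                    dma_cookie_t cookie)
    {
            BUG_ON(desc->async_tx.cookie < 0);

            if (desc->async_tx.cookie > 0) {
                    cookie = desc->async_tx.cookie;

                    /* Unmap DMA buffers, then notify the client.  The
                     * callback must not sleep or submit new work here. */
                    dma_descriptor_unmap(&desc->async_tx);
                    dmaengine_desc_get_callback_invoke(&desc->async_tx, NULL);
            }

            /* Kick any async_tx operations that were waiting on this one. */
            dma_run_dependencies(&desc->async_tx);

            return cookie;
    }
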
223 if (async_tx_test_ack(&iter->async_tx)) { in mv_chan_clean_completed_slots()
239 __func__, __LINE__, desc, desc->async_tx.flags); in mv_desc_clean_slot()
244 if (!async_tx_test_ack(&desc->async_tx)) { in mv_desc_clean_slot()
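The two async_tx_test_ack() hits are the descriptor-recycling gate. A descriptor carries DMA_CTRL_ACK once the client (or the async_tx core, after resolving dependencies) has acknowledged it; until then the driver must not reuse the slot. Both cleanup paths therefore test the flag before recycling, along these lines (the list and field names completed_slots, free_slots and node are assumptions based on the driver):

    if (async_tx_test_ack(&desc->async_tx))
            /* acked: safe to move the slot back to the free list */
            list_move_tail(&desc->node, &mv_chan->free_slots);
    else
            /* not yet acked: park it on the completed list, retry later */
            list_move_tail(&desc->node, &mv_chan->completed_slots);
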
293 if (iter->async_tx.phys == current_desc) { in mv_chan_slot_cleanup()
298 if (iter->async_tx.phys == current_desc) { in mv_chan_slot_cleanup()
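During cleanup the driver walks its software chain and compares each slot's async_tx.phys against the engine's current-descriptor register to find where the hardware has got to; everything before the match is known complete. A sketch (mv_chan_get_current_desc() and the chain/node names are recalled from the driver):

    u32 current_desc = mv_chan_get_current_desc(mv_chan);

    list_for_each_entry(iter, &mv_chan->chain, node) {
            if (iter->async_tx.phys == current_desc)
                    break;  /* hardware is on this descriptor */
            /* descriptors before the match have been executed */
    }
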
365 async_tx_ack(&iter->async_tx); in mv_chan_alloc_slot()
366 iter->async_tx.cookie = -EBUSY; in mv_chan_alloc_slot()
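Slot allocation pre-acks the descriptor, since the driver itself owns it until submit, and stamps the cookie with -EBUSY, the conventional "allocated but not yet submitted" marker. Approximately:

    if (!list_empty(&mv_chan->free_slots)) {
            iter = list_first_entry(&mv_chan->free_slots,
                                    struct mv_xor_desc_slot, node);
            list_move_tail(&iter->node, &mv_chan->allocated_slots);

            async_tx_ack(&iter->async_tx);   /* pre-ack: driver owns it */
            iter->async_tx.cookie = -EBUSY;  /* not yet submitted */
            return iter;
    }
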
392 __func__, sw_desc, &sw_desc->async_tx); in mv_xor_tx_submit()
408 &old_chain_tail->async_tx.phys); in mv_xor_tx_submit()
411 mv_desc_set_next_desc(old_chain_tail, sw_desc->async_tx.phys); in mv_xor_tx_submit()
420 if (current_desc == old_chain_tail->async_tx.phys) in mv_xor_tx_submit()
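mv_xor_tx_submit() is the descriptor's tx_submit hook. It assigns a cookie, appends the slot to the channel's chain, hardware-links the old tail to the new descriptor through async_tx.phys, and restarts the chain if the idle engine had already stopped on the old tail. A condensed reconstruction, with locking kept but debug prints trimmed (mv_chan_is_busy() and mv_chan_get_current_desc() are helper names recalled from the driver):

    static dma_cookie_t mv_xor_tx_submit(struct dma_async_tx_descriptor *tx)
    {
            struct mv_xor_desc_slot *sw_desc = to_mv_xor_slot(tx);
            struct mv_xor_chan *mv_chan = to_mv_xor_chan(tx->chan);
            struct mv_xor_desc_slot *old_chain_tail;
            dma_cookie_t cookie;

            spin_lock_bh(&mv_chan->lock);
            cookie = dma_cookie_assign(tx);

            if (list_empty(&mv_chan->chain)) {
                    /* empty chain: just start the engine on this slot */
                    list_move_tail(&sw_desc->node, &mv_chan->chain);
                    mv_chan_start_new_chain(mv_chan, sw_desc);
            } else {
                    old_chain_tail = list_entry(mv_chan->chain.prev,
                                                struct mv_xor_desc_slot,
                                                node);
                    list_move_tail(&sw_desc->node, &mv_chan->chain);

                    /* link the old tail's hardware descriptor to ours */
                    mv_desc_set_next_desc(old_chain_tail,
                                          sw_desc->async_tx.phys);

                    /* if the idle engine stopped on the old tail, our
                     * link arrived too late: restart from this slot */
                    if (!mv_chan_is_busy(mv_chan) &&
                        mv_chan_get_current_desc(mv_chan) ==
                                        old_chain_tail->async_tx.phys)
                            mv_chan_start_new_chain(mv_chan, sw_desc);
            }

            spin_unlock_bh(&mv_chan->lock);
            return cookie;
    }
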
456 dma_async_tx_descriptor_init(&slot->async_tx, chan); in mv_xor_alloc_chan_resources()
457 slot->async_tx.tx_submit = mv_xor_tx_submit; in mv_xor_alloc_chan_resources()
461 slot->async_tx.phys = dma_desc + idx * MV_XOR_SLOT_SIZE; in mv_xor_alloc_chan_resources()
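mv_xor_alloc_chan_resources() preallocates the slots. Each embedded descriptor is initialised against the channel with dma_async_tx_descriptor_init() (a dmaengine core helper), wired to the driver's submit hook, and given the bus address of its hardware descriptor inside the coherent pool, at stride MV_XOR_SLOT_SIZE. Per slot, roughly:

    dma_async_tx_descriptor_init(&slot->async_tx, chan);
    slot->async_tx.tx_submit = mv_xor_tx_submit;  /* core calls this */

    /* dma_desc is the bus address of the coherent descriptor pool */
    slot->async_tx.phys = dma_desc + idx * MV_XOR_SLOT_SIZE;
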
579 sw_desc->async_tx.flags = flags; in mv_xor_prep_dma_xor()
594 __func__, sw_desc, &sw_desc->async_tx); in mv_xor_prep_dma_xor()
595 return sw_desc ? &sw_desc->async_tx : NULL; in mv_xor_prep_dma_xor()
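Finally, the XOR prep hook shows the pattern every dmaengine prep routine follows: stash the caller's flags (DMA_PREP_INTERRUPT, DMA_CTRL_ACK, ...) on the embedded descriptor and return the generic &sw_desc->async_tx, or NULL when no slot is available. Condensed:

    static struct dma_async_tx_descriptor *
    mv_xor_prep_dma_xor(struct dma_chan *chan, dma_addr_t dest,
                        dma_addr_t *src, unsigned int src_cnt,
                        size_t len, unsigned long flags)
    {
            struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
            struct mv_xor_desc_slot *sw_desc;

            sw_desc = mv_chan_alloc_slot(mv_chan);
            if (sw_desc) {
                    sw_desc->async_tx.flags = flags;
                    /* ... program dest, the src[] array and len into the
                     * hardware descriptor ... */
            }

            return sw_desc ? &sw_desc->async_tx : NULL;
    }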