Lines matching refs:async_tx — each match shows the source line number, the matching line, and its enclosing function:
396 set_desc_next(chan, &tail->hw, desc->async_tx.phys); in append_ld_queue()
428 cookie = dma_cookie_assign(&child->async_tx); in fsl_dma_tx_submit()
449 dma_pool_free(chan->desc_pool, desc, desc->async_tx.phys); in fsl_dma_free_descriptor()
470 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in fsl_dma_alloc_descriptor()
471 desc->async_tx.tx_submit = fsl_dma_tx_submit; in fsl_dma_alloc_descriptor()
472 desc->async_tx.phys = pdesc; in fsl_dma_alloc_descriptor()
493 if (async_tx_test_ack(&desc->async_tx)) in fsldma_clean_completed_descriptor()
509 struct dma_async_tx_descriptor *txd = &desc->async_tx; in fsldma_run_tx_complete_actions()
547 if (!async_tx_test_ack(&desc->async_tx)) { in fsldma_clean_running_descriptor()
556 dma_pool_free(chan->desc_pool, desc, desc->async_tx.phys); in fsldma_clean_running_descriptor()
619 set_cdar(chan, desc->async_tx.phys); in fsl_chan_xfer_ld_queue()
658 if (desc->async_tx.phys == curr_phys) { in fsldma_cleanup_descriptors()
793 set_desc_next(chan, &prev->hw, new->async_tx.phys); in fsl_dma_prep_memcpy()
795 new->async_tx.cookie = 0; in fsl_dma_prep_memcpy()
796 async_tx_ack(&new->async_tx); in fsl_dma_prep_memcpy()
807 new->async_tx.flags = flags; /* client is in control of this ack */ in fsl_dma_prep_memcpy()
808 new->async_tx.cookie = -EBUSY; in fsl_dma_prep_memcpy()
813 return &first->async_tx; in fsl_dma_prep_memcpy()
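
Taken together, these matches trace the life cycle of a driver-private descriptor that embeds a struct dma_async_tx_descriptor: it is set up with dma_async_tx_descriptor_init(), given a tx_submit hook that calls dma_cookie_assign(), checked and released with async_tx_test_ack()/async_tx_ack(), and returned to its DMA pool using the bus address stashed in async_tx.phys. The sketch below is a minimal, hedged illustration of that pattern, not the driver's actual code; the matches appear to come from the Freescale DMA (fsldma) driver in the Linux kernel, and every name prefixed with my_ (my_chan, my_desc, my_tx_submit, my_alloc_descriptor, my_free_descriptor, my_clean_descriptor, my_prep_chain) is invented for illustration.

/*
 * Minimal sketch (assumed names, not the driver's actual code) of how a
 * DMA engine driver embeds and uses struct dma_async_tx_descriptor.
 */
#include <linux/dmaengine.h>
#include <linux/dmapool.h>
#include <linux/spinlock.h>

#include "dmaengine.h"	/* driver-internal header: dma_cookie_assign() */

struct my_chan {
	struct dma_chan common;		/* generic channel handed to the core */
	struct dma_pool *desc_pool;	/* pool backing the hardware descriptors */
	spinlock_t lock;
};

struct my_desc {
	struct dma_async_tx_descriptor async_tx;	/* generic transaction */
	/* hardware descriptor fields would follow here */
};

/* tx_submit hook: invoked by the core through desc->async_tx.tx_submit(). */
static dma_cookie_t my_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct my_chan *chan = container_of(tx->chan, struct my_chan, common);
	unsigned long flags;
	dma_cookie_t cookie;

	spin_lock_irqsave(&chan->lock, flags);
	cookie = dma_cookie_assign(tx);	/* hand the client a tracking cookie */
	/* ...append the descriptor chain to the channel's pending list... */
	spin_unlock_irqrestore(&chan->lock, flags);

	return cookie;
}

/* Allocate a descriptor from the pool and wire up its async_tx part. */
static struct my_desc *my_alloc_descriptor(struct my_chan *chan)
{
	struct my_desc *desc;
	dma_addr_t pdesc;

	desc = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, &pdesc);
	if (!desc)
		return NULL;

	dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
	desc->async_tx.tx_submit = my_tx_submit;
	desc->async_tx.phys = pdesc;	/* bus address, needed by dma_pool_free() */

	return desc;
}

/* Unconditionally return a descriptor to the pool. */
static void my_free_descriptor(struct my_chan *chan, struct my_desc *desc)
{
	dma_pool_free(chan->desc_pool, desc, desc->async_tx.phys);
}

/* Free a completed descriptor only once the client has acked it. */
static bool my_clean_descriptor(struct my_chan *chan, struct my_desc *desc)
{
	if (!async_tx_test_ack(&desc->async_tx))
		return false;		/* client still owns it */

	my_free_descriptor(chan, desc);
	return true;
}

/*
 * Build a two-descriptor chain: intermediate descriptors get cookie = 0 and
 * are pre-acked; only the head carries the client's flags and a placeholder
 * cookie of -EBUSY until tx_submit() assigns a real one.
 */
static struct dma_async_tx_descriptor *
my_prep_chain(struct my_chan *chan, unsigned long flags)
{
	struct my_desc *first = my_alloc_descriptor(chan);
	struct my_desc *second = my_alloc_descriptor(chan);

	if (!first || !second) {
		if (second)
			my_free_descriptor(chan, second);
		if (first)
			my_free_descriptor(chan, first);
		return NULL;
	}

	/* ...program the hardware fields and link second after first... */

	second->async_tx.cookie = 0;
	async_tx_ack(&second->async_tx);	/* core may recycle it freely */

	first->async_tx.flags = flags;		/* client is in control of this ack */
	first->async_tx.cookie = -EBUSY;	/* real cookie assigned at submit */

	return &first->async_tx;
}

Storing the pool bus address in async_tx.phys at allocation time is what makes the later dma_pool_free(chan->desc_pool, desc, desc->async_tx.phys) calls in the matches possible, and pre-acking every descriptor except the chain head (cookie 0 plus async_tx_ack()) mirrors the fsl_dma_prep_memcpy() lines above, where only the first descriptor keeps the client's flags and the -EBUSY placeholder cookie.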