Lines Matching refs:xd (cross-reference results for the transfer-descriptor pointer "xd" in the UniPhier XDMAC DMA engine driver, drivers/dma/uniphier-xdmac.c). Each entry gives the source line number, the matching code, and the enclosing function or symbol kind (member, argument, local). Hedged reconstruction sketches are interleaved after each group of matches.
89 struct uniphier_xdmac_desc *xd; member
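The references below imply a small descriptor object: the channel keeps a single pointer to the descriptor currently on the hardware (line 89, "member"), and the descriptor itself carries a flexible array of transfer nodes plus a cursor into that array. A minimal sketch of those structures, assuming the driver builds on the kernel's virt-dma helpers (suggested by the vchan_*() calls in the listing); the node struct name, the channel struct name, and any field not named in the listing are assumptions:

/*
 * Sketch only, reconstructed from the referenced lines; not the verbatim driver.
 * Assumed context: <linux/dmaengine.h>, <linux/types.h> and the in-tree
 * drivers/dma/virt-dma.h helpers.
 */

/* One transfer node: a contiguous chunk moved as nr_burst bursts of burst_size bytes. */
struct uniphier_xdmac_desc_node {               /* name assumed */
        dma_addr_t src;                         /* source bus address */
        dma_addr_t dst;                         /* destination bus address */
        u32 burst_size;                         /* bytes per burst */
        u32 nr_burst;                           /* bursts in this node */
};

/* The object referenced as "xd" throughout the listing. */
struct uniphier_xdmac_desc {
        struct virt_dma_desc vd;                /* embeds into the virt-dma queue */
        unsigned int nr_node;                   /* nodes in nodes[] */
        unsigned int cur_node;                  /* node currently programmed */
        enum dma_transfer_direction dir;        /* DMA_MEM_TO_MEM / _MEM_TO_DEV / _DEV_TO_MEM */
        struct uniphier_xdmac_desc_node nodes[];        /* sized with struct_size() at prep time */
};

/* Per-channel state, reduced to what the listing shows. */
struct uniphier_xdmac_chan {                    /* name assumed */
        struct virt_dma_chan vc;
        struct uniphier_xdmac_desc *xd;         /* line 89: in-flight descriptor, NULL when idle */
        /* register base, slave config, channel id, ... (assumed) */
};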
132 struct uniphier_xdmac_desc *xd) in uniphier_xdmac_chan_start() argument
140 src_addr = xd->nodes[xd->cur_node].src; in uniphier_xdmac_chan_start()
141 dst_addr = xd->nodes[xd->cur_node].dst; in uniphier_xdmac_chan_start()
142 its = xd->nodes[xd->cur_node].burst_size; in uniphier_xdmac_chan_start()
143 tnum = xd->nodes[xd->cur_node].nr_burst; in uniphier_xdmac_chan_start()
149 if (xd->dir == DMA_DEV_TO_MEM) { in uniphier_xdmac_chan_start()
158 if (xd->dir == DMA_MEM_TO_DEV) { in uniphier_xdmac_chan_start()
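Lines 132-158 show how one node of the descriptor is programmed into the hardware: the parameters always come from nodes[cur_node], and the stored direction selects which side is the device. A sketch of that shape; the register programming itself is not in the listing and is left as placeholder comments:

static void uniphier_xdmac_chan_start(struct uniphier_xdmac_chan *xc,
                                      struct uniphier_xdmac_desc *xd)
{
        dma_addr_t src_addr, dst_addr;
        u32 its, tnum;

        /* Lines 140-143: everything comes from the node the cursor points at. */
        src_addr = xd->nodes[xd->cur_node].src;
        dst_addr = xd->nodes[xd->cur_node].dst;
        its      = xd->nodes[xd->cur_node].burst_size;
        tnum     = xd->nodes[xd->cur_node].nr_burst;

        /* Lines 149 and 158: the descriptor's direction decides which side is
         * presumably the fixed device FIFO and which is incrementing memory. */
        if (xd->dir == DMA_DEV_TO_MEM) {
                /* ... source is the device, destination is memory (register
                 * setup assumed, not shown in the listing) ... */
        }
        if (xd->dir == DMA_MEM_TO_DEV) {
                /* ... source is memory, destination is the device ... */
        }

        /* ... write src_addr/dst_addr/its/tnum to the channel registers and
         * start the transfer (assumed) ... */
}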
220 struct uniphier_xdmac_desc *xd; in uniphier_xdmac_start() local
222 xd = uniphier_xdmac_next_desc(xc); in uniphier_xdmac_start()
223 if (xd) in uniphier_xdmac_start()
224 uniphier_xdmac_chan_start(xc, xd); in uniphier_xdmac_start()
227 xc->xd = xd; in uniphier_xdmac_start()
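Lines 220-227 are nearly the whole of uniphier_xdmac_start(): fetch the next issued descriptor, program its first node, and record it as the channel's in-flight descriptor. The assignment on line 227 is unconditional, so xc->xd also becomes NULL when nothing is queued, which is what issue_pending and the interrupt handler test for. A sketch (uniphier_xdmac_next_desc() is named on line 222; its body, presumably popping the next descriptor off the virt-dma queue, is not shown):

static void uniphier_xdmac_start(struct uniphier_xdmac_chan *xc)
{
        struct uniphier_xdmac_desc *xd;

        /* Line 222: next issued descriptor, or NULL if the queue is empty. */
        xd = uniphier_xdmac_next_desc(xc);
        if (xd)
                uniphier_xdmac_chan_start(xc, xd);

        /* Line 227: recorded unconditionally, so the channel is marked idle
         * (xc->xd == NULL) when there was nothing to start. */
        xc->xd = xd;
}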
248 } else if ((stat & XDMAC_ID_ENDIDF) && xc->xd) { in uniphier_xdmac_chan_irq()
249 xc->xd->cur_node++; in uniphier_xdmac_chan_irq()
250 if (xc->xd->cur_node >= xc->xd->nr_node) { in uniphier_xdmac_chan_irq()
251 vchan_cookie_complete(&xc->xd->vd); in uniphier_xdmac_chan_irq()
254 uniphier_xdmac_chan_start(xc, xc->xd); in uniphier_xdmac_chan_irq()
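Lines 248-254 are the completion branch of the channel interrupt handler: when the hardware signals end of transfer (XDMAC_ID_ENDIDF) and a descriptor is in flight, the node cursor advances; the descriptor is completed once its last node is done, otherwise the next node is programmed immediately. A sketch of that branch; in the listing it is an "else if" after error handling, and the call that hands the channel to the next queued descriptor after completion is an assumption:

        /* Fragment of uniphier_xdmac_chan_irq(); stat holds the interrupt status. */
        if ((stat & XDMAC_ID_ENDIDF) && xc->xd) {
                /* Line 249: one node finished, move the cursor forward. */
                xc->xd->cur_node++;
                if (xc->xd->cur_node >= xc->xd->nr_node) {
                        /* Line 251: last node done, complete the cookie and
                         * let the virt-dma core run the descriptor callback. */
                        vchan_cookie_complete(&xc->xd->vd);
                        /* ... then start the next queued descriptor, if any
                         * (assumed, e.g. via uniphier_xdmac_start(xc)) ... */
                } else {
                        /* Line 254: more nodes left, program the next one. */
                        uniphier_xdmac_chan_start(xc, xc->xd);
                }
        }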
285 struct uniphier_xdmac_desc *xd; in uniphier_xdmac_prep_dma_memcpy() local
295 xd = kzalloc(struct_size(xd, nodes, nr), GFP_NOWAIT); in uniphier_xdmac_prep_dma_memcpy()
296 if (!xd) in uniphier_xdmac_prep_dma_memcpy()
301 xd->nodes[i].src = src; in uniphier_xdmac_prep_dma_memcpy()
302 xd->nodes[i].dst = dst; in uniphier_xdmac_prep_dma_memcpy()
303 xd->nodes[i].burst_size = burst_size; in uniphier_xdmac_prep_dma_memcpy()
304 xd->nodes[i].nr_burst = len / burst_size; in uniphier_xdmac_prep_dma_memcpy()
311 xd->dir = DMA_MEM_TO_MEM; in uniphier_xdmac_prep_dma_memcpy()
312 xd->nr_node = nr; in uniphier_xdmac_prep_dma_memcpy()
313 xd->cur_node = 0; in uniphier_xdmac_prep_dma_memcpy()
315 return vchan_tx_prep(vc, &xd->vd, flags); in uniphier_xdmac_prep_dma_memcpy()
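Lines 285-315 outline the memcpy prep path: one kzalloc() sized with struct_size() covers the descriptor and all of its nodes, each node describes one contiguous chunk, and the finished descriptor is handed to the virt-dma core with vchan_tx_prep(). A sketch of that path; how the length is split into nr nodes and how src/dst/len advance per node are not in the listing, so they appear only as placeholders here:

static struct dma_async_tx_descriptor *
uniphier_xdmac_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst,
                               dma_addr_t src, size_t len, unsigned long flags)
{
        struct virt_dma_chan *vc = to_virt_chan(chan);
        struct uniphier_xdmac_desc *xd;
        size_t burst_size = len;        /* placeholder: capped by a hardware limit */
        unsigned int nr = 1;            /* placeholder: derived from len and that limit */
        unsigned int i;

        /* Line 295: single allocation for descriptor + nodes[]; GFP_NOWAIT
         * because prep callbacks may run in atomic context. */
        xd = kzalloc(struct_size(xd, nodes, nr), GFP_NOWAIT);
        if (!xd)
                return NULL;

        for (i = 0; i < nr; i++) {
                /* Lines 301-304: each node copies one contiguous chunk. */
                xd->nodes[i].src = src;
                xd->nodes[i].dst = dst;
                xd->nodes[i].burst_size = burst_size;
                xd->nodes[i].nr_burst = len / burst_size;
                /* ... advance src/dst and reduce len for the next node (assumed) ... */
        }

        /* Lines 311-313: memcpy is memory-to-memory and starts at node 0. */
        xd->dir = DMA_MEM_TO_MEM;
        xd->nr_node = nr;
        xd->cur_node = 0;

        /* Line 315: queue it through the virt-dma layer. */
        return vchan_tx_prep(vc, &xd->vd, flags);
}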
326 struct uniphier_xdmac_desc *xd; in uniphier_xdmac_prep_slave_sg() local
351 xd = kzalloc(struct_size(xd, nodes, sg_len), GFP_NOWAIT); in uniphier_xdmac_prep_slave_sg()
352 if (!xd) in uniphier_xdmac_prep_slave_sg()
356 xd->nodes[i].src = (direction == DMA_DEV_TO_MEM) in uniphier_xdmac_prep_slave_sg()
358 xd->nodes[i].dst = (direction == DMA_MEM_TO_DEV) in uniphier_xdmac_prep_slave_sg()
360 xd->nodes[i].burst_size = maxburst * buswidth; in uniphier_xdmac_prep_slave_sg()
361 xd->nodes[i].nr_burst = in uniphier_xdmac_prep_slave_sg()
362 sg_dma_len(sg) / xd->nodes[i].burst_size; in uniphier_xdmac_prep_slave_sg()
372 if (sg_dma_len(sg) % xd->nodes[i].burst_size) { in uniphier_xdmac_prep_slave_sg()
375 kfree(xd); in uniphier_xdmac_prep_slave_sg()
379 if (xd->nodes[i].nr_burst > XDMAC_MAX_WORDS) { in uniphier_xdmac_prep_slave_sg()
382 kfree(xd); in uniphier_xdmac_prep_slave_sg()
387 xd->dir = direction; in uniphier_xdmac_prep_slave_sg()
388 xd->nr_node = sg_len; in uniphier_xdmac_prep_slave_sg()
389 xd->cur_node = 0; in uniphier_xdmac_prep_slave_sg()
391 return vchan_tx_prep(vc, &xd->vd, flags); in uniphier_xdmac_prep_slave_sg()
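Lines 326-391 give the slave (device) prep path the same shape: one node per scatterlist entry, with the memory side taken from the scatterlist, the device side fixed, and burst_size derived from the configured maxburst and bus width. Segments that do not divide evenly into bursts, or that need more bursts than the hardware allows (XDMAC_MAX_WORDS), are rejected and the half-built descriptor is freed. A sketch; dev_addr, maxburst and buswidth are stand-ins for values that would come from the channel's dma_slave_config, and returning NULL on the error paths is assumed:

static struct dma_async_tx_descriptor *
uniphier_xdmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                             unsigned int sg_len,
                             enum dma_transfer_direction direction,
                             unsigned long flags, void *context)
{
        struct virt_dma_chan *vc = to_virt_chan(chan);
        struct uniphier_xdmac_desc *xd;
        struct scatterlist *sg;
        dma_addr_t dev_addr = 0;        /* stand-in: device FIFO address from dma_slave_config */
        u32 maxburst = 1, buswidth = 1; /* stand-ins: also from dma_slave_config */
        unsigned int i;

        /* Line 351: one allocation covers the descriptor and one node per segment. */
        xd = kzalloc(struct_size(xd, nodes, sg_len), GFP_NOWAIT);
        if (!xd)
                return NULL;

        for_each_sg(sgl, sg, sg_len, i) {
                /* Lines 356-362: memory side from the scatterlist, device side fixed. */
                xd->nodes[i].src = (direction == DMA_DEV_TO_MEM)
                        ? dev_addr : sg_dma_address(sg);
                xd->nodes[i].dst = (direction == DMA_MEM_TO_DEV)
                        ? dev_addr : sg_dma_address(sg);
                xd->nodes[i].burst_size = maxburst * buswidth;
                xd->nodes[i].nr_burst =
                        sg_dma_len(sg) / xd->nodes[i].burst_size;

                /* Lines 372-382: reject what the hardware cannot express. */
                if (sg_dma_len(sg) % xd->nodes[i].burst_size) {
                        kfree(xd);
                        return NULL;
                }
                if (xd->nodes[i].nr_burst > XDMAC_MAX_WORDS) {
                        kfree(xd);
                        return NULL;
                }
        }

        /* Lines 387-389: remember the direction and start at node 0. */
        xd->dir = direction;
        xd->nr_node = sg_len;
        xd->cur_node = 0;

        return vchan_tx_prep(vc, &xd->vd, flags);
}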
415 if (xc->xd) { in uniphier_xdmac_terminate_all()
416 vchan_terminate_vdesc(&xc->xd->vd); in uniphier_xdmac_terminate_all()
417 xc->xd = NULL; in uniphier_xdmac_terminate_all()
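Lines 415-417 show how termination drops the in-flight descriptor: it is handed back to the virt-dma core with vchan_terminate_vdesc() and the channel is marked idle, so a later issue_pending() can start fresh work. A sketch of a terminate_all callback around those lines; the locking, the hardware abort, and the draining of the remaining queued descriptors are standard virt-dma boilerplate assumed here, not shown in the listing:

static int uniphier_xdmac_terminate_all(struct dma_chan *chan)
{
        struct virt_dma_chan *vc = to_virt_chan(chan);
        struct uniphier_xdmac_chan *xc =
                container_of(vc, struct uniphier_xdmac_chan, vc);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&vc->lock, flags);

        if (xc->xd) {
                /* Lines 415-417: give the in-flight descriptor back to the
                 * virt-dma core for freeing and mark the channel idle. */
                vchan_terminate_vdesc(&xc->xd->vd);
                xc->xd = NULL;
                /* ... abort the hardware channel here (assumed) ... */
        }

        /* Drain everything still queued (assumed boilerplate). */
        vchan_get_all_descriptors(vc, &head);

        spin_unlock_irqrestore(&vc->lock, flags);

        vchan_dma_desc_free_list(vc, &head);

        return 0;
}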
443 if (vchan_issue_pending(vc) && !xc->xd) in uniphier_xdmac_issue_pending()
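Line 443 ties the pieces together: pending work is only started when the channel is idle (xc->xd == NULL); if a descriptor is already running, the interrupt handler above picks up the next one when it completes. A sketch of the issue_pending callback around that line, with the usual virt-dma locking assumed:

static void uniphier_xdmac_issue_pending(struct dma_chan *chan)
{
        struct virt_dma_chan *vc = to_virt_chan(chan);
        struct uniphier_xdmac_chan *xc =
                container_of(vc, struct uniphier_xdmac_chan, vc);
        unsigned long flags;

        spin_lock_irqsave(&vc->lock, flags);

        /* Line 443: move newly submitted descriptors to the issued list and
         * kick the hardware only if nothing is currently in flight. */
        if (vchan_issue_pending(vc) && !xc->xd)
                uniphier_xdmac_start(xc);

        spin_unlock_irqrestore(&vc->lock, flags);
}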