Lines matching refs: dwc

93 static struct dw_desc *dwc_first_active(struct dw_dma_chan *dwc)  in dwc_first_active()  argument
95 return list_entry(dwc->active_list.next, struct dw_desc, desc_node); in dwc_first_active()
98 static struct dw_desc *dwc_desc_get(struct dw_dma_chan *dwc) in dwc_desc_get() argument
105 spin_lock_irqsave(&dwc->lock, flags); in dwc_desc_get()
106 list_for_each_entry_safe(desc, _desc, &dwc->free_list, desc_node) { in dwc_desc_get()
112 dev_dbg(chan2dev(&dwc->chan), "desc %p not ACKed\n", desc); in dwc_desc_get()
115 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_desc_get()
117 dev_vdbg(chan2dev(&dwc->chan), "scanned %u descriptors on freelist\n", i); in dwc_desc_get()
122 static void dwc_sync_desc_for_cpu(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_sync_desc_for_cpu() argument
127 dma_sync_single_for_cpu(chan2parent(&dwc->chan), in dwc_sync_desc_for_cpu()
130 dma_sync_single_for_cpu(chan2parent(&dwc->chan), in dwc_sync_desc_for_cpu()
139 static void dwc_desc_put(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_desc_put() argument
146 dwc_sync_desc_for_cpu(dwc, desc); in dwc_desc_put()
148 spin_lock_irqsave(&dwc->lock, flags); in dwc_desc_put()
150 dev_vdbg(chan2dev(&dwc->chan), in dwc_desc_put()
153 list_splice_init(&desc->tx_list, &dwc->free_list); in dwc_desc_put()
154 dev_vdbg(chan2dev(&dwc->chan), "moving desc %p to freelist\n", desc); in dwc_desc_put()
155 list_add(&desc->desc_node, &dwc->free_list); in dwc_desc_put()
156 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_desc_put()
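
The dwc_desc_get()/dwc_desc_put() lines above only show the locking around the per-channel free list. A minimal user-space model of the same recycling scheme, where a descriptor is handed out only once its previous completion has been ACKed and is later returned to the head of the free list, could look like this (the model_* names and the pthread mutex are stand-ins for the driver's structures and spinlock):

/*
 * Simplified model of the dwc_desc_get()/dwc_desc_put() pattern: descriptors
 * are recycled on a per-channel free list under the channel lock, and one is
 * only handed out once the client has ACKed its previous completion.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct model_desc {
	struct model_desc *next;	/* free-list link */
	bool acked;			/* client acknowledged completion */
};

struct model_chan {
	pthread_mutex_t lock;
	struct model_desc *free_list;
};

/* Take the first ACKed descriptor off the free list, or NULL if none. */
static struct model_desc *model_desc_get(struct model_chan *c)
{
	struct model_desc **pp, *d = NULL;

	pthread_mutex_lock(&c->lock);
	for (pp = &c->free_list; *pp; pp = &(*pp)->next) {
		if ((*pp)->acked) {
			d = *pp;
			*pp = d->next;		/* unlink */
			break;
		}
	}
	pthread_mutex_unlock(&c->lock);
	return d;
}

/* Return a descriptor to the head of the free list. */
static void model_desc_put(struct model_chan *c, struct model_desc *d)
{
	pthread_mutex_lock(&c->lock);
	d->next = c->free_list;
	c->free_list = d;
	pthread_mutex_unlock(&c->lock);
}

int main(void)
{
	struct model_desc d = { .next = NULL, .acked = true };
	struct model_chan c = { .lock = PTHREAD_MUTEX_INITIALIZER, .free_list = &d };
	struct model_desc *got = model_desc_get(&c);

	printf("got %p\n", (void *)got);
	if (got)
		model_desc_put(&c, got);
	return 0;
}

The real dwc_desc_put() additionally splices the descriptor's tx_list children back onto the free list before the descriptor itself, as the list_splice_init() line above shows.
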
160 static void dwc_initialize(struct dw_dma_chan *dwc) in dwc_initialize() argument
162 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_initialize()
163 struct dw_dma_slave *dws = dwc->chan.private; in dwc_initialize()
165 u32 cfglo = DWC_CFGL_CH_PRIOR(dwc->priority); in dwc_initialize()
167 if (dwc->initialized == true) in dwc_initialize()
181 channel_writel(dwc, CFG_LO, cfglo); in dwc_initialize()
182 channel_writel(dwc, CFG_HI, cfghi); in dwc_initialize()
185 channel_set_bit(dw, MASK.XFER, dwc->mask); in dwc_initialize()
186 channel_set_bit(dw, MASK.ERROR, dwc->mask); in dwc_initialize()
188 dwc->initialized = true; in dwc_initialize()
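
dwc_initialize() programs CFG_LO/CFG_HI for the channel and then unmasks its transfer and error interrupts with channel_set_bit(). On this controller the enable/mask registers pair each channel bit with a write-enable bit in the upper byte, so touching one channel never disturbs the others; a small model of that encoding (assuming the usual DW_ahb_dmac register layout, helper names invented):

#include <stdint.h>
#include <stdio.h>

/*
 * Model of the write-enable encoding used by registers such as ChEnReg and
 * the interrupt mask registers: bits [7:0] carry the per-channel value,
 * bits [15:8] say which of those bits this write may change.
 */
static uint32_t set_bit_encoding(uint32_t mask)
{
	return (mask << 8) | mask;	/* write-enable the bit and set it */
}

static uint32_t clear_bit_encoding(uint32_t mask)
{
	return mask << 8;		/* write-enable the bit, value = 0 */
}

int main(void)
{
	uint32_t ch2 = 1 << 2;

	printf("set   channel 2: 0x%04x\n", set_bit_encoding(ch2));
	printf("clear channel 2: 0x%04x\n", clear_bit_encoding(ch2));
	return 0;
}
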
194 static void dwc_dostart(struct dw_dma_chan *dwc, struct dw_desc *first) in dwc_dostart() argument
196 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_dostart()
199 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_dostart()
200 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
202 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
204 channel_readl(dwc, SAR), in dwc_dostart()
205 channel_readl(dwc, DAR), in dwc_dostart()
206 channel_readl(dwc, LLP), in dwc_dostart()
207 channel_readl(dwc, CTL_HI), in dwc_dostart()
208 channel_readl(dwc, CTL_LO)); in dwc_dostart()
214 dwc_initialize(dwc); in dwc_dostart()
216 channel_writel(dwc, LLP, first->txd.phys); in dwc_dostart()
217 channel_writel(dwc, CTL_LO, in dwc_dostart()
219 channel_writel(dwc, CTL_HI, 0); in dwc_dostart()
220 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_dostart()
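
dwc_dostart() writes the first descriptor's bus address into the channel's LLP register and sets its enable bit; from there the controller follows the linked-list items on its own. A self-contained sketch of such a chain (the field names roughly follow the dw_lli layout; addresses are invented for the example):

#include <stdint.h>
#include <stdio.h>

/*
 * Hardware linked-list item, roughly the dw_lli layout: each entry gives one
 * block's source, destination and control words plus the bus address of the
 * next entry (0 terminates the chain).
 */
struct lli {
	uint32_t sar;		/* source address */
	uint32_t dar;		/* destination address */
	uint32_t llp;		/* bus address of the next lli, 0 = last */
	uint32_t ctllo;
	uint32_t ctlhi;
};

int main(void)
{
	struct lli chain[3];

	/* Pretend bus address == index * sizeof(struct lli) for the model. */
	for (int i = 0; i < 3; i++) {
		chain[i].sar = 0x10000000u + i * 4096;
		chain[i].dar = 0x20000000u + i * 4096;
		chain[i].llp = (i < 2) ? (i + 1) * sizeof(struct lli) : 0;
		chain[i].ctllo = 0;
		chain[i].ctlhi = 0;
	}

	/* dwc_dostart() would write chain[0]'s bus address to LLP and set the
	 * channel's CH_EN bit; here we simply walk the links. */
	for (uint32_t off = 0; ; ) {
		struct lli *p = &chain[off / sizeof(struct lli)];

		printf("block: sar=0x%08x dar=0x%08x llp=0x%08x\n",
		       p->sar, p->dar, p->llp);
		if (!p->llp)
			break;
		off = p->llp;
	}
	return 0;
}

Note the guard at the top of dwc_dostart(): if CH_EN is still set for the channel, it refuses to start and dumps the channel registers instead, which is what the dev_err() lines above correspond to.
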
226 dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc, in dwc_descriptor_complete() argument
235 dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie); in dwc_descriptor_complete()
237 spin_lock_irqsave(&dwc->lock, flags); in dwc_descriptor_complete()
244 dwc_sync_desc_for_cpu(dwc, desc); in dwc_descriptor_complete()
251 list_splice_init(&desc->tx_list, &dwc->free_list); in dwc_descriptor_complete()
252 list_move(&desc->desc_node, &dwc->free_list); in dwc_descriptor_complete()
254 if (!dwc->chan.private) { in dwc_descriptor_complete()
255 struct device *parent = chan2parent(&dwc->chan); in dwc_descriptor_complete()
274 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_descriptor_complete()
280 static void dwc_complete_all(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_complete_all() argument
286 spin_lock_irqsave(&dwc->lock, flags); in dwc_complete_all()
287 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_complete_all()
288 dev_err(chan2dev(&dwc->chan), in dwc_complete_all()
292 channel_clear_bit(dw, CH_EN, dwc->mask); in dwc_complete_all()
293 while (dma_readl(dw, CH_EN) & dwc->mask) in dwc_complete_all()
301 list_splice_init(&dwc->active_list, &list); in dwc_complete_all()
302 if (!list_empty(&dwc->queue)) { in dwc_complete_all()
303 list_move(dwc->queue.next, &dwc->active_list); in dwc_complete_all()
304 dwc_dostart(dwc, dwc_first_active(dwc)); in dwc_complete_all()
307 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_complete_all()
310 dwc_descriptor_complete(dwc, desc, true); in dwc_complete_all()
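
dwc_complete_all() detaches everything on active_list, restarts the channel from the software queue while still holding the lock, and only runs dwc_descriptor_complete() on the detached descriptors after the lock has been dropped, so callbacks are free to resubmit. A simplified model of that ordering (a pthread mutex stands in for the channel spinlock; unlike the driver, which moves only the first queued descriptor onto active_list, this model promotes the whole queue):

#include <pthread.h>
#include <stdio.h>

struct job {
	struct job *next;
	void (*callback)(struct job *);
};

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static struct job *active_jobs, *queued_jobs;

static void done(struct job *j)
{
	printf("job %p complete\n", (void *)j);
}

static void complete_all(void)
{
	struct job *list, *j;

	pthread_mutex_lock(&lock);
	list = active_jobs;		/* detach the finished work */
	active_jobs = queued_jobs;	/* promote the queued work ... */
	queued_jobs = NULL;
	/* ... the driver would call dwc_dostart() here to restart the HW */
	pthread_mutex_unlock(&lock);

	/* run completion callbacks only after dropping the lock */
	while ((j = list)) {
		list = j->next;
		j->callback(j);
	}
}

int main(void)
{
	struct job a = { .next = NULL, .callback = done };
	struct job b = { .next = NULL, .callback = done };

	active_jobs = &a;
	queued_jobs = &b;
	complete_all();
	complete_all();
	return 0;
}
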
313 static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_scan_descriptors() argument
321 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
322 llp = channel_readl(dwc, LLP); in dwc_scan_descriptors()
325 if (status_xfer & dwc->mask) { in dwc_scan_descriptors()
327 dma_writel(dw, CLEAR.XFER, dwc->mask); in dwc_scan_descriptors()
328 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
330 dwc_complete_all(dw, dwc); in dwc_scan_descriptors()
334 if (list_empty(&dwc->active_list)) { in dwc_scan_descriptors()
335 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
339 dev_vdbg(chan2dev(&dwc->chan), "scan_descriptors: llp=0x%x\n", llp); in dwc_scan_descriptors()
341 list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) { in dwc_scan_descriptors()
344 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
351 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
358 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
366 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
367 dwc_descriptor_complete(dwc, desc, true); in dwc_scan_descriptors()
368 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
371 dev_err(chan2dev(&dwc->chan), in dwc_scan_descriptors()
375 channel_clear_bit(dw, CH_EN, dwc->mask); in dwc_scan_descriptors()
376 while (dma_readl(dw, CH_EN) & dwc->mask) in dwc_scan_descriptors()
379 if (!list_empty(&dwc->queue)) { in dwc_scan_descriptors()
380 list_move(dwc->queue.next, &dwc->active_list); in dwc_scan_descriptors()
381 dwc_dostart(dwc, dwc_first_active(dwc)); in dwc_scan_descriptors()
383 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
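
dwc_scan_descriptors() reads the channel's LLP register and walks active_list: a descriptor still being fetched by the controller stops the scan, while anything before it must have completed and is handed to dwc_descriptor_complete(). A heavily simplified model of that comparison (addresses invented; the real scan also checks each child on desc->tx_list and handles the all-done case via dwc_complete_all()):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct mdesc {
	uint32_t phys;		/* bus address of this descriptor's lli */
	bool done;
};

int main(void)
{
	struct mdesc active[3] = {
		{ .phys = 0x1000 }, { .phys = 0x1020 }, { .phys = 0x1040 },
	};
	uint32_t llp = 0x1040;	/* controller is currently on the third lli */

	for (int i = 0; i < 3; i++) {
		if (active[i].phys == llp)
			break;	/* still in flight, stop scanning */
		active[i].done = true;
	}
	for (int i = 0; i < 3; i++)
		printf("desc %d: %s\n", i, active[i].done ? "complete" : "pending");
	return 0;
}
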
386 static void dwc_dump_lli(struct dw_dma_chan *dwc, struct dw_lli *lli) in dwc_dump_lli() argument
388 dev_printk(KERN_CRIT, chan2dev(&dwc->chan), in dwc_dump_lli()
394 static void dwc_handle_error(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_handle_error() argument
400 dwc_scan_descriptors(dw, dwc); in dwc_handle_error()
402 spin_lock_irqsave(&dwc->lock, flags); in dwc_handle_error()
409 bad_desc = dwc_first_active(dwc); in dwc_handle_error()
411 list_move(dwc->queue.next, dwc->active_list.prev); in dwc_handle_error()
414 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dwc_handle_error()
415 if (!list_empty(&dwc->active_list)) in dwc_handle_error()
416 dwc_dostart(dwc, dwc_first_active(dwc)); in dwc_handle_error()
425 dev_printk(KERN_CRIT, chan2dev(&dwc->chan), in dwc_handle_error()
427 dev_printk(KERN_CRIT, chan2dev(&dwc->chan), in dwc_handle_error()
429 dwc_dump_lli(dwc, &bad_desc->lli); in dwc_handle_error()
431 dwc_dump_lli(dwc, &child->lli); in dwc_handle_error()
433 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_handle_error()
436 dwc_descriptor_complete(dwc, bad_desc, true); in dwc_handle_error()
443 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_get_src_addr() local
444 return channel_readl(dwc, SAR); in dw_dma_get_src_addr()
450 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_get_dst_addr() local
451 return channel_readl(dwc, DAR); in dw_dma_get_dst_addr()
456 static void dwc_handle_cyclic(struct dw_dma *dw, struct dw_dma_chan *dwc, in dwc_handle_cyclic() argument
461 if (dwc->mask) { in dwc_handle_cyclic()
465 dev_vdbg(chan2dev(&dwc->chan), "new cyclic period llp 0x%08x\n", in dwc_handle_cyclic()
466 channel_readl(dwc, LLP)); in dwc_handle_cyclic()
468 callback = dwc->cdesc->period_callback; in dwc_handle_cyclic()
469 callback_param = dwc->cdesc->period_callback_param; in dwc_handle_cyclic()
479 if (unlikely(status_err & dwc->mask) || in dwc_handle_cyclic()
480 unlikely(status_xfer & dwc->mask)) { in dwc_handle_cyclic()
483 dev_err(chan2dev(&dwc->chan), "cyclic DMA unexpected %s " in dwc_handle_cyclic()
487 spin_lock_irqsave(&dwc->lock, flags); in dwc_handle_cyclic()
489 dev_err(chan2dev(&dwc->chan), in dwc_handle_cyclic()
491 channel_readl(dwc, SAR), in dwc_handle_cyclic()
492 channel_readl(dwc, DAR), in dwc_handle_cyclic()
493 channel_readl(dwc, LLP), in dwc_handle_cyclic()
494 channel_readl(dwc, CTL_HI), in dwc_handle_cyclic()
495 channel_readl(dwc, CTL_LO)); in dwc_handle_cyclic()
497 channel_clear_bit(dw, CH_EN, dwc->mask); in dwc_handle_cyclic()
498 while (dma_readl(dw, CH_EN) & dwc->mask) in dwc_handle_cyclic()
502 channel_writel(dwc, LLP, 0); in dwc_handle_cyclic()
503 channel_writel(dwc, CTL_LO, 0); in dwc_handle_cyclic()
504 channel_writel(dwc, CTL_HI, 0); in dwc_handle_cyclic()
506 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dwc_handle_cyclic()
507 dma_writel(dw, CLEAR.XFER, dwc->mask); in dwc_handle_cyclic()
509 for (i = 0; i < dwc->cdesc->periods; i++) in dwc_handle_cyclic()
510 dwc_dump_lli(dwc, &dwc->cdesc->desc[i]->lli); in dwc_handle_cyclic()
512 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_handle_cyclic()
521 struct dw_dma_chan *dwc; in dw_dma_tasklet() local
532 dwc = &dw->chan[i]; in dw_dma_tasklet()
533 if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) in dw_dma_tasklet()
534 dwc_handle_cyclic(dw, dwc, status_err, status_xfer); in dw_dma_tasklet()
536 dwc_handle_error(dw, dwc); in dw_dma_tasklet()
538 dwc_scan_descriptors(dw, dwc); in dw_dma_tasklet()
586 struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan); in dwc_tx_submit() local
590 spin_lock_irqsave(&dwc->lock, flags); in dwc_tx_submit()
598 if (list_empty(&dwc->active_list)) { in dwc_tx_submit()
601 list_add_tail(&desc->desc_node, &dwc->active_list); in dwc_tx_submit()
602 dwc_dostart(dwc, dwc_first_active(dwc)); in dwc_tx_submit()
607 list_add_tail(&desc->desc_node, &dwc->queue); in dwc_tx_submit()
610 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_tx_submit()
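
dwc_tx_submit() assigns the transaction a cookie and then either starts it immediately, when active_list is empty, or appends it to the software queue for the completion path to pick up later. A toy model of that decision (struct and function names are illustrative only):

#include <stdio.h>

struct chan_model {
	int last_cookie;
	int active;		/* descriptors on active_list */
	int queued;		/* descriptors on the software queue */
};

static int submit(struct chan_model *c)
{
	int cookie = ++c->last_cookie;

	if (c->active == 0) {
		c->active++;	/* dwc_dostart() would kick the hardware here */
		printf("cookie %d: started immediately\n", cookie);
	} else {
		c->queued++;
		printf("cookie %d: queued behind %d active\n", cookie, c->active);
	}
	return cookie;
}

int main(void)
{
	struct chan_model c = { 0 };

	submit(&c);
	submit(&c);
	return 0;
}
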
619 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_dma_memcpy() local
662 desc = dwc_desc_get(dwc); in dwc_prep_dma_memcpy()
700 dwc_desc_put(dwc, first); in dwc_prep_dma_memcpy()
709 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_slave_sg() local
711 struct dma_slave_config *sconfig = &dwc->dma_sconfig; in dwc_prep_slave_sg()
758 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
825 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
881 dwc_desc_put(dwc, first); in dwc_prep_slave_sg()
904 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in set_runtime_config() local
910 memcpy(&dwc->dma_sconfig, sconfig, sizeof(*sconfig)); in set_runtime_config()
912 convert_burst(&dwc->dma_sconfig.src_maxburst); in set_runtime_config()
913 convert_burst(&dwc->dma_sconfig.dst_maxburst); in set_runtime_config()
921 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_control() local
929 spin_lock_irqsave(&dwc->lock, flags); in dwc_control()
931 cfglo = channel_readl(dwc, CFG_LO); in dwc_control()
932 channel_writel(dwc, CFG_LO, cfglo | DWC_CFGL_CH_SUSP); in dwc_control()
933 while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY)) in dwc_control()
936 dwc->paused = true; in dwc_control()
937 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_control()
939 if (!dwc->paused) in dwc_control()
942 spin_lock_irqsave(&dwc->lock, flags); in dwc_control()
944 cfglo = channel_readl(dwc, CFG_LO); in dwc_control()
945 channel_writel(dwc, CFG_LO, cfglo & ~DWC_CFGL_CH_SUSP); in dwc_control()
946 dwc->paused = false; in dwc_control()
948 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_control()
950 spin_lock_irqsave(&dwc->lock, flags); in dwc_control()
952 channel_clear_bit(dw, CH_EN, dwc->mask); in dwc_control()
953 while (dma_readl(dw, CH_EN) & dwc->mask) in dwc_control()
956 dwc->paused = false; in dwc_control()
959 list_splice_init(&dwc->queue, &list); in dwc_control()
960 list_splice_init(&dwc->active_list, &list); in dwc_control()
962 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_control()
966 dwc_descriptor_complete(dwc, desc, false); in dwc_control()
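
In dwc_control(), a pause request sets the channel-suspend bit in CFG_LO and waits for the FIFO to drain, resume clears the bit again, and terminate disables the channel and completes every queued and active descriptor without invoking callbacks. A user-space model of just the pause/resume part (the bit positions are stand-ins for DWC_CFGL_CH_SUSP and DWC_CFGL_FIFO_EMPTY):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define CFG_CH_SUSP	(1u << 8)	/* stand-in for DWC_CFGL_CH_SUSP */
#define CFG_FIFO_EMPTY	(1u << 9)	/* stand-in for DWC_CFGL_FIFO_EMPTY */

static uint32_t cfg_lo = CFG_FIFO_EMPTY;	/* pretend the FIFO is already empty */
static bool paused;

static void pause_chan(void)
{
	cfg_lo |= CFG_CH_SUSP;
	while (!(cfg_lo & CFG_FIFO_EMPTY))
		;			/* the driver busy-waits here too */
	paused = true;
}

static void resume_chan(void)
{
	if (!paused)
		return;
	cfg_lo &= ~CFG_CH_SUSP;
	paused = false;
}

int main(void)
{
	pause_chan();
	printf("paused=%d cfg_lo=0x%x\n", paused, cfg_lo);
	resume_chan();
	printf("paused=%d cfg_lo=0x%x\n", paused, cfg_lo);
	return 0;
}
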
981 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_tx_status() local
986 dwc_scan_descriptors(to_dw_dma(chan->device), dwc); in dwc_tx_status()
992 dma_set_residue(txstate, dwc_first_active(dwc)->len); in dwc_tx_status()
994 if (dwc->paused) in dwc_tx_status()
1002 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_issue_pending() local
1004 if (!list_empty(&dwc->queue)) in dwc_issue_pending()
1005 dwc_scan_descriptors(to_dw_dma(chan->device), dwc); in dwc_issue_pending()
1010 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_alloc_chan_resources() local
1019 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_alloc_chan_resources()
1032 spin_lock_irqsave(&dwc->lock, flags); in dwc_alloc_chan_resources()
1033 i = dwc->descs_allocated; in dwc_alloc_chan_resources()
1034 while (dwc->descs_allocated < NR_DESCS_PER_CHANNEL) { in dwc_alloc_chan_resources()
1035 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_alloc_chan_resources()
1041 spin_lock_irqsave(&dwc->lock, flags); in dwc_alloc_chan_resources()
1051 dwc_desc_put(dwc, desc); in dwc_alloc_chan_resources()
1053 spin_lock_irqsave(&dwc->lock, flags); in dwc_alloc_chan_resources()
1054 i = ++dwc->descs_allocated; in dwc_alloc_chan_resources()
1057 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_alloc_chan_resources()
1067 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_free_chan_resources() local
1074 dwc->descs_allocated); in dwc_free_chan_resources()
1077 BUG_ON(!list_empty(&dwc->active_list)); in dwc_free_chan_resources()
1078 BUG_ON(!list_empty(&dwc->queue)); in dwc_free_chan_resources()
1079 BUG_ON(dma_readl(to_dw_dma(chan->device), CH_EN) & dwc->mask); in dwc_free_chan_resources()
1081 spin_lock_irqsave(&dwc->lock, flags); in dwc_free_chan_resources()
1082 list_splice_init(&dwc->free_list, &list); in dwc_free_chan_resources()
1083 dwc->descs_allocated = 0; in dwc_free_chan_resources()
1084 dwc->initialized = false; in dwc_free_chan_resources()
1087 channel_clear_bit(dw, MASK.XFER, dwc->mask); in dwc_free_chan_resources()
1088 channel_clear_bit(dw, MASK.ERROR, dwc->mask); in dwc_free_chan_resources()
1090 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_free_chan_resources()
1113 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_cyclic_start() local
1114 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dw_dma_cyclic_start()
1117 if (!test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) { in dw_dma_cyclic_start()
1118 dev_err(chan2dev(&dwc->chan), "missing prep for cyclic DMA\n"); in dw_dma_cyclic_start()
1122 spin_lock_irqsave(&dwc->lock, flags); in dw_dma_cyclic_start()
1125 if (dma_readl(dw, CH_EN) & dwc->mask) { in dw_dma_cyclic_start()
1126 dev_err(chan2dev(&dwc->chan), in dw_dma_cyclic_start()
1128 dev_err(chan2dev(&dwc->chan), in dw_dma_cyclic_start()
1130 channel_readl(dwc, SAR), in dw_dma_cyclic_start()
1131 channel_readl(dwc, DAR), in dw_dma_cyclic_start()
1132 channel_readl(dwc, LLP), in dw_dma_cyclic_start()
1133 channel_readl(dwc, CTL_HI), in dw_dma_cyclic_start()
1134 channel_readl(dwc, CTL_LO)); in dw_dma_cyclic_start()
1135 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_start()
1139 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dw_dma_cyclic_start()
1140 dma_writel(dw, CLEAR.XFER, dwc->mask); in dw_dma_cyclic_start()
1143 channel_writel(dwc, LLP, dwc->cdesc->desc[0]->txd.phys); in dw_dma_cyclic_start()
1144 channel_writel(dwc, CTL_LO, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN); in dw_dma_cyclic_start()
1145 channel_writel(dwc, CTL_HI, 0); in dw_dma_cyclic_start()
1147 channel_set_bit(dw, CH_EN, dwc->mask); in dw_dma_cyclic_start()
1149 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_start()
1163 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_cyclic_stop() local
1164 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dw_dma_cyclic_stop()
1167 spin_lock_irqsave(&dwc->lock, flags); in dw_dma_cyclic_stop()
1169 channel_clear_bit(dw, CH_EN, dwc->mask); in dw_dma_cyclic_stop()
1170 while (dma_readl(dw, CH_EN) & dwc->mask) in dw_dma_cyclic_stop()
1173 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_stop()
1192 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_cyclic_prep() local
1193 struct dma_slave_config *sconfig = &dwc->dma_sconfig; in dw_dma_cyclic_prep()
1204 spin_lock_irqsave(&dwc->lock, flags); in dw_dma_cyclic_prep()
1205 if (!list_empty(&dwc->queue) || !list_empty(&dwc->active_list)) { in dw_dma_cyclic_prep()
1206 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_prep()
1207 dev_dbg(chan2dev(&dwc->chan), in dw_dma_cyclic_prep()
1212 was_cyclic = test_and_set_bit(DW_DMA_IS_CYCLIC, &dwc->flags); in dw_dma_cyclic_prep()
1213 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_prep()
1215 dev_dbg(chan2dev(&dwc->chan), in dw_dma_cyclic_prep()
1253 desc = dwc_desc_get(dwc); in dw_dma_cyclic_prep()
1310 dev_dbg(chan2dev(&dwc->chan), "cyclic prepared buf 0x%08x len %zu " in dw_dma_cyclic_prep()
1315 dwc->cdesc = cdesc; in dw_dma_cyclic_prep()
1321 dwc_desc_put(dwc, cdesc->desc[i]); in dw_dma_cyclic_prep()
1325 clear_bit(DW_DMA_IS_CYCLIC, &dwc->flags); in dw_dma_cyclic_prep()
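
dw_dma_cyclic_prep() builds one descriptor per period and links the last one back to the first, so the controller keeps looping over the buffer until dw_dma_cyclic_stop() is called, raising a period callback for each completed block. A small model of that ring (an array index stands in for each entry's bus address):

#include <stdint.h>
#include <stdio.h>

struct lli {
	uint32_t sar, dar, llp;
};

int main(void)
{
	enum { PERIODS = 4, PERIOD_LEN = 0x100 };
	struct lli ring[PERIODS];
	uint32_t buf = 0x30000000u;	/* invented bus address of the buffer */

	for (int i = 0; i < PERIODS; i++) {
		ring[i].sar = buf + i * PERIOD_LEN;
		ring[i].dar = 0x40000000u;	/* e.g. a peripheral FIFO */
		/* next period's index stands in for its bus address; the
		 * last entry wraps back to period 0 */
		ring[i].llp = (i + 1) % PERIODS;
	}
	for (int i = 0; i < PERIODS; i++)
		printf("period %d -> next %u\n", i, ring[i].llp);
	return 0;
}
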
1336 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_cyclic_free() local
1337 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dw_dma_cyclic_free()
1338 struct dw_cyclic_desc *cdesc = dwc->cdesc; in dw_dma_cyclic_free()
1342 dev_dbg(chan2dev(&dwc->chan), "cyclic free\n"); in dw_dma_cyclic_free()
1347 spin_lock_irqsave(&dwc->lock, flags); in dw_dma_cyclic_free()
1349 channel_clear_bit(dw, CH_EN, dwc->mask); in dw_dma_cyclic_free()
1350 while (dma_readl(dw, CH_EN) & dwc->mask) in dw_dma_cyclic_free()
1353 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dw_dma_cyclic_free()
1354 dma_writel(dw, CLEAR.XFER, dwc->mask); in dw_dma_cyclic_free()
1356 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_free()
1359 dwc_desc_put(dwc, cdesc->desc[i]); in dw_dma_cyclic_free()
1364 clear_bit(DW_DMA_IS_CYCLIC, &dwc->flags); in dw_dma_cyclic_free()
1449 struct dw_dma_chan *dwc = &dw->chan[i]; in dw_probe() local
1451 dwc->chan.device = &dw->dma; in dw_probe()
1452 dma_cookie_init(&dwc->chan); in dw_probe()
1454 list_add_tail(&dwc->chan.device_node, in dw_probe()
1457 list_add(&dwc->chan.device_node, &dw->dma.channels); in dw_probe()
1461 dwc->priority = pdata->nr_channels - i - 1; in dw_probe()
1463 dwc->priority = i; in dw_probe()
1465 dwc->ch_regs = &__dw_regs(dw)->CHAN[i]; in dw_probe()
1466 spin_lock_init(&dwc->lock); in dw_probe()
1467 dwc->mask = 1 << i; in dw_probe()
1469 INIT_LIST_HEAD(&dwc->active_list); in dw_probe()
1470 INIT_LIST_HEAD(&dwc->queue); in dw_probe()
1471 INIT_LIST_HEAD(&dwc->free_list); in dw_probe()
1473 channel_clear_bit(dw, CH_EN, dwc->mask); in dw_probe()
1528 struct dw_dma_chan *dwc, *_dwc; in dw_remove() local
1537 list_for_each_entry_safe(dwc, _dwc, &dw->dma.channels, in dw_remove()
1539 list_del(&dwc->chan.device_node); in dw_remove()
1540 channel_clear_bit(dw, CH_EN, dwc->mask); in dw_remove()