Lines matching refs: dwc (references to the struct dw_dma_chan *dwc channel context in the Synopsys DesignWare DMA core driver)

48 static struct dw_desc *dwc_first_active(struct dw_dma_chan *dwc)  in dwc_first_active()  argument
50 return to_dw_desc(dwc->active_list.next); in dwc_first_active()
56 struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan); in dwc_tx_submit() local
60 spin_lock_irqsave(&dwc->lock, flags); in dwc_tx_submit()
69 list_add_tail(&desc->desc_node, &dwc->queue); in dwc_tx_submit()
70 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_tx_submit()
77 static struct dw_desc *dwc_desc_get(struct dw_dma_chan *dwc) in dwc_desc_get() argument
79 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_desc_get()
87 dwc->descs_allocated++; in dwc_desc_get()
89 dma_async_tx_descriptor_init(&desc->txd, &dwc->chan); in dwc_desc_get()
96 static void dwc_desc_put(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_desc_put() argument
98 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_desc_put()
107 dwc->descs_allocated--; in dwc_desc_put()
111 dwc->descs_allocated--; in dwc_desc_put()
114 static void dwc_initialize(struct dw_dma_chan *dwc) in dwc_initialize() argument
116 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_initialize()
118 dw->initialize_chan(dwc); in dwc_initialize()
121 channel_set_bit(dw, MASK.XFER, dwc->mask); in dwc_initialize()
122 channel_set_bit(dw, MASK.ERROR, dwc->mask); in dwc_initialize()
127 static inline void dwc_dump_chan_regs(struct dw_dma_chan *dwc) in dwc_dump_chan_regs() argument
129 dev_err(chan2dev(&dwc->chan), in dwc_dump_chan_regs()
131 channel_readl(dwc, SAR), in dwc_dump_chan_regs()
132 channel_readl(dwc, DAR), in dwc_dump_chan_regs()
133 channel_readl(dwc, LLP), in dwc_dump_chan_regs()
134 channel_readl(dwc, CTL_HI), in dwc_dump_chan_regs()
135 channel_readl(dwc, CTL_LO)); in dwc_dump_chan_regs()
138 static inline void dwc_chan_disable(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_chan_disable() argument
140 channel_clear_bit(dw, CH_EN, dwc->mask); in dwc_chan_disable()
141 while (dma_readl(dw, CH_EN) & dwc->mask) in dwc_chan_disable()
148 static inline void dwc_do_single_block(struct dw_dma_chan *dwc, in dwc_do_single_block() argument
151 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_do_single_block()
160 channel_writel(dwc, SAR, lli_read(desc, sar)); in dwc_do_single_block()
161 channel_writel(dwc, DAR, lli_read(desc, dar)); in dwc_do_single_block()
162 channel_writel(dwc, CTL_LO, ctllo); in dwc_do_single_block()
163 channel_writel(dwc, CTL_HI, lli_read(desc, ctlhi)); in dwc_do_single_block()
164 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_do_single_block()
167 dwc->tx_node_active = dwc->tx_node_active->next; in dwc_do_single_block()
171 static void dwc_dostart(struct dw_dma_chan *dwc, struct dw_desc *first) in dwc_dostart() argument
173 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_dostart()
174 u8 lms = DWC_LLP_LMS(dwc->dws.m_master); in dwc_dostart()
178 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_dostart()
179 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
182 dwc_dump_chan_regs(dwc); in dwc_dostart()
188 if (dwc->nollp) { in dwc_dostart()
190 &dwc->flags); in dwc_dostart()
192 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
197 dwc_initialize(dwc); in dwc_dostart()
200 dwc->tx_node_active = &first->tx_list; in dwc_dostart()
203 dwc_do_single_block(dwc, first); in dwc_dostart()
208 dwc_initialize(dwc); in dwc_dostart()
210 channel_writel(dwc, LLP, first->txd.phys | lms); in dwc_dostart()
211 channel_writel(dwc, CTL_LO, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN); in dwc_dostart()
212 channel_writel(dwc, CTL_HI, 0); in dwc_dostart()
213 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_dostart()
216 static void dwc_dostart_first_queued(struct dw_dma_chan *dwc) in dwc_dostart_first_queued() argument
220 if (list_empty(&dwc->queue)) in dwc_dostart_first_queued()
223 list_move(dwc->queue.next, &dwc->active_list); in dwc_dostart_first_queued()
224 desc = dwc_first_active(dwc); in dwc_dostart_first_queued()
225 dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__, desc->txd.cookie); in dwc_dostart_first_queued()
226 dwc_dostart(dwc, desc); in dwc_dostart_first_queued()
232 dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc, in dwc_descriptor_complete() argument
240 dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie); in dwc_descriptor_complete()
242 spin_lock_irqsave(&dwc->lock, flags); in dwc_descriptor_complete()
253 dwc_desc_put(dwc, desc); in dwc_descriptor_complete()
254 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_descriptor_complete()
259 static void dwc_complete_all(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_complete_all() argument
265 spin_lock_irqsave(&dwc->lock, flags); in dwc_complete_all()
266 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_complete_all()
267 dev_err(chan2dev(&dwc->chan), in dwc_complete_all()
271 dwc_chan_disable(dw, dwc); in dwc_complete_all()
278 list_splice_init(&dwc->active_list, &list); in dwc_complete_all()
279 dwc_dostart_first_queued(dwc); in dwc_complete_all()
281 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_complete_all()
284 dwc_descriptor_complete(dwc, desc, true); in dwc_complete_all()
288 static inline u32 dwc_get_sent(struct dw_dma_chan *dwc) in dwc_get_sent() argument
290 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_get_sent()
291 u32 ctlhi = channel_readl(dwc, CTL_HI); in dwc_get_sent()
292 u32 ctllo = channel_readl(dwc, CTL_LO); in dwc_get_sent()
294 return dw->block2bytes(dwc, ctlhi, ctllo >> 4 & 7); in dwc_get_sent()
297 static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_scan_descriptors() argument
305 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
306 llp = channel_readl(dwc, LLP); in dwc_scan_descriptors()
309 if (status_xfer & dwc->mask) { in dwc_scan_descriptors()
311 dma_writel(dw, CLEAR.XFER, dwc->mask); in dwc_scan_descriptors()
313 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) { in dwc_scan_descriptors()
314 struct list_head *head, *active = dwc->tx_node_active; in dwc_scan_descriptors()
320 desc = dwc_first_active(dwc); in dwc_scan_descriptors()
333 dwc_do_single_block(dwc, child); in dwc_scan_descriptors()
335 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
340 clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); in dwc_scan_descriptors()
343 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
345 dwc_complete_all(dw, dwc); in dwc_scan_descriptors()
349 if (list_empty(&dwc->active_list)) { in dwc_scan_descriptors()
350 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
354 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) { in dwc_scan_descriptors()
355 dev_vdbg(chan2dev(&dwc->chan), "%s: soft LLP mode\n", __func__); in dwc_scan_descriptors()
356 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
360 dev_vdbg(chan2dev(&dwc->chan), "%s: llp=%pad\n", __func__, &llp); in dwc_scan_descriptors()
362 list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) { in dwc_scan_descriptors()
368 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
375 desc->residue -= dwc_get_sent(dwc); in dwc_scan_descriptors()
376 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
384 desc->residue -= dwc_get_sent(dwc); in dwc_scan_descriptors()
385 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
395 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
396 dwc_descriptor_complete(dwc, desc, true); in dwc_scan_descriptors()
397 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
400 dev_err(chan2dev(&dwc->chan), in dwc_scan_descriptors()
404 dwc_chan_disable(dw, dwc); in dwc_scan_descriptors()
406 dwc_dostart_first_queued(dwc); in dwc_scan_descriptors()
407 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
410 static inline void dwc_dump_lli(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_dump_lli() argument
412 dev_crit(chan2dev(&dwc->chan), " desc: s0x%x d0x%x l0x%x c0x%x:%x\n", in dwc_dump_lli()
420 static void dwc_handle_error(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_handle_error() argument
426 dwc_scan_descriptors(dw, dwc); in dwc_handle_error()
428 spin_lock_irqsave(&dwc->lock, flags); in dwc_handle_error()
435 bad_desc = dwc_first_active(dwc); in dwc_handle_error()
437 list_move(dwc->queue.next, dwc->active_list.prev); in dwc_handle_error()
440 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dwc_handle_error()
441 if (!list_empty(&dwc->active_list)) in dwc_handle_error()
442 dwc_dostart(dwc, dwc_first_active(dwc)); in dwc_handle_error()
451 dev_WARN(chan2dev(&dwc->chan), "Bad descriptor submitted for DMA!\n" in dwc_handle_error()
453 dwc_dump_lli(dwc, bad_desc); in dwc_handle_error()
455 dwc_dump_lli(dwc, child); in dwc_handle_error()
457 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_handle_error()
460 dwc_descriptor_complete(dwc, bad_desc, true); in dwc_handle_error()
466 struct dw_dma_chan *dwc; in dw_dma_tasklet() local
477 dwc = &dw->chan[i]; in dw_dma_tasklet()
478 if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) in dw_dma_tasklet()
481 dwc_handle_error(dw, dwc); in dw_dma_tasklet()
483 dwc_scan_descriptors(dw, dwc); in dw_dma_tasklet()
540 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_dma_memcpy() local
547 u8 m_master = dwc->dws.m_master; in dwc_prep_dma_memcpy()
563 dwc->direction = DMA_MEM_TO_MEM; in dwc_prep_dma_memcpy()
567 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_dma_memcpy()
576 desc = dwc_desc_get(dwc); in dwc_prep_dma_memcpy()
580 ctlhi = dw->bytes2block(dwc, len - offset, src_width, &xfer_count); in dwc_prep_dma_memcpy()
609 dwc_desc_put(dwc, first); in dwc_prep_dma_memcpy()
618 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_slave_sg() local
620 struct dma_slave_config *sconfig = &dwc->dma_sconfig; in dwc_prep_slave_sg()
624 u8 m_master = dwc->dws.m_master; in dwc_prep_slave_sg()
639 dwc->direction = direction; in dwc_prep_slave_sg()
647 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_slave_sg()
666 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
670 ctlhi = dw->bytes2block(dwc, len, mem_width, &dlen); in dwc_prep_slave_sg()
697 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_slave_sg()
714 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
718 ctlhi = dw->bytes2block(dwc, len, reg_width, &dlen); in dwc_prep_slave_sg()
760 dwc_desc_put(dwc, first); in dwc_prep_slave_sg()
766 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_filter() local
773 if (dws->channels && !(dws->channels & dwc->mask)) in dw_dma_filter()
777 memcpy(&dwc->dws, dws, sizeof(struct dw_dma_slave)); in dw_dma_filter()
785 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_config() local
788 memcpy(&dwc->dma_sconfig, sconfig, sizeof(*sconfig)); in dwc_config()
790 dwc->dma_sconfig.src_maxburst = in dwc_config()
791 clamp(dwc->dma_sconfig.src_maxburst, 0U, dwc->max_burst); in dwc_config()
792 dwc->dma_sconfig.dst_maxburst = in dwc_config()
793 clamp(dwc->dma_sconfig.dst_maxburst, 0U, dwc->max_burst); in dwc_config()
795 dw->encode_maxburst(dwc, &dwc->dma_sconfig.src_maxburst); in dwc_config()
796 dw->encode_maxburst(dwc, &dwc->dma_sconfig.dst_maxburst); in dwc_config()
801 static void dwc_chan_pause(struct dw_dma_chan *dwc, bool drain) in dwc_chan_pause() argument
803 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_chan_pause()
806 dw->suspend_chan(dwc, drain); in dwc_chan_pause()
808 while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--) in dwc_chan_pause()
811 set_bit(DW_DMA_IS_PAUSED, &dwc->flags); in dwc_chan_pause()
816 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_pause() local
819 spin_lock_irqsave(&dwc->lock, flags); in dwc_pause()
820 dwc_chan_pause(dwc, false); in dwc_pause()
821 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_pause()
826 static inline void dwc_chan_resume(struct dw_dma_chan *dwc, bool drain) in dwc_chan_resume() argument
828 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_chan_resume()
830 dw->resume_chan(dwc, drain); in dwc_chan_resume()
832 clear_bit(DW_DMA_IS_PAUSED, &dwc->flags); in dwc_chan_resume()
837 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_resume() local
840 spin_lock_irqsave(&dwc->lock, flags); in dwc_resume()
842 if (test_bit(DW_DMA_IS_PAUSED, &dwc->flags)) in dwc_resume()
843 dwc_chan_resume(dwc, false); in dwc_resume()
845 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_resume()
852 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_terminate_all() local
858 spin_lock_irqsave(&dwc->lock, flags); in dwc_terminate_all()
860 clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); in dwc_terminate_all()
862 dwc_chan_pause(dwc, true); in dwc_terminate_all()
864 dwc_chan_disable(dw, dwc); in dwc_terminate_all()
866 dwc_chan_resume(dwc, true); in dwc_terminate_all()
869 list_splice_init(&dwc->queue, &list); in dwc_terminate_all()
870 list_splice_init(&dwc->active_list, &list); in dwc_terminate_all()
872 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_terminate_all()
876 dwc_descriptor_complete(dwc, desc, false); in dwc_terminate_all()
881 static struct dw_desc *dwc_find_desc(struct dw_dma_chan *dwc, dma_cookie_t c) in dwc_find_desc() argument
885 list_for_each_entry(desc, &dwc->active_list, desc_node) in dwc_find_desc()
892 static u32 dwc_get_residue(struct dw_dma_chan *dwc, dma_cookie_t cookie) in dwc_get_residue() argument
898 spin_lock_irqsave(&dwc->lock, flags); in dwc_get_residue()
900 desc = dwc_find_desc(dwc, cookie); in dwc_get_residue()
902 if (desc == dwc_first_active(dwc)) { in dwc_get_residue()
904 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags) && residue) in dwc_get_residue()
905 residue -= dwc_get_sent(dwc); in dwc_get_residue()
913 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_get_residue()
922 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_tx_status() local
929 dwc_scan_descriptors(to_dw_dma(chan->device), dwc); in dwc_tx_status()
935 dma_set_residue(txstate, dwc_get_residue(dwc, cookie)); in dwc_tx_status()
937 if (test_bit(DW_DMA_IS_PAUSED, &dwc->flags) && ret == DMA_IN_PROGRESS) in dwc_tx_status()
945 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_issue_pending() local
948 spin_lock_irqsave(&dwc->lock, flags); in dwc_issue_pending()
949 if (list_empty(&dwc->active_list)) in dwc_issue_pending()
950 dwc_dostart_first_queued(dwc); in dwc_issue_pending()
951 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_issue_pending()
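
Taken together, the dwc_tx_submit(), dwc_dostart_first_queued() and dwc_issue_pending() references above describe a single hand-off: a submitted descriptor is parked on dwc->queue under dwc->lock, and only promoted to dwc->active_list and written to the hardware once the client issues pending work and the channel is idle. The sketch below condenses that pattern; it is schematic, not the driver source: the sketch_*() names are invented here, and txd_to_dw_desc() / dma_cookie_assign() are assumed dmaengine/driver helpers that do not appear in the listing.

static dma_cookie_t sketch_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan);
	struct dw_desc *desc = txd_to_dw_desc(tx);	/* assumed helper */
	dma_cookie_t cookie;
	unsigned long flags;

	spin_lock_irqsave(&dwc->lock, flags);
	cookie = dma_cookie_assign(tx);			/* assumed helper */
	/* Park the descriptor; nothing touches the hardware yet. */
	list_add_tail(&desc->desc_node, &dwc->queue);
	spin_unlock_irqrestore(&dwc->lock, flags);

	return cookie;
}

static void sketch_issue_pending(struct dma_chan *chan)
{
	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&dwc->lock, flags);
	/* Kick the hardware only if the channel is currently idle. */
	if (list_empty(&dwc->active_list) && !list_empty(&dwc->queue)) {
		list_move(dwc->queue.next, &dwc->active_list);
		dwc_dostart(dwc, dwc_first_active(dwc));
	}
	spin_unlock_irqrestore(&dwc->lock, flags);
}
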
977 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_alloc_chan_resources() local
983 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_alloc_chan_resources()
1007 dw->in_use |= dwc->mask; in dwc_alloc_chan_resources()
1014 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_free_chan_resources() local
1019 dwc->descs_allocated); in dwc_free_chan_resources()
1022 BUG_ON(!list_empty(&dwc->active_list)); in dwc_free_chan_resources()
1023 BUG_ON(!list_empty(&dwc->queue)); in dwc_free_chan_resources()
1024 BUG_ON(dma_readl(to_dw_dma(chan->device), CH_EN) & dwc->mask); in dwc_free_chan_resources()
1026 spin_lock_irqsave(&dwc->lock, flags); in dwc_free_chan_resources()
1029 memset(&dwc->dws, 0, sizeof(struct dw_dma_slave)); in dwc_free_chan_resources()
1032 channel_clear_bit(dw, MASK.XFER, dwc->mask); in dwc_free_chan_resources()
1033 channel_clear_bit(dw, MASK.BLOCK, dwc->mask); in dwc_free_chan_resources()
1034 channel_clear_bit(dw, MASK.ERROR, dwc->mask); in dwc_free_chan_resources()
1036 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_free_chan_resources()
1039 dw->in_use &= ~dwc->mask; in dwc_free_chan_resources()
1048 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_caps() local
1050 caps->max_burst = dwc->max_burst; in dwc_caps()
1059 if (dwc->nollp) in dwc_caps()
1151 struct dw_dma_chan *dwc = &dw->chan[i]; in do_dma_probe() local
1153 dwc->chan.device = &dw->dma; in do_dma_probe()
1154 dma_cookie_init(&dwc->chan); in do_dma_probe()
1156 list_add_tail(&dwc->chan.device_node, in do_dma_probe()
1159 list_add(&dwc->chan.device_node, &dw->dma.channels); in do_dma_probe()
1163 dwc->priority = pdata->nr_channels - i - 1; in do_dma_probe()
1165 dwc->priority = i; in do_dma_probe()
1167 dwc->ch_regs = &__dw_regs(dw)->CHAN[i]; in do_dma_probe()
1168 spin_lock_init(&dwc->lock); in do_dma_probe()
1169 dwc->mask = 1 << i; in do_dma_probe()
1171 INIT_LIST_HEAD(&dwc->active_list); in do_dma_probe()
1172 INIT_LIST_HEAD(&dwc->queue); in do_dma_probe()
1174 channel_clear_bit(dw, CH_EN, dwc->mask); in do_dma_probe()
1176 dwc->direction = DMA_TRANS_NONE; in do_dma_probe()
1192 dwc->block_size = in do_dma_probe()
1202 dwc->nollp = in do_dma_probe()
1205 dwc->max_burst = in do_dma_probe()
1208 dwc->block_size = pdata->block_size; in do_dma_probe()
1209 dwc->nollp = !pdata->multi_block[i]; in do_dma_probe()
1210 dwc->max_burst = pdata->max_burst[i] ?: DW_DMA_MAX_BURST; in do_dma_probe()
1279 struct dw_dma_chan *dwc, *_dwc; in do_dma_remove() local
1289 list_for_each_entry_safe(dwc, _dwc, &dw->dma.channels, in do_dma_remove()
1291 list_del(&dwc->chan.device_node); in do_dma_remove()
1292 channel_clear_bit(dw, CH_EN, dwc->mask); in do_dma_remove()
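
The dwc->mask references that recur throughout the listing all follow one convention: do_dma_probe() gives each channel a one-hot mask (dwc->mask = 1 << i), dwc_dostart() sets that bit in CH_EN to start the channel, and dwc_chan_disable() clears it and polls until the controller drops the bit. A condensed sketch of the disable side, assuming cpu_relax() as the poll-loop body (the listing cuts the loop off after its first line) and using an invented sketch_chan_disable() name:

static void sketch_chan_disable(struct dw_dma *dw, struct dw_dma_chan *dwc)
{
	channel_clear_bit(dw, CH_EN, dwc->mask);
	/* Wait for the hardware to acknowledge that the channel is off. */
	while (dma_readl(dw, CH_EN) & dwc->mask)
		cpu_relax();	/* assumed loop body */
}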