Lines Matching refs:vchan

195 	struct virt_dma_chan	vchan;  member of struct ldma_chan
292 return container_of(chan, struct ldma_chan, vchan.chan); in to_ldma_chan()
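The two matches above show the embedding pattern everything else in this list relies on: struct virt_dma_chan sits inside the driver's private channel struct, and container_of() recovers the wrapper from the struct dma_chan pointer the dmaengine core hands back. A minimal sketch of that relationship (every field besides vchan is a placeholder, not the driver's actual layout):

    struct ldma_chan {
            struct virt_dma_chan    vchan;  /* embeds struct dma_chan */
            /* ... driver-private channel state ... */
    };

    static inline struct ldma_chan *to_ldma_chan(struct dma_chan *chan)
    {
            /* chan is &ldma_chan.vchan.chan, so one container_of suffices */
            return container_of(chan, struct ldma_chan, vchan.chan);
    }
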
521 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cctrl_cfg()
550 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_init()
578 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_set_class()
596 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_on()
615 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_off()
638 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_hw_cfg()
662 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_cfg()
696 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_reset()
723 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_byte_offset_cfg()
739 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_data_endian_cfg()
755 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_endian_cfg()
770 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_hdr_mode_cfg()
789 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_rxwr_np_cfg()
805 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_abc_cfg()
862 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cfg()
977 struct device *dev = c->vchan.chan.device->dev; in dma_alloc_desc_resource()
1004 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_en()
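Nearly every helper in the run above opens the same way: c->vchan.chan.device is the struct dma_device the channel was registered against (see vchan_init() at the bottom of this list), and to_ldma_dev() converts it back to the driver's controller state with container_of(). A hedged sketch, assuming struct ldma_dev embeds its dma_device under the name dma_dev:

    struct ldma_dev {
            struct dma_device       dma_dev;        /* assumed member name */
            /* ... controller-wide state, register base, etc. ... */
    };

    static inline struct ldma_dev *to_ldma_dev(struct dma_device *dma_dev)
    {
            return container_of(dma_dev, struct ldma_dev, dma_dev);
    }

    /* the recurring opener in the matches above */
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);
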
1017 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_issue_pending()
1021 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_issue_pending()
1022 if (vchan_issue_pending(&c->vchan)) { in ldma_issue_pending()
1026 vdesc = vchan_next_desc(&c->vchan); in ldma_issue_pending()
1029 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_issue_pending()
1037 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_issue_pending()
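The issue_pending matches trace the standard virt-dma shape: under vchan.lock, vchan_issue_pending() splices the submitted descriptors onto the issued list, and vchan_next_desc() peeks at the head so the hardware can be started. A sketch of that control flow, not the driver's exact body (the start-of-transfer step is driver specific):

    static void ldma_issue_pending(struct dma_chan *chan)
    {
            struct ldma_chan *c = to_ldma_chan(chan);
            struct virt_dma_desc *vdesc;
            unsigned long flags;

            spin_lock_irqsave(&c->vchan.lock, flags);
            if (vchan_issue_pending(&c->vchan)) {
                    vdesc = vchan_next_desc(&c->vchan);
                    if (vdesc) {
                            /* program hardware descriptors and enable the channel */
                    }
            }
            spin_unlock_irqrestore(&c->vchan.lock, flags);
    }
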
1051 vchan_synchronize(&c->vchan); in ldma_synchronize()
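vchan_synchronize() kills the virt-dma completion tasklet (waiting for a running instance to finish) and reclaims descriptors terminated mid-flight, so device_synchronize can often be this thin; the real handler may also flush its own deferred work first:

    static void ldma_synchronize(struct dma_chan *chan)
    {
            struct ldma_chan *c = to_ldma_chan(chan);

            vchan_synchronize(&c->vchan);
    }
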
1062 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_terminate_all()
1063 vchan_get_all_descriptors(&c->vchan, &head); in ldma_terminate_all()
1064 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_terminate_all()
1065 vchan_dma_desc_free_list(&c->vchan, &head); in ldma_terminate_all()
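Those four lines are the canonical virt-dma terminate sequence: collect every descriptor off the channel's lists while holding vchan.lock, then free them after dropping it, since vchan_dma_desc_free_list() may take the lock itself when recycling reusable descriptors. Sketch (stopping the hardware channel, which the real handler presumably also does, is omitted):

    static int ldma_terminate_all(struct dma_chan *chan)
    {
            struct ldma_chan *c = to_ldma_chan(chan);
            unsigned long flags;
            LIST_HEAD(head);

            spin_lock_irqsave(&c->vchan.lock, flags);
            vchan_get_all_descriptors(&c->vchan, &head);
            spin_unlock_irqrestore(&c->vchan.lock, flags);
            vchan_dma_desc_free_list(&c->vchan, &head);

            return 0;
    }
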
1091 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_tx_status()
1103 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in dma_chan_irq()
1162 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_prep_slave_sg()
1256 return vchan_tx_prep(&c->vchan, &ds->vdesc, DMA_CTRL_ACK); in ldma_prep_slave_sg()
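vchan_tx_prep() is the standard tail of a prep routine: it initializes the embedded dma_async_tx_descriptor and parks the descriptor on the channel's allocated list until the client submits it. Sketch of the descriptor type the call implies (the struct name and fields beyond vdesc are assumptions):

    struct ldma_desc {                      /* hypothetical name */
            struct virt_dma_desc    vdesc;  /* embeds the tx descriptor */
            /* ... hardware descriptor table for the sg list ... */
    };

    /* tail of ldma_prep_slave_sg(), as matched above */
    return vchan_tx_prep(&c->vchan, &ds->vdesc, DMA_CTRL_ACK);
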
1272 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_alloc_chan_resources()
1273 struct device *dev = c->vchan.chan.device->dev; in ldma_alloc_chan_resources()
1299 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_free_chan_resources()
1315 struct virt_dma_chan *vc = &c->vchan; in dma_work()
1321 spin_lock_irqsave(&c->vchan.lock, flags); in dma_work()
1323 spin_unlock_irqrestore(&c->vchan.lock, flags); in dma_work()
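dma_work() takes vchan.lock around its completion bookkeeping. In virt-dma drivers the usual move inside such a section is vchan_cookie_complete(), which stamps the cookie, queues the descriptor on the completed list, and schedules the tasklet that invokes the client callback. A hedged sketch of that shape, not the driver's exact body:

    static void dma_work(struct work_struct *work)
    {
            struct ldma_chan *c = container_of(work, struct ldma_chan, work);    /* 'work' field assumed */
            struct virt_dma_chan *vc = &c->vchan;
            struct virt_dma_desc *vd;
            unsigned long flags;

            spin_lock_irqsave(&c->vchan.lock, flags);
            vd = vchan_next_desc(vc);
            if (vd) {
                    list_del(&vd->node);            /* off the issued list */
                    vchan_cookie_complete(vd);      /* requires vchan.lock held */
            }
            spin_unlock_irqrestore(&c->vchan.lock, flags);
    }
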
1401 return dma_get_slave_channel(&d->chans[chan_id].vchan.chan); in ldma_xlate()
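ldma_xlate() resolves a consumer's dma-cells into a channel and hands it to the core with dma_get_slave_channel(), which takes a reference only if the channel is still free and returns NULL otherwise. Sketch, assuming the first cell carries the channel index and using a hypothetical chan_nrs bound:

    static struct dma_chan *ldma_xlate(struct of_phandle_args *spec,
                                       struct of_dma *ofdma)
    {
            struct ldma_dev *d = ofdma->of_dma_data;
            u32 chan_id = spec->args[0];    /* assumed cell layout */

            if (chan_id >= d->chan_nrs)     /* hypothetical bounds field */
                    return NULL;

            return dma_get_slave_channel(&d->chans[chan_id].vchan.chan);
    }
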
1414 c->vchan.desc_free = dma_free_desc_resource; in ldma_dma_init_v22()
1415 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v22()
1435 c->vchan.desc_free = dma_free_desc_resource; in ldma_dma_init_v3X()
1436 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v3X()
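Both init paths finish channel setup identically: desc_free is pointed at the driver's descriptor destructor (virt-dma invokes it from its tasklet and free paths), then vchan_init() initializes vchan.lock, the descriptor lists, and the completion tasklet, and links the channel into d->dma_dev.channels. Sketch of the per-channel loop, with the count field assumed:

    for (i = 0; i < d->chan_nrs; i++) {     /* chan_nrs: assumed count */
            c = &d->chans[i];
            c->vchan.desc_free = dma_free_desc_resource;
            vchan_init(&c->vchan, &d->dma_dev);
    }
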