Lines matching refs: od

265 static void dma_update(struct owl_dma *od, u32 reg, u32 val, bool state)  in dma_update()  argument
269 regval = readl(od->base + reg); in dma_update()
276 writel(regval, od->base + reg); in dma_update()
279 static void dma_writel(struct owl_dma *od, u32 reg, u32 data) in dma_writel() argument
281 writel(data, od->base + reg); in dma_writel()
284 static u32 dma_readl(struct owl_dma *od, u32 reg) in dma_readl() argument
286 return readl(od->base + reg); in dma_readl()
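
The three fragments above are the driver's MMIO accessors, built on readl()/writel() against the ioremapped od->base. A minimal sketch of how they fit together (register offsets and the struct owl_dma fields are the ones visible in the listing); dma_update() is a read-modify-write helper used to set or clear individual bits, e.g. one channel's IRQ-enable bit, without disturbing the rest of the register:

    static void dma_writel(struct owl_dma *od, u32 reg, u32 data)
    {
            writel(data, od->base + reg);
    }

    static u32 dma_readl(struct owl_dma *od, u32 reg)
    {
            return readl(od->base + reg);
    }

    /* Set (state == true) or clear (state == false) the bits in 'val'. */
    static void dma_update(struct owl_dma *od, u32 reg, u32 val, bool state)
    {
            u32 regval = readl(od->base + reg);

            if (state)
                    regval |= val;
            else
                    regval &= ~val;

            writel(regval, od->base + reg);
    }
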
341 static void owl_dma_free_lli(struct owl_dma *od, in owl_dma_free_lli() argument
345 dma_pool_free(od->lli_pool, lli, lli->phys); in owl_dma_free_lli()
348 static struct owl_dma_lli *owl_dma_alloc_lli(struct owl_dma *od) in owl_dma_alloc_lli() argument
353 lli = dma_pool_alloc(od->lli_pool, GFP_NOWAIT, &phys); in owl_dma_alloc_lli()
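
Hardware link-list items (LLIs) are carved out of a dma_pool so that each one has both a kernel virtual address and a DMA address the controller can follow. A sketch of the allocate/free pair, assuming the lli records its own 'phys' handle and carries a 'node' list head for chaining (the list head is inferred, not shown in the fragments):

    static struct owl_dma_lli *owl_dma_alloc_lli(struct owl_dma *od)
    {
            struct owl_dma_lli *lli;
            dma_addr_t phys;

            /* GFP_NOWAIT: prep callbacks may be called in atomic context. */
            lli = dma_pool_alloc(od->lli_pool, GFP_NOWAIT, &phys);
            if (!lli)
                    return NULL;

            INIT_LIST_HEAD(&lli->node);
            lli->phys = phys;       /* remembered for dma_pool_free() */

            return lli;
    }

    static void owl_dma_free_lli(struct owl_dma *od, struct owl_dma_lli *lli)
    {
            list_del(&lli->node);
            dma_pool_free(od->lli_pool, lli, lli->phys);
    }
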
387 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_cfg_lli() local
444 if (od->devid == S700_DMA) { in owl_dma_cfg_lli()
467 static struct owl_dma_pchan *owl_dma_get_pchan(struct owl_dma *od, in owl_dma_get_pchan() argument
474 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_get_pchan()
475 pchan = &od->pchans[i]; in owl_dma_get_pchan()
477 spin_lock_irqsave(&od->lock, flags); in owl_dma_get_pchan()
480 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_get_pchan()
484 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_get_pchan()
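
owl_dma_get_pchan() claims a free physical channel for a virtual channel; the od->lock spinlock serializes the test-and-set of the pchan's owner pointer against the interrupt handler. A sketch of that claim loop, returning NULL when every physical channel is busy (the exact return convention is not visible in the fragments):

    static struct owl_dma_pchan *owl_dma_get_pchan(struct owl_dma *od,
                                                   struct owl_dma_vchan *vchan)
    {
            struct owl_dma_pchan *pchan;
            unsigned long flags;
            int i;

            for (i = 0; i < od->nr_pchans; i++) {
                    pchan = &od->pchans[i];

                    spin_lock_irqsave(&od->lock, flags);
                    if (!pchan->vchan) {
                            pchan->vchan = vchan;   /* claim it */
                            spin_unlock_irqrestore(&od->lock, flags);
                            return pchan;
                    }
                    spin_unlock_irqrestore(&od->lock, flags);
            }

            return NULL;    /* all physical channels in use */
    }
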
490 static int owl_dma_pchan_busy(struct owl_dma *od, struct owl_dma_pchan *pchan) in owl_dma_pchan_busy() argument
494 val = dma_readl(od, OWL_DMA_IDLE_STAT); in owl_dma_pchan_busy()
499 static void owl_dma_terminate_pchan(struct owl_dma *od, in owl_dma_terminate_pchan() argument
508 spin_lock_irqsave(&od->lock, flags); in owl_dma_terminate_pchan()
509 dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), false); in owl_dma_terminate_pchan()
511 irq_pd = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_terminate_pchan()
513 dev_warn(od->dma.dev, in owl_dma_terminate_pchan()
516 dma_writel(od, OWL_DMA_IRQ_PD0, (1 << pchan->id)); in owl_dma_terminate_pchan()
521 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_terminate_pchan()
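
Terminating a physical channel masks its interrupt and acknowledges anything already pending, all under od->lock, so the interrupt handler never sees a stale bit for a channel that is being torn down. A hedged sketch of that sequence using the helpers above (register names are taken from the fragments, the warning text is illustrative):

    static void owl_dma_terminate_pchan(struct owl_dma *od,
                                        struct owl_dma_pchan *pchan)
    {
            unsigned long flags;
            u32 irq_pd;

            /* ... stop the channel itself first ... */

            spin_lock_irqsave(&od->lock, flags);

            /* Mask this channel's interrupt. */
            dma_update(od, OWL_DMA_IRQ_EN0, 1 << pchan->id, false);

            /* Acknowledge an interrupt that has already fired for it. */
            irq_pd = dma_readl(od, OWL_DMA_IRQ_PD0);
            if (irq_pd & (1 << pchan->id)) {
                    dev_warn(od->dma.dev,
                             "terminating pchan %d with pending irq\n",
                             pchan->id);
                    dma_writel(od, OWL_DMA_IRQ_PD0, 1 << pchan->id);
            }

            pchan->vchan = NULL;    /* release ownership */

            spin_unlock_irqrestore(&od->lock, flags);
    }
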
536 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_start_next_txd() local
549 while (owl_dma_pchan_busy(od, pchan)) in owl_dma_start_next_txd()
569 spin_lock_irqsave(&od->lock, flags); in owl_dma_start_next_txd()
571 dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), true); in owl_dma_start_next_txd()
573 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_start_next_txd()
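
Before a new descriptor is programmed, owl_dma_start_next_txd() waits for the physical channel to report idle, programs it from the first LLI, and only then unmasks that channel's interrupt under od->lock. A sketch of just that gating logic; owl_dma_kick_pchan() is a hypothetical wrapper used purely for illustration:

    /* Hypothetical helper wrapping the busy-wait/unmask sequence above. */
    static void owl_dma_kick_pchan(struct owl_dma *od,
                                   struct owl_dma_pchan *pchan)
    {
            unsigned long flags;

            /* Spin until the hardware reports the channel idle. */
            while (owl_dma_pchan_busy(od, pchan))
                    cpu_relax();

            /* ... channel mode/source/destination registers would be
             * programmed from the first LLI here ... */

            spin_lock_irqsave(&od->lock, flags);
            dma_update(od, OWL_DMA_IRQ_EN0, 1 << pchan->id, true); /* unmask */
            spin_unlock_irqrestore(&od->lock, flags);
    }
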
583 static void owl_dma_phy_free(struct owl_dma *od, struct owl_dma_vchan *vchan) in owl_dma_phy_free() argument
586 owl_dma_terminate_pchan(od, vchan->pchan); in owl_dma_phy_free()
593 struct owl_dma *od = dev_id; in owl_dma_interrupt() local
600 spin_lock(&od->lock); in owl_dma_interrupt()
602 pending = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
605 for_each_set_bit(i, &pending, od->nr_pchans) { in owl_dma_interrupt()
606 pchan = &od->pchans[i]; in owl_dma_interrupt()
611 dma_writel(od, OWL_DMA_IRQ_PD0, pending); in owl_dma_interrupt()
614 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_interrupt()
615 pchan = &od->pchans[i]; in owl_dma_interrupt()
620 dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
622 global_irq_pending = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
625 dev_dbg(od->dma.dev, in owl_dma_interrupt()
637 spin_unlock(&od->lock); in owl_dma_interrupt()
639 for_each_set_bit(i, &pending, od->nr_pchans) { in owl_dma_interrupt()
642 pchan = &od->pchans[i]; in owl_dma_interrupt()
646 dev_warn(od->dma.dev, "no vchan attached on pchan %d\n", in owl_dma_interrupt()
666 owl_dma_phy_free(od, vchan); in owl_dma_interrupt()
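
The interrupt handler reads the pending mask under od->lock, acknowledges it, and then walks the set bits outside the lock to complete the finished descriptor on each owning vchan. A condensed sketch of that flow; the missed-interrupt recheck visible in the fragments (the second scan of OWL_DMA_IRQ_PD0 against the enable mask) and the descriptor-completion details are omitted:

    static irqreturn_t owl_dma_interrupt(int irq, void *dev_id)
    {
            struct owl_dma *od = dev_id;
            struct owl_dma_pchan *pchan;
            unsigned long pending;
            unsigned int i;

            spin_lock(&od->lock);

            pending = dma_readl(od, OWL_DMA_IRQ_PD0);

            /* Acknowledge the channel interrupts we are about to handle. */
            dma_writel(od, OWL_DMA_IRQ_PD0, pending);

            spin_unlock(&od->lock);

            for_each_set_bit(i, &pending, od->nr_pchans) {
                    pchan = &od->pchans[i];

                    if (!pchan->vchan) {
                            dev_warn(od->dma.dev,
                                     "no vchan attached on pchan %d\n",
                                     pchan->id);
                            continue;
                    }

                    /* ... mark the current txd complete on pchan->vchan,
                     * start the next one or release the physical channel
                     * with owl_dma_phy_free(od, vchan) ... */
            }

            return IRQ_HANDLED;
    }
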
675 static void owl_dma_free_txd(struct owl_dma *od, struct owl_dma_txd *txd) in owl_dma_free_txd() argument
683 owl_dma_free_lli(od, lli); in owl_dma_free_txd()
690 struct owl_dma *od = to_owl_dma(vd->tx.chan->device); in owl_dma_desc_free() local
693 owl_dma_free_txd(od, txd); in owl_dma_desc_free()
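
A transaction descriptor owns a chain of LLIs; freeing it returns each LLI to the pool and then frees the descriptor itself. owl_dma_desc_free() is the virt-dma desc_free callback that recovers the driver structures from the embedded virt_dma_desc. A sketch in which the 'lli_list' field and the to_owl_txd() container helper are assumptions, not taken from the fragments:

    static void owl_dma_free_txd(struct owl_dma *od, struct owl_dma_txd *txd)
    {
            struct owl_dma_lli *lli, *n;

            if (unlikely(!txd))
                    return;

            /* Return every hardware link-list item to the DMA pool. */
            list_for_each_entry_safe(lli, n, &txd->lli_list, node)
                    owl_dma_free_lli(od, lli);

            kfree(txd);
    }

    static void owl_dma_desc_free(struct virt_dma_desc *vd)
    {
            struct owl_dma *od = to_owl_dma(vd->tx.chan->device);
            struct owl_dma_txd *txd = to_owl_txd(&vd->tx);  /* assumed helper */

            owl_dma_free_txd(od, txd);
    }
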
698 struct owl_dma *od = to_owl_dma(chan->device); in owl_dma_terminate_all() local
706 owl_dma_phy_free(od, vchan); in owl_dma_terminate_all()
839 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_phy_alloc_and_start() local
842 pchan = owl_dma_get_pchan(od, vchan); in owl_dma_phy_alloc_and_start()
846 dev_dbg(od->dma.dev, "allocated pchan %d\n", pchan->id); in owl_dma_phy_alloc_and_start()
870 struct owl_dma *od = to_owl_dma(chan->device); in owl_dma_prep_memcpy() local
888 lli = owl_dma_alloc_lli(od); in owl_dma_prep_memcpy()
910 owl_dma_free_txd(od, txd); in owl_dma_prep_memcpy()
921 struct owl_dma *od = to_owl_dma(chan->device); in owl_dma_prep_slave_sg() local
942 dev_err(od->dma.dev, in owl_dma_prep_slave_sg()
947 lli = owl_dma_alloc_lli(od); in owl_dma_prep_slave_sg()
974 owl_dma_free_txd(od, txd); in owl_dma_prep_slave_sg()
986 struct owl_dma *od = to_owl_dma(chan->device); in owl_prep_dma_cyclic() local
1003 lli = owl_dma_alloc_lli(od); in owl_prep_dma_cyclic()
1036 owl_dma_free_txd(od, txd); in owl_prep_dma_cyclic()
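
The three prep callbacks above (memcpy, slave_sg, cyclic) follow one pattern: allocate a txd, build one LLI per segment or period with owl_dma_alloc_lli() and owl_dma_cfg_lli(), chain them, and unwind with owl_dma_free_txd() on any failure. A simplified sketch of that skeleton for the memcpy case; to_owl_vchan(), the txd 'vd' and 'lli_list' fields, and the single-LLI shortcut are assumptions (the real callback splits long copies into frame-sized chunks):

    static struct dma_async_tx_descriptor *
    owl_dma_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
                        size_t len, unsigned long flags)
    {
            struct owl_dma *od = to_owl_dma(chan->device);
            struct owl_dma_vchan *vchan = to_owl_vchan(chan);
            struct owl_dma_lli *lli;
            struct owl_dma_txd *txd;

            if (!len)
                    return NULL;

            txd = kzalloc(sizeof(*txd), GFP_NOWAIT);
            if (!txd)
                    return NULL;

            INIT_LIST_HEAD(&txd->lli_list);

            lli = owl_dma_alloc_lli(od);
            if (!lli)
                    goto err_txd_free;

            /* The real callback loops here, filling one LLI per frame-sized
             * chunk via owl_dma_cfg_lli() with dst, src and len; a single
             * LLI stands in for that loop in this sketch. */
            list_add_tail(&lli->node, &txd->lli_list);

            return vchan_tx_prep(&vchan->vc, &txd->vd, flags);

    err_txd_free:
            owl_dma_free_txd(od, txd);
            return NULL;
    }
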
1049 static inline void owl_dma_free(struct owl_dma *od) in owl_dma_free() argument
1055 next, &od->dma.channels, vc.chan.device_node) { in owl_dma_free()
1064 struct owl_dma *od = ofdma->of_dma_data; in owl_dma_of_xlate() local
1069 if (drq > od->nr_vchans) in owl_dma_of_xlate()
1072 chan = dma_get_any_slave_channel(&od->dma); in owl_dma_of_xlate()
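
owl_dma_of_xlate() turns a device-tree DMA specifier into a channel: the first cell is the request line ('drq'), which is range-checked against nr_vchans before a free channel is picked with dma_get_any_slave_channel() and bound to that line. A sketch under the same assumptions (to_owl_vchan() and the vchan 'drq' field are inferred):

    static struct dma_chan *owl_dma_of_xlate(struct of_phandle_args *dma_spec,
                                             struct of_dma *ofdma)
    {
            struct owl_dma *od = ofdma->of_dma_data;
            struct owl_dma_vchan *vchan;
            struct dma_chan *chan;
            u8 drq = dma_spec->args[0];

            if (drq > od->nr_vchans)
                    return NULL;

            chan = dma_get_any_slave_channel(&od->dma);
            if (!chan)
                    return NULL;

            /* Remember which hardware request line this channel serves. */
            vchan = to_owl_vchan(chan);
            vchan->drq = drq;

            return chan;
    }
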
1093 struct owl_dma *od; in owl_dma_probe() local
1096 od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL); in owl_dma_probe()
1097 if (!od) in owl_dma_probe()
1100 od->base = devm_platform_ioremap_resource(pdev, 0); in owl_dma_probe()
1101 if (IS_ERR(od->base)) in owl_dma_probe()
1102 return PTR_ERR(od->base); in owl_dma_probe()
1119 od->devid = (enum owl_dma_id)of_device_get_match_data(&pdev->dev); in owl_dma_probe()
1121 od->nr_pchans = nr_channels; in owl_dma_probe()
1122 od->nr_vchans = nr_requests; in owl_dma_probe()
1126 platform_set_drvdata(pdev, od); in owl_dma_probe()
1127 spin_lock_init(&od->lock); in owl_dma_probe()
1129 dma_cap_set(DMA_MEMCPY, od->dma.cap_mask); in owl_dma_probe()
1130 dma_cap_set(DMA_SLAVE, od->dma.cap_mask); in owl_dma_probe()
1131 dma_cap_set(DMA_CYCLIC, od->dma.cap_mask); in owl_dma_probe()
1133 od->dma.dev = &pdev->dev; in owl_dma_probe()
1134 od->dma.device_free_chan_resources = owl_dma_free_chan_resources; in owl_dma_probe()
1135 od->dma.device_tx_status = owl_dma_tx_status; in owl_dma_probe()
1136 od->dma.device_issue_pending = owl_dma_issue_pending; in owl_dma_probe()
1137 od->dma.device_prep_dma_memcpy = owl_dma_prep_memcpy; in owl_dma_probe()
1138 od->dma.device_prep_slave_sg = owl_dma_prep_slave_sg; in owl_dma_probe()
1139 od->dma.device_prep_dma_cyclic = owl_prep_dma_cyclic; in owl_dma_probe()
1140 od->dma.device_config = owl_dma_config; in owl_dma_probe()
1141 od->dma.device_pause = owl_dma_pause; in owl_dma_probe()
1142 od->dma.device_resume = owl_dma_resume; in owl_dma_probe()
1143 od->dma.device_terminate_all = owl_dma_terminate_all; in owl_dma_probe()
1144 od->dma.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in owl_dma_probe()
1145 od->dma.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in owl_dma_probe()
1146 od->dma.directions = BIT(DMA_MEM_TO_MEM); in owl_dma_probe()
1147 od->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in owl_dma_probe()
1149 INIT_LIST_HEAD(&od->dma.channels); in owl_dma_probe()
1151 od->clk = devm_clk_get(&pdev->dev, NULL); in owl_dma_probe()
1152 if (IS_ERR(od->clk)) { in owl_dma_probe()
1154 return PTR_ERR(od->clk); in owl_dma_probe()
1162 od->irq = platform_get_irq(pdev, 0); in owl_dma_probe()
1163 ret = devm_request_irq(&pdev->dev, od->irq, owl_dma_interrupt, 0, in owl_dma_probe()
1164 dev_name(&pdev->dev), od); in owl_dma_probe()
1171 od->pchans = devm_kcalloc(&pdev->dev, od->nr_pchans, in owl_dma_probe()
1173 if (!od->pchans) in owl_dma_probe()
1176 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_probe()
1177 struct owl_dma_pchan *pchan = &od->pchans[i]; in owl_dma_probe()
1180 pchan->base = od->base + OWL_DMA_CHAN_BASE(i); in owl_dma_probe()
1184 od->vchans = devm_kcalloc(&pdev->dev, od->nr_vchans, in owl_dma_probe()
1186 if (!od->vchans) in owl_dma_probe()
1189 for (i = 0; i < od->nr_vchans; i++) { in owl_dma_probe()
1190 struct owl_dma_vchan *vchan = &od->vchans[i]; in owl_dma_probe()
1193 vchan_init(&vchan->vc, &od->dma); in owl_dma_probe()
1197 od->lli_pool = dma_pool_create(dev_name(od->dma.dev), od->dma.dev, in owl_dma_probe()
1201 if (!od->lli_pool) { in owl_dma_probe()
1206 clk_prepare_enable(od->clk); in owl_dma_probe()
1208 ret = dma_async_device_register(&od->dma); in owl_dma_probe()
1216 owl_dma_of_xlate, od); in owl_dma_probe()
1225 dma_async_device_unregister(&od->dma); in owl_dma_probe()
1227 clk_disable_unprepare(od->clk); in owl_dma_probe()
1228 dma_pool_destroy(od->lli_pool); in owl_dma_probe()
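
Probe sets things up in dependency order: map the registers, read the channel/request counts and the SoC variant, fill in the dmaengine callbacks, request the interrupt, allocate the physical and virtual channel arrays, create the LLI pool, enable the clock, register with dmaengine and finally with the OF DMA framework, unwinding in reverse on failure. A heavily condensed sketch of that ordering (DT parsing, most callbacks and several error checks elided; field names are the ones visible above):

    static int owl_dma_probe(struct platform_device *pdev)
    {
            struct owl_dma *od;
            int ret, i;

            od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL);
            if (!od)
                    return -ENOMEM;

            od->base = devm_platform_ioremap_resource(pdev, 0);
            if (IS_ERR(od->base))
                    return PTR_ERR(od->base);

            /* nr_pchans/nr_vchans come from DT properties and devid from
             * the OF match data, as in the fragments (elided here). */

            platform_set_drvdata(pdev, od);
            spin_lock_init(&od->lock);
            INIT_LIST_HEAD(&od->dma.channels);

            dma_cap_set(DMA_MEMCPY, od->dma.cap_mask);
            dma_cap_set(DMA_SLAVE, od->dma.cap_mask);
            dma_cap_set(DMA_CYCLIC, od->dma.cap_mask);
            od->dma.dev = &pdev->dev;
            od->dma.device_prep_dma_memcpy = owl_dma_prep_memcpy;
            /* ... remaining capabilities and callbacks as listed above ... */

            od->clk = devm_clk_get(&pdev->dev, NULL);
            if (IS_ERR(od->clk))
                    return PTR_ERR(od->clk);

            od->irq = platform_get_irq(pdev, 0);
            if (od->irq < 0)
                    return od->irq;
            ret = devm_request_irq(&pdev->dev, od->irq, owl_dma_interrupt, 0,
                                   dev_name(&pdev->dev), od);
            if (ret)
                    return ret;

            od->pchans = devm_kcalloc(&pdev->dev, od->nr_pchans,
                                      sizeof(*od->pchans), GFP_KERNEL);
            od->vchans = devm_kcalloc(&pdev->dev, od->nr_vchans,
                                      sizeof(*od->vchans), GFP_KERNEL);
            if (!od->pchans || !od->vchans)
                    return -ENOMEM;

            for (i = 0; i < od->nr_pchans; i++) {
                    od->pchans[i].id = i;
                    od->pchans[i].base = od->base + OWL_DMA_CHAN_BASE(i);
            }

            for (i = 0; i < od->nr_vchans; i++) {
                    od->vchans[i].vc.desc_free = owl_dma_desc_free;
                    vchan_init(&od->vchans[i].vc, &od->dma);
            }

            /* The LLI pool must exist before any transfer can be prepared. */
            od->lli_pool = dma_pool_create(dev_name(od->dma.dev), od->dma.dev,
                                           sizeof(struct owl_dma_lli),
                                           __alignof__(struct owl_dma_lli), 0);
            if (!od->lli_pool)
                    return -ENOMEM;

            clk_prepare_enable(od->clk);

            ret = dma_async_device_register(&od->dma);
            if (ret)
                    goto err_pool_free;

            /* Lets DT clients resolve channels via owl_dma_of_xlate(). */
            ret = of_dma_controller_register(pdev->dev.of_node,
                                             owl_dma_of_xlate, od);
            if (ret) {
                    dma_async_device_unregister(&od->dma);
                    goto err_pool_free;
            }

            return 0;

    err_pool_free:
            clk_disable_unprepare(od->clk);
            dma_pool_destroy(od->lli_pool);
            return ret;
    }
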
1235 struct owl_dma *od = platform_get_drvdata(pdev); in owl_dma_remove() local
1238 dma_async_device_unregister(&od->dma); in owl_dma_remove()
1241 dma_writel(od, OWL_DMA_IRQ_EN0, 0x0); in owl_dma_remove()
1244 devm_free_irq(od->dma.dev, od->irq, od); in owl_dma_remove()
1246 owl_dma_free(od); in owl_dma_remove()
1248 clk_disable_unprepare(od->clk); in owl_dma_remove()
1249 dma_pool_destroy(od->lli_pool); in owl_dma_remove()
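
Teardown mirrors probe in reverse: unregister from the OF DMA framework and dmaengine, mask every channel interrupt, free the IRQ before the device data can go away, release the channels, then drop the clock and destroy the LLI pool. A sketch with only the fields shown above (the int return follows the older platform_driver convention; newer kernels use a void remove callback):

    static int owl_dma_remove(struct platform_device *pdev)
    {
            struct owl_dma *od = platform_get_drvdata(pdev);

            of_dma_controller_free(pdev->dev.of_node);
            dma_async_device_unregister(&od->dma);

            /* Mask all interrupts so nothing fires during teardown. */
            dma_writel(od, OWL_DMA_IRQ_EN0, 0x0);

            /* Free the IRQ explicitly: no handler may run past this point. */
            devm_free_irq(od->dma.dev, od->irq, od);

            owl_dma_free(od);

            clk_disable_unprepare(od->clk);
            dma_pool_destroy(od->lli_pool);

            return 0;
    }
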