Lines matching refs:uc — cross-reference hits for the struct udma_chan pointer "uc" in the TI K3 UDMA/BCDMA/PKTDMA engine driver (drivers/dma/ti/k3-udma.c). Each entry gives the source line number, the matching line, and the enclosing function; the trailing "argument"/"local" tag records whether uc enters that function as a parameter or a local variable.
354 static inline u32 udma_tchanrt_read(struct udma_chan *uc, int reg) in udma_tchanrt_read() argument
356 if (!uc->tchan) in udma_tchanrt_read()
358 return udma_read(uc->tchan->reg_rt, reg); in udma_tchanrt_read()
361 static inline void udma_tchanrt_write(struct udma_chan *uc, int reg, u32 val) in udma_tchanrt_write() argument
363 if (!uc->tchan) in udma_tchanrt_write()
365 udma_write(uc->tchan->reg_rt, reg, val); in udma_tchanrt_write()
368 static inline void udma_tchanrt_update_bits(struct udma_chan *uc, int reg, in udma_tchanrt_update_bits() argument
371 if (!uc->tchan) in udma_tchanrt_update_bits()
373 udma_update_bits(uc->tchan->reg_rt, reg, mask, val); in udma_tchanrt_update_bits()
377 static inline u32 udma_rchanrt_read(struct udma_chan *uc, int reg) in udma_rchanrt_read() argument
379 if (!uc->rchan) in udma_rchanrt_read()
381 return udma_read(uc->rchan->reg_rt, reg); in udma_rchanrt_read()
384 static inline void udma_rchanrt_write(struct udma_chan *uc, int reg, u32 val) in udma_rchanrt_write() argument
386 if (!uc->rchan) in udma_rchanrt_write()
388 udma_write(uc->rchan->reg_rt, reg, val); in udma_rchanrt_write()
391 static inline void udma_rchanrt_update_bits(struct udma_chan *uc, int reg, in udma_rchanrt_update_bits() argument
394 if (!uc->rchan) in udma_rchanrt_update_bits()
396 udma_update_bits(uc->rchan->reg_rt, reg, mask, val); in udma_rchanrt_update_bits()
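Note: the hits above are the channel realtime (RT) register accessors. The guard lines visible at 356/363/371 (and their rchan twins at 379/386/394) imply each helper degrades to a no-op, or reads back 0, when that half of the channel is not allocated. A minimal sketch of the pattern, assuming udma_read()/udma_write() are the driver's readl()/writel() wrappers over the RT register window:

    /* Guarded RT accessors, tchan side; the rchan variants are the
     * same with uc->rchan->reg_rt. Sketch, not the verbatim source.
     */
    static inline u32 udma_tchanrt_read(struct udma_chan *uc, int reg)
    {
            if (!uc->tchan)
                    return 0;       /* no tchan: read back as zero */
            return udma_read(uc->tchan->reg_rt, reg);
    }

    static inline void udma_tchanrt_write(struct udma_chan *uc, int reg, u32 val)
    {
            if (!uc->tchan)
                    return;         /* no tchan: drop the write */
            udma_write(uc->tchan->reg_rt, reg, val);
    }
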
456 static void udma_reset_uchan(struct udma_chan *uc) in udma_reset_uchan() argument
458 memset(&uc->config, 0, sizeof(uc->config)); in udma_reset_uchan()
459 uc->config.remote_thread_id = -1; in udma_reset_uchan()
460 uc->config.mapped_channel_id = -1; in udma_reset_uchan()
461 uc->config.default_flow_id = -1; in udma_reset_uchan()
462 uc->state = UDMA_CHAN_IS_IDLE; in udma_reset_uchan()
465 static void udma_dump_chan_stdata(struct udma_chan *uc) in udma_dump_chan_stdata() argument
467 struct device *dev = uc->ud->dev; in udma_dump_chan_stdata()
471 if (uc->config.dir == DMA_MEM_TO_DEV || uc->config.dir == DMA_MEM_TO_MEM) { in udma_dump_chan_stdata()
476 udma_tchanrt_read(uc, offset)); in udma_dump_chan_stdata()
480 if (uc->config.dir == DMA_DEV_TO_MEM || uc->config.dir == DMA_MEM_TO_MEM) { in udma_dump_chan_stdata()
485 udma_rchanrt_read(uc, offset)); in udma_dump_chan_stdata()
501 static struct udma_desc *udma_udma_desc_from_paddr(struct udma_chan *uc, in udma_udma_desc_from_paddr() argument
504 struct udma_desc *d = uc->terminated_desc; in udma_udma_desc_from_paddr()
515 d = uc->desc; in udma_udma_desc_from_paddr()
528 static void udma_free_hwdesc(struct udma_chan *uc, struct udma_desc *d) in udma_free_hwdesc() argument
530 if (uc->use_dma_pool) { in udma_free_hwdesc()
537 dma_pool_free(uc->hdesc_pool, in udma_free_hwdesc()
544 dma_free_coherent(uc->dma_dev, d->hwdesc[0].cppi5_desc_size, in udma_free_hwdesc()
564 struct udma_chan *uc = to_udma_chan(vd->tx.chan); in udma_purge_desc_work() local
567 udma_free_hwdesc(uc, d); in udma_purge_desc_work()
580 struct udma_chan *uc = to_udma_chan(vd->tx.chan); in udma_desc_free() local
584 if (uc->terminated_desc == d) in udma_desc_free()
585 uc->terminated_desc = NULL; in udma_desc_free()
587 if (uc->use_dma_pool) { in udma_desc_free()
588 udma_free_hwdesc(uc, d); in udma_desc_free()
600 static bool udma_is_chan_running(struct udma_chan *uc) in udma_is_chan_running() argument
605 if (uc->tchan) in udma_is_chan_running()
606 trt_ctl = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_running()
607 if (uc->rchan) in udma_is_chan_running()
608 rrt_ctl = udma_rchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_running()
616 static bool udma_is_chan_paused(struct udma_chan *uc) in udma_is_chan_paused() argument
620 switch (uc->config.dir) { in udma_is_chan_paused()
622 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG); in udma_is_chan_paused()
626 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG); in udma_is_chan_paused()
630 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_paused()
643 static inline dma_addr_t udma_get_rx_flush_hwdesc_paddr(struct udma_chan *uc) in udma_get_rx_flush_hwdesc_paddr() argument
645 return uc->ud->rx_flush.hwdescs[uc->config.pkt_mode].cppi5_desc_paddr; in udma_get_rx_flush_hwdesc_paddr()
648 static int udma_push_to_ring(struct udma_chan *uc, int idx) in udma_push_to_ring() argument
650 struct udma_desc *d = uc->desc; in udma_push_to_ring()
654 switch (uc->config.dir) { in udma_push_to_ring()
656 ring = uc->rflow->fd_ring; in udma_push_to_ring()
660 ring = uc->tchan->t_ring; in udma_push_to_ring()
668 paddr = udma_get_rx_flush_hwdesc_paddr(uc); in udma_push_to_ring()
678 static bool udma_desc_is_rx_flush(struct udma_chan *uc, dma_addr_t addr) in udma_desc_is_rx_flush() argument
680 if (uc->config.dir != DMA_DEV_TO_MEM) in udma_desc_is_rx_flush()
683 if (addr == udma_get_rx_flush_hwdesc_paddr(uc)) in udma_desc_is_rx_flush()
689 static int udma_pop_from_ring(struct udma_chan *uc, dma_addr_t *addr) in udma_pop_from_ring() argument
694 switch (uc->config.dir) { in udma_pop_from_ring()
696 ring = uc->rflow->r_ring; in udma_pop_from_ring()
700 ring = uc->tchan->tc_ring; in udma_pop_from_ring()
717 if (udma_desc_is_rx_flush(uc, *addr)) in udma_pop_from_ring()
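Note: udma_push_to_ring() and udma_pop_from_ring() pick the ring from the transfer direction: DEV_TO_MEM uses the rflow's free-descriptor/completion rings, while MEM_TO_DEV and MEM_TO_MEM use the tchan's submit/completion rings. The idx == -1 case (used by udma_stop() at line 1010 below) pushes the dedicated RX flush descriptor instead of a real one. A sketch of the push side, assuming a helper — here called udma_curr_cppi5_desc_paddr() — that returns the DMA address of hardware descriptor idx, plus the k3_ringacc_ring_push() ring-accelerator API:

    static int udma_push_to_ring(struct udma_chan *uc, int idx)
    {
            struct udma_desc *d = uc->desc;
            struct k3_ring *ring = NULL;
            dma_addr_t paddr;

            switch (uc->config.dir) {
            case DMA_DEV_TO_MEM:
                    ring = uc->rflow->fd_ring;      /* free-descriptor ring */
                    break;
            case DMA_MEM_TO_DEV:
            case DMA_MEM_TO_MEM:
                    ring = uc->tchan->t_ring;       /* transmit ring */
                    break;
            default:
                    return -EINVAL;
            }

            if (idx == -1) {
                    /* RX flush descriptor is pushed with index -1 */
                    paddr = udma_get_rx_flush_hwdesc_paddr(uc);
            } else {
                    paddr = udma_curr_cppi5_desc_paddr(d, idx); /* assumed helper */
                    wmb(); /* descriptor writes must land before the push */
            }

            return k3_ringacc_ring_push(ring, &paddr);
    }
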
723 static void udma_reset_rings(struct udma_chan *uc) in udma_reset_rings() argument
728 switch (uc->config.dir) { in udma_reset_rings()
730 if (uc->rchan) { in udma_reset_rings()
731 ring1 = uc->rflow->fd_ring; in udma_reset_rings()
732 ring2 = uc->rflow->r_ring; in udma_reset_rings()
737 if (uc->tchan) { in udma_reset_rings()
738 ring1 = uc->tchan->t_ring; in udma_reset_rings()
739 ring2 = uc->tchan->tc_ring; in udma_reset_rings()
753 if (uc->terminated_desc) { in udma_reset_rings()
754 udma_desc_free(&uc->terminated_desc->vd); in udma_reset_rings()
755 uc->terminated_desc = NULL; in udma_reset_rings()
759 static void udma_decrement_byte_counters(struct udma_chan *uc, u32 val) in udma_decrement_byte_counters() argument
761 if (uc->desc->dir == DMA_DEV_TO_MEM) { in udma_decrement_byte_counters()
762 udma_rchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); in udma_decrement_byte_counters()
763 udma_rchanrt_write(uc, UDMA_CHAN_RT_SBCNT_REG, val); in udma_decrement_byte_counters()
764 if (uc->config.ep_type != PSIL_EP_NATIVE) in udma_decrement_byte_counters()
765 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_BCNT_REG, val); in udma_decrement_byte_counters()
767 udma_tchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); in udma_decrement_byte_counters()
768 udma_tchanrt_write(uc, UDMA_CHAN_RT_SBCNT_REG, val); in udma_decrement_byte_counters()
769 if (!uc->bchan && uc->config.ep_type != PSIL_EP_NATIVE) in udma_decrement_byte_counters()
770 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_BCNT_REG, val); in udma_decrement_byte_counters()
774 static void udma_reset_counters(struct udma_chan *uc) in udma_reset_counters() argument
778 if (uc->tchan) { in udma_reset_counters()
779 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_reset_counters()
780 udma_tchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); in udma_reset_counters()
782 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_reset_counters()
783 udma_tchanrt_write(uc, UDMA_CHAN_RT_SBCNT_REG, val); in udma_reset_counters()
785 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PCNT_REG); in udma_reset_counters()
786 udma_tchanrt_write(uc, UDMA_CHAN_RT_PCNT_REG, val); in udma_reset_counters()
788 if (!uc->bchan) { in udma_reset_counters()
789 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_reset_counters()
790 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_BCNT_REG, val); in udma_reset_counters()
794 if (uc->rchan) { in udma_reset_counters()
795 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_reset_counters()
796 udma_rchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); in udma_reset_counters()
798 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_reset_counters()
799 udma_rchanrt_write(uc, UDMA_CHAN_RT_SBCNT_REG, val); in udma_reset_counters()
801 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PCNT_REG); in udma_reset_counters()
802 udma_rchanrt_write(uc, UDMA_CHAN_RT_PCNT_REG, val); in udma_reset_counters()
804 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_reset_counters()
805 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_BCNT_REG, val); in udma_reset_counters()
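Note: both counter helpers above lean on the same hardware behavior, assumed here: the RT byte/packet counters are decremented by the value written to them. udma_decrement_byte_counters() subtracts a finished descriptor's byte count that way, and udma_reset_counters() zeroes each counter by writing back the value it just read. One counter in sketch form (BCNT, SBCNT, PCNT and PEER_BCNT all follow the same read-then-write-back shape):

    u32 val;

    val = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG);
    udma_tchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); /* BCNT -= val -> 0 */
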
809 static int udma_reset_chan(struct udma_chan *uc, bool hard) in udma_reset_chan() argument
811 switch (uc->config.dir) { in udma_reset_chan()
813 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, 0); in udma_reset_chan()
814 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
817 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
818 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, 0); in udma_reset_chan()
821 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
822 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
829 udma_reset_counters(uc); in udma_reset_chan()
836 memcpy(&ucc_backup, &uc->config, sizeof(uc->config)); in udma_reset_chan()
837 uc->ud->ddev.device_free_chan_resources(&uc->vc.chan); in udma_reset_chan()
840 memcpy(&uc->config, &ucc_backup, sizeof(uc->config)); in udma_reset_chan()
841 ret = uc->ud->ddev.device_alloc_chan_resources(&uc->vc.chan); in udma_reset_chan()
849 if (uc->config.dir == DMA_DEV_TO_MEM) in udma_reset_chan()
850 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_reset_chan()
855 uc->state = UDMA_CHAN_IS_IDLE; in udma_reset_chan()
860 static void udma_start_desc(struct udma_chan *uc) in udma_start_desc() argument
862 struct udma_chan_config *ucc = &uc->config; in udma_start_desc()
864 if (uc->ud->match_data->type == DMA_TYPE_UDMA && ucc->pkt_mode && in udma_start_desc()
865 (uc->cyclic || ucc->dir == DMA_DEV_TO_MEM)) { in udma_start_desc()
874 for (i = 0; i < uc->desc->sglen; i++) in udma_start_desc()
875 udma_push_to_ring(uc, i); in udma_start_desc()
877 udma_push_to_ring(uc, 0); in udma_start_desc()
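Note: udma_start_desc() is short enough to reconstruct from the hits above: on packet-mode UDMA channels that are cyclic or RX, each of the descriptor's sglen hardware descriptors must be pre-posted to the ring; every other case feeds one descriptor at a time. A sketch:

    static void udma_start_desc(struct udma_chan *uc)
    {
            struct udma_chan_config *ucc = &uc->config;

            if (uc->ud->match_data->type == DMA_TYPE_UDMA && ucc->pkt_mode &&
                (uc->cyclic || ucc->dir == DMA_DEV_TO_MEM)) {
                    int i;

                    /* Pre-post one free descriptor per packet/period */
                    for (i = 0; i < uc->desc->sglen; i++)
                            udma_push_to_ring(uc, i);
            } else {
                    /* TR mode and TX packet mode: one descriptor suffices */
                    udma_push_to_ring(uc, 0);
            }
    }
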
881 static bool udma_chan_needs_reconfiguration(struct udma_chan *uc) in udma_chan_needs_reconfiguration() argument
884 if (uc->config.ep_type == PSIL_EP_NATIVE) in udma_chan_needs_reconfiguration()
888 if (memcmp(&uc->static_tr, &uc->desc->static_tr, sizeof(uc->static_tr))) in udma_chan_needs_reconfiguration()
894 static int udma_start(struct udma_chan *uc) in udma_start() argument
896 struct virt_dma_desc *vd = vchan_next_desc(&uc->vc); in udma_start()
899 uc->desc = NULL; in udma_start()
905 uc->desc = to_udma_desc(&vd->tx); in udma_start()
908 if (udma_is_chan_running(uc) && !udma_chan_needs_reconfiguration(uc)) { in udma_start()
909 udma_start_desc(uc); in udma_start()
914 udma_reset_chan(uc, false); in udma_start()
917 udma_start_desc(uc); in udma_start()
919 switch (uc->desc->dir) { in udma_start()
922 if (uc->config.ep_type == PSIL_EP_PDMA_XY) { in udma_start()
923 u32 val = PDMA_STATIC_TR_Y(uc->desc->static_tr.elcnt) | in udma_start()
924 PDMA_STATIC_TR_X(uc->desc->static_tr.elsize); in udma_start()
926 uc->ud->match_data; in udma_start()
928 if (uc->config.enable_acc32) in udma_start()
930 if (uc->config.enable_burst) in udma_start()
933 udma_rchanrt_write(uc, in udma_start()
937 udma_rchanrt_write(uc, in udma_start()
939 PDMA_STATIC_TR_Z(uc->desc->static_tr.bstcnt, in udma_start()
943 memcpy(&uc->static_tr, &uc->desc->static_tr, in udma_start()
944 sizeof(uc->static_tr)); in udma_start()
947 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
951 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_start()
957 if (uc->config.ep_type == PSIL_EP_PDMA_XY) { in udma_start()
958 u32 val = PDMA_STATIC_TR_Y(uc->desc->static_tr.elcnt) | in udma_start()
959 PDMA_STATIC_TR_X(uc->desc->static_tr.elsize); in udma_start()
961 if (uc->config.enable_acc32) in udma_start()
963 if (uc->config.enable_burst) in udma_start()
966 udma_tchanrt_write(uc, in udma_start()
971 memcpy(&uc->static_tr, &uc->desc->static_tr, in udma_start()
972 sizeof(uc->static_tr)); in udma_start()
976 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_start()
979 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
984 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
986 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
994 uc->state = UDMA_CHAN_IS_ACTIVE; in udma_start()
1000 static int udma_stop(struct udma_chan *uc) in udma_stop() argument
1002 enum udma_chan_state old_state = uc->state; in udma_stop()
1004 uc->state = UDMA_CHAN_IS_TERMINATING; in udma_stop()
1005 reinit_completion(&uc->teardown_completed); in udma_stop()
1007 switch (uc->config.dir) { in udma_stop()
1009 if (!uc->cyclic && !uc->desc) in udma_stop()
1010 udma_push_to_ring(uc, -1); in udma_stop()
1012 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_stop()
1017 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_stop()
1020 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_stop()
1025 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_stop()
1030 uc->state = old_state; in udma_stop()
1031 complete_all(&uc->teardown_completed); in udma_stop()
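Note: udma_stop() requests a graceful teardown rather than clearing the enable bits outright: the channel is marked UDMA_CHAN_IS_TERMINATING, the teardown completion is re-armed, and a teardown/flush request is raised on the peer or channel RT enable register for the given direction (the ring IRQ handler below completes teardown_completed once the teardown marker arrives). A sketch of the MEM_TO_DEV arm, assuming the UDMA_PEER_RT_EN_* and UDMA_CHAN_RT_CTL_* bit names from the driver's header:

    case DMA_MEM_TO_DEV:
            /* ask the peer to flush what it has buffered ... */
            udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG,
                               UDMA_PEER_RT_EN_ENABLE |
                               UDMA_PEER_RT_EN_FLUSH);
            /* ... then tear the tchan down while still enabled */
            udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG,
                               UDMA_CHAN_RT_CTL_EN |
                               UDMA_CHAN_RT_CTL_TDOWN);
            break;
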
1038 static void udma_cyclic_packet_elapsed(struct udma_chan *uc) in udma_cyclic_packet_elapsed() argument
1040 struct udma_desc *d = uc->desc; in udma_cyclic_packet_elapsed()
1045 udma_push_to_ring(uc, d->desc_idx); in udma_cyclic_packet_elapsed()
1049 static inline void udma_fetch_epib(struct udma_chan *uc, struct udma_desc *d) in udma_fetch_epib() argument
1056 static bool udma_is_desc_really_done(struct udma_chan *uc, struct udma_desc *d) in udma_is_desc_really_done() argument
1066 if (uc->config.ep_type == PSIL_EP_NATIVE || in udma_is_desc_really_done()
1067 uc->config.dir != DMA_MEM_TO_DEV || !(uc->config.tx_flags & DMA_PREP_INTERRUPT)) in udma_is_desc_really_done()
1070 peer_bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_is_desc_really_done()
1071 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_is_desc_really_done()
1075 uc->tx_drain.residue = bcnt - peer_bcnt; in udma_is_desc_really_done()
1076 uc->tx_drain.tstamp = ktime_get(); in udma_is_desc_really_done()
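Note: for MEM_TO_DEV on a non-native (PDMA/remote) endpoint, a ring completion only proves UDMA pushed the data out, not that the peer consumed it, so udma_is_desc_really_done() compares the channel and peer byte counters and, if the peer is behind, records the residue and a timestamp for the udma_check_tx_completion() delayed work to poll against. A sketch of that tail, with the comparison direction assumed:

    peer_bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG);
    bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG);

    if (peer_bcnt < bcnt) {
            /* peer still draining: remember how far behind, and when */
            uc->tx_drain.residue = bcnt - peer_bcnt;
            uc->tx_drain.tstamp = ktime_get();
            return false;
    }

    return true;
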
1085 struct udma_chan *uc = container_of(work, typeof(*uc), in udma_check_tx_completion() local
1093 if (uc->desc) { in udma_check_tx_completion()
1095 residue_diff = uc->tx_drain.residue; in udma_check_tx_completion()
1096 time_diff = uc->tx_drain.tstamp; in udma_check_tx_completion()
1101 desc_done = udma_is_desc_really_done(uc, uc->desc); in udma_check_tx_completion()
1109 time_diff = ktime_sub(uc->tx_drain.tstamp, in udma_check_tx_completion()
1111 residue_diff -= uc->tx_drain.residue; in udma_check_tx_completion()
1120 uc->tx_drain.residue; in udma_check_tx_completion()
1123 schedule_delayed_work(&uc->tx_drain.work, HZ); in udma_check_tx_completion()
1132 if (uc->desc) { in udma_check_tx_completion()
1133 struct udma_desc *d = uc->desc; in udma_check_tx_completion()
1135 udma_decrement_byte_counters(uc, d->residue); in udma_check_tx_completion()
1136 udma_start(uc); in udma_check_tx_completion()
1147 struct udma_chan *uc = data; in udma_ring_irq_handler() local
1151 if (udma_pop_from_ring(uc, &paddr) || !paddr) in udma_ring_irq_handler()
1154 spin_lock(&uc->vc.lock); in udma_ring_irq_handler()
1158 complete_all(&uc->teardown_completed); in udma_ring_irq_handler()
1160 if (uc->terminated_desc) { in udma_ring_irq_handler()
1161 udma_desc_free(&uc->terminated_desc->vd); in udma_ring_irq_handler()
1162 uc->terminated_desc = NULL; in udma_ring_irq_handler()
1165 if (!uc->desc) in udma_ring_irq_handler()
1166 udma_start(uc); in udma_ring_irq_handler()
1171 d = udma_udma_desc_from_paddr(uc, paddr); in udma_ring_irq_handler()
1177 dev_err(uc->ud->dev, "not matching descriptors!\n"); in udma_ring_irq_handler()
1181 if (d == uc->desc) { in udma_ring_irq_handler()
1183 if (uc->cyclic) { in udma_ring_irq_handler()
1184 udma_cyclic_packet_elapsed(uc); in udma_ring_irq_handler()
1187 if (udma_is_desc_really_done(uc, d)) { in udma_ring_irq_handler()
1188 udma_decrement_byte_counters(uc, d->residue); in udma_ring_irq_handler()
1189 udma_start(uc); in udma_ring_irq_handler()
1192 schedule_delayed_work(&uc->tx_drain.work, in udma_ring_irq_handler()
1205 spin_unlock(&uc->vc.lock); in udma_ring_irq_handler()
1212 struct udma_chan *uc = data; in udma_udma_irq_handler() local
1215 spin_lock(&uc->vc.lock); in udma_udma_irq_handler()
1216 d = uc->desc; in udma_udma_irq_handler()
1220 if (uc->cyclic) { in udma_udma_irq_handler()
1224 udma_decrement_byte_counters(uc, d->residue); in udma_udma_irq_handler()
1225 udma_start(uc); in udma_udma_irq_handler()
1230 spin_unlock(&uc->vc.lock); in udma_udma_irq_handler()
1364 static int bcdma_get_bchan(struct udma_chan *uc) in bcdma_get_bchan() argument
1366 struct udma_dev *ud = uc->ud; in bcdma_get_bchan()
1370 if (uc->bchan) { in bcdma_get_bchan()
1372 uc->id, uc->bchan->id); in bcdma_get_bchan()
1380 if (uc->config.tr_trigger_type) in bcdma_get_bchan()
1385 uc->bchan = __udma_reserve_bchan(ud, tpl, -1); in bcdma_get_bchan()
1386 if (IS_ERR(uc->bchan)) { in bcdma_get_bchan()
1387 ret = PTR_ERR(uc->bchan); in bcdma_get_bchan()
1388 uc->bchan = NULL; in bcdma_get_bchan()
1392 uc->tchan = uc->bchan; in bcdma_get_bchan()
1397 static int udma_get_tchan(struct udma_chan *uc) in udma_get_tchan() argument
1399 struct udma_dev *ud = uc->ud; in udma_get_tchan()
1402 if (uc->tchan) { in udma_get_tchan()
1404 uc->id, uc->tchan->id); in udma_get_tchan()
1413 uc->tchan = __udma_reserve_tchan(ud, uc->config.channel_tpl, in udma_get_tchan()
1414 uc->config.mapped_channel_id); in udma_get_tchan()
1415 if (IS_ERR(uc->tchan)) { in udma_get_tchan()
1416 ret = PTR_ERR(uc->tchan); in udma_get_tchan()
1417 uc->tchan = NULL; in udma_get_tchan()
1425 if (uc->config.default_flow_id >= 0) in udma_get_tchan()
1426 tflow_id = uc->config.default_flow_id; in udma_get_tchan()
1428 tflow_id = uc->tchan->id; in udma_get_tchan()
1432 clear_bit(uc->tchan->id, ud->tchan_map); in udma_get_tchan()
1433 uc->tchan = NULL; in udma_get_tchan()
1437 uc->tchan->tflow_id = tflow_id; in udma_get_tchan()
1440 uc->tchan->tflow_id = -1; in udma_get_tchan()
1446 static int udma_get_rchan(struct udma_chan *uc) in udma_get_rchan() argument
1448 struct udma_dev *ud = uc->ud; in udma_get_rchan()
1451 if (uc->rchan) { in udma_get_rchan()
1453 uc->id, uc->rchan->id); in udma_get_rchan()
1462 uc->rchan = __udma_reserve_rchan(ud, uc->config.channel_tpl, in udma_get_rchan()
1463 uc->config.mapped_channel_id); in udma_get_rchan()
1464 if (IS_ERR(uc->rchan)) { in udma_get_rchan()
1465 ret = PTR_ERR(uc->rchan); in udma_get_rchan()
1466 uc->rchan = NULL; in udma_get_rchan()
1473 static int udma_get_chan_pair(struct udma_chan *uc) in udma_get_chan_pair() argument
1475 struct udma_dev *ud = uc->ud; in udma_get_chan_pair()
1478 if ((uc->tchan && uc->rchan) && uc->tchan->id == uc->rchan->id) { in udma_get_chan_pair()
1480 uc->id, uc->tchan->id); in udma_get_chan_pair()
1484 if (uc->tchan) { in udma_get_chan_pair()
1486 uc->id, uc->tchan->id); in udma_get_chan_pair()
1488 } else if (uc->rchan) { in udma_get_chan_pair()
1490 uc->id, uc->rchan->id); in udma_get_chan_pair()
1512 uc->tchan = &ud->tchans[chan_id]; in udma_get_chan_pair()
1513 uc->rchan = &ud->rchans[chan_id]; in udma_get_chan_pair()
1516 uc->tchan->tflow_id = -1; in udma_get_chan_pair()
1521 static int udma_get_rflow(struct udma_chan *uc, int flow_id) in udma_get_rflow() argument
1523 struct udma_dev *ud = uc->ud; in udma_get_rflow()
1526 if (!uc->rchan) { in udma_get_rflow()
1527 dev_err(ud->dev, "chan%d: does not have rchan??\n", uc->id); in udma_get_rflow()
1531 if (uc->rflow) { in udma_get_rflow()
1533 uc->id, uc->rflow->id); in udma_get_rflow()
1537 uc->rflow = __udma_get_rflow(ud, flow_id); in udma_get_rflow()
1538 if (IS_ERR(uc->rflow)) { in udma_get_rflow()
1539 ret = PTR_ERR(uc->rflow); in udma_get_rflow()
1540 uc->rflow = NULL; in udma_get_rflow()
1547 static void bcdma_put_bchan(struct udma_chan *uc) in bcdma_put_bchan() argument
1549 struct udma_dev *ud = uc->ud; in bcdma_put_bchan()
1551 if (uc->bchan) { in bcdma_put_bchan()
1552 dev_dbg(ud->dev, "chan%d: put bchan%d\n", uc->id, in bcdma_put_bchan()
1553 uc->bchan->id); in bcdma_put_bchan()
1554 clear_bit(uc->bchan->id, ud->bchan_map); in bcdma_put_bchan()
1555 uc->bchan = NULL; in bcdma_put_bchan()
1556 uc->tchan = NULL; in bcdma_put_bchan()
1560 static void udma_put_rchan(struct udma_chan *uc) in udma_put_rchan() argument
1562 struct udma_dev *ud = uc->ud; in udma_put_rchan()
1564 if (uc->rchan) { in udma_put_rchan()
1565 dev_dbg(ud->dev, "chan%d: put rchan%d\n", uc->id, in udma_put_rchan()
1566 uc->rchan->id); in udma_put_rchan()
1567 clear_bit(uc->rchan->id, ud->rchan_map); in udma_put_rchan()
1568 uc->rchan = NULL; in udma_put_rchan()
1572 static void udma_put_tchan(struct udma_chan *uc) in udma_put_tchan() argument
1574 struct udma_dev *ud = uc->ud; in udma_put_tchan()
1576 if (uc->tchan) { in udma_put_tchan()
1577 dev_dbg(ud->dev, "chan%d: put tchan%d\n", uc->id, in udma_put_tchan()
1578 uc->tchan->id); in udma_put_tchan()
1579 clear_bit(uc->tchan->id, ud->tchan_map); in udma_put_tchan()
1581 if (uc->tchan->tflow_id >= 0) in udma_put_tchan()
1582 clear_bit(uc->tchan->tflow_id, ud->tflow_map); in udma_put_tchan()
1584 uc->tchan = NULL; in udma_put_tchan()
1588 static void udma_put_rflow(struct udma_chan *uc) in udma_put_rflow() argument
1590 struct udma_dev *ud = uc->ud; in udma_put_rflow()
1592 if (uc->rflow) { in udma_put_rflow()
1593 dev_dbg(ud->dev, "chan%d: put rflow%d\n", uc->id, in udma_put_rflow()
1594 uc->rflow->id); in udma_put_rflow()
1595 __udma_put_rflow(ud, uc->rflow); in udma_put_rflow()
1596 uc->rflow = NULL; in udma_put_rflow()
1600 static void bcdma_free_bchan_resources(struct udma_chan *uc) in bcdma_free_bchan_resources() argument
1602 if (!uc->bchan) in bcdma_free_bchan_resources()
1605 k3_ringacc_ring_free(uc->bchan->tc_ring); in bcdma_free_bchan_resources()
1606 k3_ringacc_ring_free(uc->bchan->t_ring); in bcdma_free_bchan_resources()
1607 uc->bchan->tc_ring = NULL; in bcdma_free_bchan_resources()
1608 uc->bchan->t_ring = NULL; in bcdma_free_bchan_resources()
1609 k3_configure_chan_coherency(&uc->vc.chan, 0); in bcdma_free_bchan_resources()
1611 bcdma_put_bchan(uc); in bcdma_free_bchan_resources()
1614 static int bcdma_alloc_bchan_resources(struct udma_chan *uc) in bcdma_alloc_bchan_resources() argument
1617 struct udma_dev *ud = uc->ud; in bcdma_alloc_bchan_resources()
1620 ret = bcdma_get_bchan(uc); in bcdma_alloc_bchan_resources()
1624 ret = k3_ringacc_request_rings_pair(ud->ringacc, uc->bchan->id, -1, in bcdma_alloc_bchan_resources()
1625 &uc->bchan->t_ring, in bcdma_alloc_bchan_resources()
1626 &uc->bchan->tc_ring); in bcdma_alloc_bchan_resources()
1637 k3_configure_chan_coherency(&uc->vc.chan, ud->asel); in bcdma_alloc_bchan_resources()
1639 ring_cfg.dma_dev = dmaengine_get_dma_device(&uc->vc.chan); in bcdma_alloc_bchan_resources()
1641 ret = k3_ringacc_ring_cfg(uc->bchan->t_ring, &ring_cfg); in bcdma_alloc_bchan_resources()
1648 k3_ringacc_ring_free(uc->bchan->tc_ring); in bcdma_alloc_bchan_resources()
1649 uc->bchan->tc_ring = NULL; in bcdma_alloc_bchan_resources()
1650 k3_ringacc_ring_free(uc->bchan->t_ring); in bcdma_alloc_bchan_resources()
1651 uc->bchan->t_ring = NULL; in bcdma_alloc_bchan_resources()
1652 k3_configure_chan_coherency(&uc->vc.chan, 0); in bcdma_alloc_bchan_resources()
1654 bcdma_put_bchan(uc); in bcdma_alloc_bchan_resources()
1659 static void udma_free_tx_resources(struct udma_chan *uc) in udma_free_tx_resources() argument
1661 if (!uc->tchan) in udma_free_tx_resources()
1664 k3_ringacc_ring_free(uc->tchan->t_ring); in udma_free_tx_resources()
1665 k3_ringacc_ring_free(uc->tchan->tc_ring); in udma_free_tx_resources()
1666 uc->tchan->t_ring = NULL; in udma_free_tx_resources()
1667 uc->tchan->tc_ring = NULL; in udma_free_tx_resources()
1669 udma_put_tchan(uc); in udma_free_tx_resources()
1672 static int udma_alloc_tx_resources(struct udma_chan *uc) in udma_alloc_tx_resources() argument
1675 struct udma_dev *ud = uc->ud; in udma_alloc_tx_resources()
1679 ret = udma_get_tchan(uc); in udma_alloc_tx_resources()
1683 tchan = uc->tchan; in udma_alloc_tx_resources()
1705 k3_configure_chan_coherency(&uc->vc.chan, uc->config.asel); in udma_alloc_tx_resources()
1706 ring_cfg.asel = uc->config.asel; in udma_alloc_tx_resources()
1707 ring_cfg.dma_dev = dmaengine_get_dma_device(&uc->vc.chan); in udma_alloc_tx_resources()
1719 k3_ringacc_ring_free(uc->tchan->tc_ring); in udma_alloc_tx_resources()
1720 uc->tchan->tc_ring = NULL; in udma_alloc_tx_resources()
1721 k3_ringacc_ring_free(uc->tchan->t_ring); in udma_alloc_tx_resources()
1722 uc->tchan->t_ring = NULL; in udma_alloc_tx_resources()
1724 udma_put_tchan(uc); in udma_alloc_tx_resources()
1729 static void udma_free_rx_resources(struct udma_chan *uc) in udma_free_rx_resources() argument
1731 if (!uc->rchan) in udma_free_rx_resources()
1734 if (uc->rflow) { in udma_free_rx_resources()
1735 struct udma_rflow *rflow = uc->rflow; in udma_free_rx_resources()
1742 udma_put_rflow(uc); in udma_free_rx_resources()
1745 udma_put_rchan(uc); in udma_free_rx_resources()
1748 static int udma_alloc_rx_resources(struct udma_chan *uc) in udma_alloc_rx_resources() argument
1750 struct udma_dev *ud = uc->ud; in udma_alloc_rx_resources()
1756 ret = udma_get_rchan(uc); in udma_alloc_rx_resources()
1761 if (uc->config.dir == DMA_MEM_TO_MEM) in udma_alloc_rx_resources()
1764 if (uc->config.default_flow_id >= 0) in udma_alloc_rx_resources()
1765 ret = udma_get_rflow(uc, uc->config.default_flow_id); in udma_alloc_rx_resources()
1767 ret = udma_get_rflow(uc, uc->rchan->id); in udma_alloc_rx_resources()
1774 rflow = uc->rflow; in udma_alloc_rx_resources()
1779 uc->rchan->id; in udma_alloc_rx_resources()
1792 if (uc->config.pkt_mode) in udma_alloc_rx_resources()
1802 k3_configure_chan_coherency(&uc->vc.chan, uc->config.asel); in udma_alloc_rx_resources()
1803 ring_cfg.asel = uc->config.asel; in udma_alloc_rx_resources()
1804 ring_cfg.dma_dev = dmaengine_get_dma_device(&uc->vc.chan); in udma_alloc_rx_resources()
1823 udma_put_rflow(uc); in udma_alloc_rx_resources()
1825 udma_put_rchan(uc); in udma_alloc_rx_resources()
1862 static int udma_tisci_m2m_channel_config(struct udma_chan *uc) in udma_tisci_m2m_channel_config() argument
1864 struct udma_dev *ud = uc->ud; in udma_tisci_m2m_channel_config()
1867 struct udma_tchan *tchan = uc->tchan; in udma_tisci_m2m_channel_config()
1868 struct udma_rchan *rchan = uc->rchan; in udma_tisci_m2m_channel_config()
1921 static int bcdma_tisci_m2m_channel_config(struct udma_chan *uc) in bcdma_tisci_m2m_channel_config() argument
1923 struct udma_dev *ud = uc->ud; in bcdma_tisci_m2m_channel_config()
1927 struct udma_bchan *bchan = uc->bchan; in bcdma_tisci_m2m_channel_config()
1954 static int udma_tisci_tx_channel_config(struct udma_chan *uc) in udma_tisci_tx_channel_config() argument
1956 struct udma_dev *ud = uc->ud; in udma_tisci_tx_channel_config()
1959 struct udma_tchan *tchan = uc->tchan; in udma_tisci_tx_channel_config()
1965 if (uc->config.pkt_mode) { in udma_tisci_tx_channel_config()
1967 fetch_size = cppi5_hdesc_calc_size(uc->config.needs_epib, in udma_tisci_tx_channel_config()
1968 uc->config.psd_size, 0); in udma_tisci_tx_channel_config()
1978 req_tx.tx_supr_tdpkt = uc->config.notdpkt; in udma_tisci_tx_channel_config()
1981 req_tx.tx_atype = uc->config.atype; in udma_tisci_tx_channel_config()
1982 if (uc->config.ep_type == PSIL_EP_PDMA_XY && in udma_tisci_tx_channel_config()
1997 static int bcdma_tisci_tx_channel_config(struct udma_chan *uc) in bcdma_tisci_tx_channel_config() argument
1999 struct udma_dev *ud = uc->ud; in bcdma_tisci_tx_channel_config()
2002 struct udma_tchan *tchan = uc->tchan; in bcdma_tisci_tx_channel_config()
2009 req_tx.tx_supr_tdpkt = uc->config.notdpkt; in bcdma_tisci_tx_channel_config()
2026 static int udma_tisci_rx_channel_config(struct udma_chan *uc) in udma_tisci_rx_channel_config() argument
2028 struct udma_dev *ud = uc->ud; in udma_tisci_rx_channel_config()
2031 struct udma_rchan *rchan = uc->rchan; in udma_tisci_rx_channel_config()
2032 int fd_ring = k3_ringacc_get_ring_id(uc->rflow->fd_ring); in udma_tisci_rx_channel_config()
2033 int rx_ring = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_tisci_rx_channel_config()
2039 if (uc->config.pkt_mode) { in udma_tisci_rx_channel_config()
2041 fetch_size = cppi5_hdesc_calc_size(uc->config.needs_epib, in udma_tisci_rx_channel_config()
2042 uc->config.psd_size, 0); in udma_tisci_rx_channel_config()
2054 req_rx.rx_atype = uc->config.atype; in udma_tisci_rx_channel_config()
2080 if (uc->config.needs_epib) in udma_tisci_rx_channel_config()
2084 if (uc->config.psd_size) in udma_tisci_rx_channel_config()
2107 static int bcdma_tisci_rx_channel_config(struct udma_chan *uc) in bcdma_tisci_rx_channel_config() argument
2109 struct udma_dev *ud = uc->ud; in bcdma_tisci_rx_channel_config()
2112 struct udma_rchan *rchan = uc->rchan; in bcdma_tisci_rx_channel_config()
2127 static int pktdma_tisci_rx_channel_config(struct udma_chan *uc) in pktdma_tisci_rx_channel_config() argument
2129 struct udma_dev *ud = uc->ud; in pktdma_tisci_rx_channel_config()
2138 req_rx.index = uc->rchan->id; in pktdma_tisci_rx_channel_config()
2142 dev_err(ud->dev, "rchan%d cfg failed %d\n", uc->rchan->id, ret); in pktdma_tisci_rx_channel_config()
2152 flow_req.flow_index = uc->rflow->id; in pktdma_tisci_rx_channel_config()
2154 if (uc->config.needs_epib) in pktdma_tisci_rx_channel_config()
2158 if (uc->config.psd_size) in pktdma_tisci_rx_channel_config()
2167 dev_err(ud->dev, "flow%d config failed: %d\n", uc->rflow->id, in pktdma_tisci_rx_channel_config()
2175 struct udma_chan *uc = to_udma_chan(chan); in udma_alloc_chan_resources() local
2182 uc->dma_dev = ud->dev; in udma_alloc_chan_resources()
2184 if (uc->config.pkt_mode || uc->config.dir == DMA_MEM_TO_MEM) { in udma_alloc_chan_resources()
2185 uc->use_dma_pool = true; in udma_alloc_chan_resources()
2187 if (uc->config.dir == DMA_MEM_TO_MEM) { in udma_alloc_chan_resources()
2188 uc->config.hdesc_size = cppi5_trdesc_calc_size( in udma_alloc_chan_resources()
2190 uc->config.pkt_mode = false; in udma_alloc_chan_resources()
2194 if (uc->use_dma_pool) { in udma_alloc_chan_resources()
2195 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in udma_alloc_chan_resources()
2196 uc->config.hdesc_size, in udma_alloc_chan_resources()
2199 if (!uc->hdesc_pool) { in udma_alloc_chan_resources()
2202 uc->use_dma_pool = false; in udma_alloc_chan_resources()
2212 reinit_completion(&uc->teardown_completed); in udma_alloc_chan_resources()
2213 complete_all(&uc->teardown_completed); in udma_alloc_chan_resources()
2214 uc->state = UDMA_CHAN_IS_IDLE; in udma_alloc_chan_resources()
2216 switch (uc->config.dir) { in udma_alloc_chan_resources()
2219 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in udma_alloc_chan_resources()
2220 uc->id); in udma_alloc_chan_resources()
2222 ret = udma_get_chan_pair(uc); in udma_alloc_chan_resources()
2226 ret = udma_alloc_tx_resources(uc); in udma_alloc_chan_resources()
2228 udma_put_rchan(uc); in udma_alloc_chan_resources()
2232 ret = udma_alloc_rx_resources(uc); in udma_alloc_chan_resources()
2234 udma_free_tx_resources(uc); in udma_alloc_chan_resources()
2238 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2239 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2242 irq_ring = uc->tchan->tc_ring; in udma_alloc_chan_resources()
2243 irq_udma_idx = uc->tchan->id; in udma_alloc_chan_resources()
2245 ret = udma_tisci_m2m_channel_config(uc); in udma_alloc_chan_resources()
2249 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in udma_alloc_chan_resources()
2250 uc->id); in udma_alloc_chan_resources()
2252 ret = udma_alloc_tx_resources(uc); in udma_alloc_chan_resources()
2256 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2257 uc->config.dst_thread = uc->config.remote_thread_id; in udma_alloc_chan_resources()
2258 uc->config.dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET; in udma_alloc_chan_resources()
2260 irq_ring = uc->tchan->tc_ring; in udma_alloc_chan_resources()
2261 irq_udma_idx = uc->tchan->id; in udma_alloc_chan_resources()
2263 ret = udma_tisci_tx_channel_config(uc); in udma_alloc_chan_resources()
2267 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in udma_alloc_chan_resources()
2268 uc->id); in udma_alloc_chan_resources()
2270 ret = udma_alloc_rx_resources(uc); in udma_alloc_chan_resources()
2274 uc->config.src_thread = uc->config.remote_thread_id; in udma_alloc_chan_resources()
2275 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2278 irq_ring = uc->rflow->r_ring; in udma_alloc_chan_resources()
2279 irq_udma_idx = soc_data->oes.udma_rchan + uc->rchan->id; in udma_alloc_chan_resources()
2281 ret = udma_tisci_rx_channel_config(uc); in udma_alloc_chan_resources()
2285 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in udma_alloc_chan_resources()
2286 __func__, uc->id, uc->config.dir); in udma_alloc_chan_resources()
2296 if (udma_is_chan_running(uc)) { in udma_alloc_chan_resources()
2297 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in udma_alloc_chan_resources()
2298 udma_reset_chan(uc, false); in udma_alloc_chan_resources()
2299 if (udma_is_chan_running(uc)) { in udma_alloc_chan_resources()
2300 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in udma_alloc_chan_resources()
2307 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2310 uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2314 uc->psil_paired = true; in udma_alloc_chan_resources()
2316 uc->irq_num_ring = k3_ringacc_get_ring_irq_num(irq_ring); in udma_alloc_chan_resources()
2317 if (uc->irq_num_ring <= 0) { in udma_alloc_chan_resources()
2324 ret = request_irq(uc->irq_num_ring, udma_ring_irq_handler, in udma_alloc_chan_resources()
2325 IRQF_TRIGGER_HIGH, uc->name, uc); in udma_alloc_chan_resources()
2327 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in udma_alloc_chan_resources()
2332 if (is_slave_direction(uc->config.dir) && !uc->config.pkt_mode) { in udma_alloc_chan_resources()
2333 uc->irq_num_udma = msi_get_virq(ud->dev, irq_udma_idx); in udma_alloc_chan_resources()
2334 if (uc->irq_num_udma <= 0) { in udma_alloc_chan_resources()
2337 free_irq(uc->irq_num_ring, uc); in udma_alloc_chan_resources()
2342 ret = request_irq(uc->irq_num_udma, udma_udma_irq_handler, 0, in udma_alloc_chan_resources()
2343 uc->name, uc); in udma_alloc_chan_resources()
2346 uc->id); in udma_alloc_chan_resources()
2347 free_irq(uc->irq_num_ring, uc); in udma_alloc_chan_resources()
2351 uc->irq_num_udma = 0; in udma_alloc_chan_resources()
2354 udma_reset_rings(uc); in udma_alloc_chan_resources()
2359 uc->irq_num_ring = 0; in udma_alloc_chan_resources()
2360 uc->irq_num_udma = 0; in udma_alloc_chan_resources()
2362 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2363 uc->psil_paired = false; in udma_alloc_chan_resources()
2365 udma_free_tx_resources(uc); in udma_alloc_chan_resources()
2366 udma_free_rx_resources(uc); in udma_alloc_chan_resources()
2368 udma_reset_uchan(uc); in udma_alloc_chan_resources()
2370 if (uc->use_dma_pool) { in udma_alloc_chan_resources()
2371 dma_pool_destroy(uc->hdesc_pool); in udma_alloc_chan_resources()
2372 uc->use_dma_pool = false; in udma_alloc_chan_resources()
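Note: all three *_alloc_chan_resources() paths in this listing (UDMA here, BCDMA and PKTDMA below) share one shape: reserve the channel halves and rings, configure the channel through TISCI firmware, pair the PSI-L threads, request the ring (and, for TR channels, the TR-event) IRQ, then reset the rings. A heavily simplified sketch of that sequence for a MEM_TO_DEV channel, error unwinding omitted and the demo_* name illustrative:

    static int demo_alloc_flow(struct udma_chan *uc)
    {
            struct udma_dev *ud = uc->ud;
            int ret;

            ret = udma_alloc_tx_resources(uc);      /* tchan + ring pair */
            if (ret)
                    return ret;

            ret = udma_tisci_tx_channel_config(uc); /* firmware setup */
            if (ret)
                    return ret;

            ret = navss_psil_pair(ud, uc->config.src_thread,
                                  uc->config.dst_thread);
            if (ret)
                    return ret;
            uc->psil_paired = true;

            /* ring IRQ via request_irq(uc->irq_num_ring, ...) elided */
            udma_reset_rings(uc);
            return 0;
    }
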
2380 struct udma_chan *uc = to_udma_chan(chan); in bcdma_alloc_chan_resources() local
2387 uc->config.pkt_mode = false; in bcdma_alloc_chan_resources()
2393 reinit_completion(&uc->teardown_completed); in bcdma_alloc_chan_resources()
2394 complete_all(&uc->teardown_completed); in bcdma_alloc_chan_resources()
2395 uc->state = UDMA_CHAN_IS_IDLE; in bcdma_alloc_chan_resources()
2397 switch (uc->config.dir) { in bcdma_alloc_chan_resources()
2400 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2401 uc->id); in bcdma_alloc_chan_resources()
2403 ret = bcdma_alloc_bchan_resources(uc); in bcdma_alloc_chan_resources()
2407 irq_ring_idx = uc->bchan->id + oes->bcdma_bchan_ring; in bcdma_alloc_chan_resources()
2408 irq_udma_idx = uc->bchan->id + oes->bcdma_bchan_data; in bcdma_alloc_chan_resources()
2410 ret = bcdma_tisci_m2m_channel_config(uc); in bcdma_alloc_chan_resources()
2414 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in bcdma_alloc_chan_resources()
2415 uc->id); in bcdma_alloc_chan_resources()
2417 ret = udma_alloc_tx_resources(uc); in bcdma_alloc_chan_resources()
2419 uc->config.remote_thread_id = -1; in bcdma_alloc_chan_resources()
2423 uc->config.src_thread = ud->psil_base + uc->tchan->id; in bcdma_alloc_chan_resources()
2424 uc->config.dst_thread = uc->config.remote_thread_id; in bcdma_alloc_chan_resources()
2425 uc->config.dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET; in bcdma_alloc_chan_resources()
2427 irq_ring_idx = uc->tchan->id + oes->bcdma_tchan_ring; in bcdma_alloc_chan_resources()
2428 irq_udma_idx = uc->tchan->id + oes->bcdma_tchan_data; in bcdma_alloc_chan_resources()
2430 ret = bcdma_tisci_tx_channel_config(uc); in bcdma_alloc_chan_resources()
2434 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2435 uc->id); in bcdma_alloc_chan_resources()
2437 ret = udma_alloc_rx_resources(uc); in bcdma_alloc_chan_resources()
2439 uc->config.remote_thread_id = -1; in bcdma_alloc_chan_resources()
2443 uc->config.src_thread = uc->config.remote_thread_id; in bcdma_alloc_chan_resources()
2444 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in bcdma_alloc_chan_resources()
2447 irq_ring_idx = uc->rchan->id + oes->bcdma_rchan_ring; in bcdma_alloc_chan_resources()
2448 irq_udma_idx = uc->rchan->id + oes->bcdma_rchan_data; in bcdma_alloc_chan_resources()
2450 ret = bcdma_tisci_rx_channel_config(uc); in bcdma_alloc_chan_resources()
2454 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in bcdma_alloc_chan_resources()
2455 __func__, uc->id, uc->config.dir); in bcdma_alloc_chan_resources()
2463 if (udma_is_chan_running(uc)) { in bcdma_alloc_chan_resources()
2464 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in bcdma_alloc_chan_resources()
2465 udma_reset_chan(uc, false); in bcdma_alloc_chan_resources()
2466 if (udma_is_chan_running(uc)) { in bcdma_alloc_chan_resources()
2467 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in bcdma_alloc_chan_resources()
2473 uc->dma_dev = dmaengine_get_dma_device(chan); in bcdma_alloc_chan_resources()
2474 if (uc->config.dir == DMA_MEM_TO_MEM && !uc->config.tr_trigger_type) { in bcdma_alloc_chan_resources()
2475 uc->config.hdesc_size = cppi5_trdesc_calc_size( in bcdma_alloc_chan_resources()
2478 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in bcdma_alloc_chan_resources()
2479 uc->config.hdesc_size, in bcdma_alloc_chan_resources()
2482 if (!uc->hdesc_pool) { in bcdma_alloc_chan_resources()
2485 uc->use_dma_pool = false; in bcdma_alloc_chan_resources()
2490 uc->use_dma_pool = true; in bcdma_alloc_chan_resources()
2491 } else if (uc->config.dir != DMA_MEM_TO_MEM) { in bcdma_alloc_chan_resources()
2493 ret = navss_psil_pair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2494 uc->config.dst_thread); in bcdma_alloc_chan_resources()
2498 uc->config.src_thread, uc->config.dst_thread); in bcdma_alloc_chan_resources()
2502 uc->psil_paired = true; in bcdma_alloc_chan_resources()
2505 uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx); in bcdma_alloc_chan_resources()
2506 if (uc->irq_num_ring <= 0) { in bcdma_alloc_chan_resources()
2513 ret = request_irq(uc->irq_num_ring, udma_ring_irq_handler, in bcdma_alloc_chan_resources()
2514 IRQF_TRIGGER_HIGH, uc->name, uc); in bcdma_alloc_chan_resources()
2516 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in bcdma_alloc_chan_resources()
2521 if (is_slave_direction(uc->config.dir)) { in bcdma_alloc_chan_resources()
2522 uc->irq_num_udma = msi_get_virq(ud->dev, irq_udma_idx); in bcdma_alloc_chan_resources()
2523 if (uc->irq_num_udma <= 0) { in bcdma_alloc_chan_resources()
2526 free_irq(uc->irq_num_ring, uc); in bcdma_alloc_chan_resources()
2531 ret = request_irq(uc->irq_num_udma, udma_udma_irq_handler, 0, in bcdma_alloc_chan_resources()
2532 uc->name, uc); in bcdma_alloc_chan_resources()
2535 uc->id); in bcdma_alloc_chan_resources()
2536 free_irq(uc->irq_num_ring, uc); in bcdma_alloc_chan_resources()
2540 uc->irq_num_udma = 0; in bcdma_alloc_chan_resources()
2543 udma_reset_rings(uc); in bcdma_alloc_chan_resources()
2545 INIT_DELAYED_WORK_ONSTACK(&uc->tx_drain.work, in bcdma_alloc_chan_resources()
2550 uc->irq_num_ring = 0; in bcdma_alloc_chan_resources()
2551 uc->irq_num_udma = 0; in bcdma_alloc_chan_resources()
2553 if (uc->psil_paired) in bcdma_alloc_chan_resources()
2554 navss_psil_unpair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2555 uc->config.dst_thread); in bcdma_alloc_chan_resources()
2556 uc->psil_paired = false; in bcdma_alloc_chan_resources()
2558 bcdma_free_bchan_resources(uc); in bcdma_alloc_chan_resources()
2559 udma_free_tx_resources(uc); in bcdma_alloc_chan_resources()
2560 udma_free_rx_resources(uc); in bcdma_alloc_chan_resources()
2562 udma_reset_uchan(uc); in bcdma_alloc_chan_resources()
2564 if (uc->use_dma_pool) { in bcdma_alloc_chan_resources()
2565 dma_pool_destroy(uc->hdesc_pool); in bcdma_alloc_chan_resources()
2566 uc->use_dma_pool = false; in bcdma_alloc_chan_resources()
2575 struct udma_chan *uc = to_udma_chan(chan); in bcdma_router_config() local
2578 if (!uc->bchan) in bcdma_router_config()
2581 if (uc->config.tr_trigger_type != 1 && uc->config.tr_trigger_type != 2) in bcdma_router_config()
2584 trigger_event = uc->ud->soc_data->bcdma_trigger_event_offset; in bcdma_router_config()
2585 trigger_event += (uc->bchan->id * 2) + uc->config.tr_trigger_type - 1; in bcdma_router_config()
2592 struct udma_chan *uc = to_udma_chan(chan); in pktdma_alloc_chan_resources() local
2602 reinit_completion(&uc->teardown_completed); in pktdma_alloc_chan_resources()
2603 complete_all(&uc->teardown_completed); in pktdma_alloc_chan_resources()
2604 uc->state = UDMA_CHAN_IS_IDLE; in pktdma_alloc_chan_resources()
2606 switch (uc->config.dir) { in pktdma_alloc_chan_resources()
2609 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in pktdma_alloc_chan_resources()
2610 uc->id); in pktdma_alloc_chan_resources()
2612 ret = udma_alloc_tx_resources(uc); in pktdma_alloc_chan_resources()
2614 uc->config.remote_thread_id = -1; in pktdma_alloc_chan_resources()
2618 uc->config.src_thread = ud->psil_base + uc->tchan->id; in pktdma_alloc_chan_resources()
2619 uc->config.dst_thread = uc->config.remote_thread_id; in pktdma_alloc_chan_resources()
2620 uc->config.dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET; in pktdma_alloc_chan_resources()
2622 irq_ring_idx = uc->tchan->tflow_id + oes->pktdma_tchan_flow; in pktdma_alloc_chan_resources()
2624 ret = pktdma_tisci_tx_channel_config(uc); in pktdma_alloc_chan_resources()
2628 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in pktdma_alloc_chan_resources()
2629 uc->id); in pktdma_alloc_chan_resources()
2631 ret = udma_alloc_rx_resources(uc); in pktdma_alloc_chan_resources()
2633 uc->config.remote_thread_id = -1; in pktdma_alloc_chan_resources()
2637 uc->config.src_thread = uc->config.remote_thread_id; in pktdma_alloc_chan_resources()
2638 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in pktdma_alloc_chan_resources()
2641 irq_ring_idx = uc->rflow->id + oes->pktdma_rchan_flow; in pktdma_alloc_chan_resources()
2643 ret = pktdma_tisci_rx_channel_config(uc); in pktdma_alloc_chan_resources()
2647 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in pktdma_alloc_chan_resources()
2648 __func__, uc->id, uc->config.dir); in pktdma_alloc_chan_resources()
2656 if (udma_is_chan_running(uc)) { in pktdma_alloc_chan_resources()
2657 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in pktdma_alloc_chan_resources()
2658 udma_reset_chan(uc, false); in pktdma_alloc_chan_resources()
2659 if (udma_is_chan_running(uc)) { in pktdma_alloc_chan_resources()
2660 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in pktdma_alloc_chan_resources()
2666 uc->dma_dev = dmaengine_get_dma_device(chan); in pktdma_alloc_chan_resources()
2667 uc->hdesc_pool = dma_pool_create(uc->name, uc->dma_dev, in pktdma_alloc_chan_resources()
2668 uc->config.hdesc_size, ud->desc_align, in pktdma_alloc_chan_resources()
2670 if (!uc->hdesc_pool) { in pktdma_alloc_chan_resources()
2673 uc->use_dma_pool = false; in pktdma_alloc_chan_resources()
2678 uc->use_dma_pool = true; in pktdma_alloc_chan_resources()
2681 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2684 uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2688 uc->psil_paired = true; in pktdma_alloc_chan_resources()
2690 uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx); in pktdma_alloc_chan_resources()
2691 if (uc->irq_num_ring <= 0) { in pktdma_alloc_chan_resources()
2698 ret = request_irq(uc->irq_num_ring, udma_ring_irq_handler, in pktdma_alloc_chan_resources()
2699 IRQF_TRIGGER_HIGH, uc->name, uc); in pktdma_alloc_chan_resources()
2701 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in pktdma_alloc_chan_resources()
2705 uc->irq_num_udma = 0; in pktdma_alloc_chan_resources()
2707 udma_reset_rings(uc); in pktdma_alloc_chan_resources()
2709 INIT_DELAYED_WORK_ONSTACK(&uc->tx_drain.work, in pktdma_alloc_chan_resources()
2712 if (uc->tchan) in pktdma_alloc_chan_resources()
2715 uc->id, uc->tchan->id, uc->tchan->tflow_id, in pktdma_alloc_chan_resources()
2716 uc->config.remote_thread_id); in pktdma_alloc_chan_resources()
2717 else if (uc->rchan) in pktdma_alloc_chan_resources()
2720 uc->id, uc->rchan->id, uc->rflow->id, in pktdma_alloc_chan_resources()
2721 uc->config.remote_thread_id); in pktdma_alloc_chan_resources()
2725 uc->irq_num_ring = 0; in pktdma_alloc_chan_resources()
2727 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2728 uc->psil_paired = false; in pktdma_alloc_chan_resources()
2730 udma_free_tx_resources(uc); in pktdma_alloc_chan_resources()
2731 udma_free_rx_resources(uc); in pktdma_alloc_chan_resources()
2733 udma_reset_uchan(uc); in pktdma_alloc_chan_resources()
2735 dma_pool_destroy(uc->hdesc_pool); in pktdma_alloc_chan_resources()
2736 uc->use_dma_pool = false; in pktdma_alloc_chan_resources()
2744 struct udma_chan *uc = to_udma_chan(chan); in udma_slave_config() local
2746 memcpy(&uc->cfg, cfg, sizeof(uc->cfg)); in udma_slave_config()
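Note: udma_slave_config() merely caches the client's dma_slave_config into uc->cfg; the values are consumed later by the prep callbacks (address, width, burst) and by udma_configure_statictr(). A hypothetical client-side call that lands here, using the generic dmaengine API (the demo_* name is illustrative):

    #include <linux/dmaengine.h>

    static int demo_setup_rx(struct dma_chan *chan, dma_addr_t fifo_addr)
    {
            struct dma_slave_config cfg = {
                    .direction      = DMA_DEV_TO_MEM,
                    .src_addr       = fifo_addr,    /* peripheral FIFO */
                    .src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                    .src_maxburst   = 8,
            };

            return dmaengine_slave_config(chan, &cfg);
    }
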
2751 static struct udma_desc *udma_alloc_tr_desc(struct udma_chan *uc, in udma_alloc_tr_desc() argument
2768 dev_err(uc->ud->dev, "Unsupported TR size of %zu\n", tr_size); in udma_alloc_tr_desc()
2783 if (uc->use_dma_pool) { in udma_alloc_tr_desc()
2784 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_alloc_tr_desc()
2785 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_alloc_tr_desc()
2792 uc->ud->desc_align); in udma_alloc_tr_desc()
2793 hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev, in udma_alloc_tr_desc()
2811 if (uc->cyclic) in udma_alloc_tr_desc()
2815 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_alloc_tr_desc()
2817 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_alloc_tr_desc()
2820 cppi5_desc_set_pktids(tr_desc, uc->id, in udma_alloc_tr_desc()
2873 udma_prep_slave_sg_tr(struct udma_chan *uc, struct scatterlist *sgl, in udma_prep_slave_sg_tr() argument
2897 d = udma_alloc_tr_desc(uc, tr_size, num_tr, dir); in udma_prep_slave_sg_tr()
2903 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_tr()
2906 asel = (u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_slave_sg_tr()
2915 dev_err(uc->ud->dev, "size %u is not supported\n", in udma_prep_slave_sg_tr()
2917 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_tr()
2957 udma_prep_slave_sg_triggered_tr(struct udma_chan *uc, struct scatterlist *sgl, in udma_prep_slave_sg_triggered_tr() argument
2976 dev_addr = uc->cfg.src_addr; in udma_prep_slave_sg_triggered_tr()
2977 dev_width = uc->cfg.src_addr_width; in udma_prep_slave_sg_triggered_tr()
2978 burst = uc->cfg.src_maxburst; in udma_prep_slave_sg_triggered_tr()
2979 port_window = uc->cfg.src_port_window_size; in udma_prep_slave_sg_triggered_tr()
2981 dev_addr = uc->cfg.dst_addr; in udma_prep_slave_sg_triggered_tr()
2982 dev_width = uc->cfg.dst_addr_width; in udma_prep_slave_sg_triggered_tr()
2983 burst = uc->cfg.dst_maxburst; in udma_prep_slave_sg_triggered_tr()
2984 port_window = uc->cfg.dst_port_window_size; in udma_prep_slave_sg_triggered_tr()
2986 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_slave_sg_triggered_tr()
2995 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3013 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3027 d = udma_alloc_tr_desc(uc, tr_size, num_tr, dir); in udma_prep_slave_sg_triggered_tr()
3033 if (uc->ud->match_data->type == DMA_TYPE_UDMA) { in udma_prep_slave_sg_triggered_tr()
3036 asel = (u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_slave_sg_triggered_tr()
3049 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_slave_sg_triggered_tr()
3051 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_triggered_tr()
3060 uc->config.tr_trigger_type, in udma_prep_slave_sg_triggered_tr()
3107 uc->config.tr_trigger_type, in udma_prep_slave_sg_triggered_tr()
3155 static int udma_configure_statictr(struct udma_chan *uc, struct udma_desc *d, in udma_configure_statictr() argument
3159 if (uc->config.ep_type != PSIL_EP_PDMA_XY) in udma_configure_statictr()
3191 if (uc->config.pkt_mode || !uc->cyclic) { in udma_configure_statictr()
3194 if (uc->cyclic) in udma_configure_statictr()
3199 if (uc->config.dir == DMA_DEV_TO_MEM && in udma_configure_statictr()
3200 d->static_tr.bstcnt > uc->ud->match_data->statictr_z_mask) in udma_configure_statictr()
3210 udma_prep_slave_sg_pkt(struct udma_chan *uc, struct scatterlist *sgl, in udma_prep_slave_sg_pkt() argument
3229 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_prep_slave_sg_pkt()
3231 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_prep_slave_sg_pkt()
3233 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_pkt()
3236 asel = (u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_slave_sg_pkt()
3244 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_slave_sg_pkt()
3248 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3251 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_pkt()
3257 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_slave_sg_pkt()
3263 cppi5_desc_set_pktids(&desc->hdr, uc->id, in udma_prep_slave_sg_pkt()
3280 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA || in udma_prep_slave_sg_pkt()
3286 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3289 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_pkt()
3304 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_attach_metadata() local
3309 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_attach_metadata()
3312 if (!data || len > uc->config.metadata_size) in udma_attach_metadata()
3315 if (uc->config.needs_epib && len < CPPI5_INFO0_HDESC_EPIB_SIZE) in udma_attach_metadata()
3322 if (uc->config.needs_epib) in udma_attach_metadata()
3327 if (uc->config.needs_epib) in udma_attach_metadata()
3340 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_get_metadata_ptr() local
3343 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_get_metadata_ptr()
3348 *max_len = uc->config.metadata_size; in udma_get_metadata_ptr()
3361 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_set_metadata_len() local
3366 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_set_metadata_len()
3369 if (payload_len > uc->config.metadata_size) in udma_set_metadata_len()
3372 if (uc->config.needs_epib && payload_len < CPPI5_INFO0_HDESC_EPIB_SIZE) in udma_set_metadata_len()
3377 if (uc->config.needs_epib) { in udma_set_metadata_len()
3399 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_slave_sg() local
3404 if (dir != uc->config.dir && in udma_prep_slave_sg()
3405 (uc->config.dir == DMA_MEM_TO_MEM && !uc->config.tr_trigger_type)) { in udma_prep_slave_sg()
3408 __func__, uc->id, in udma_prep_slave_sg()
3409 dmaengine_get_direction_text(uc->config.dir), in udma_prep_slave_sg()
3415 dev_width = uc->cfg.src_addr_width; in udma_prep_slave_sg()
3416 burst = uc->cfg.src_maxburst; in udma_prep_slave_sg()
3418 dev_width = uc->cfg.dst_addr_width; in udma_prep_slave_sg()
3419 burst = uc->cfg.dst_maxburst; in udma_prep_slave_sg()
3428 uc->config.tx_flags = tx_flags; in udma_prep_slave_sg()
3430 if (uc->config.pkt_mode) in udma_prep_slave_sg()
3431 d = udma_prep_slave_sg_pkt(uc, sgl, sglen, dir, tx_flags, in udma_prep_slave_sg()
3433 else if (is_slave_direction(uc->config.dir)) in udma_prep_slave_sg()
3434 d = udma_prep_slave_sg_tr(uc, sgl, sglen, dir, tx_flags, in udma_prep_slave_sg()
3437 d = udma_prep_slave_sg_triggered_tr(uc, sgl, sglen, dir, in udma_prep_slave_sg()
3448 if (udma_configure_statictr(uc, d, dev_width, burst)) { in udma_prep_slave_sg()
3449 dev_err(uc->ud->dev, in udma_prep_slave_sg()
3453 udma_free_hwdesc(uc, d); in udma_prep_slave_sg()
3458 if (uc->config.metadata_size) in udma_prep_slave_sg()
3461 return vchan_tx_prep(&uc->vc, &d->vd, tx_flags); in udma_prep_slave_sg()
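Note: from the client side, udma_prep_slave_sg() is reached through dmaengine_prep_slave_sg(); the returned descriptor is submitted onto the virtual channel and kicked with dma_async_issue_pending(), which lands in udma_issue_pending() further down this listing. A hypothetical caller (demo_* names are illustrative):

    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>

    static void demo_rx_done(void *param)
    {
            /* invoked from the ring IRQ path once the transfer completes */
    }

    static int demo_start_rx(struct dma_chan *chan, struct scatterlist *sgl,
                             unsigned int nents)
    {
            struct dma_async_tx_descriptor *desc;

            desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_DEV_TO_MEM,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
            if (!desc)
                    return -ENOMEM;

            desc->callback = demo_rx_done;
            desc->callback_param = NULL;

            dmaengine_submit(desc);         /* queue on the vchan */
            dma_async_issue_pending(chan);  /* may start the hardware */
            return 0;
    }
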
3465 udma_prep_dma_cyclic_tr(struct udma_chan *uc, dma_addr_t buf_addr, in udma_prep_dma_cyclic_tr() argument
3480 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_cyclic_tr()
3487 d = udma_alloc_tr_desc(uc, tr_size, periods * num_tr, dir); in udma_prep_dma_cyclic_tr()
3492 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_dma_cyclic_tr()
3496 ((u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT); in udma_prep_dma_cyclic_tr()
3535 udma_prep_dma_cyclic_pkt(struct udma_chan *uc, dma_addr_t buf_addr, in udma_prep_dma_cyclic_pkt() argument
3558 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_prep_dma_cyclic_pkt()
3560 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_prep_dma_cyclic_pkt()
3562 if (uc->ud->match_data->type != DMA_TYPE_UDMA) in udma_prep_dma_cyclic_pkt()
3563 buf_addr |= (u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_cyclic_pkt()
3570 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_dma_cyclic_pkt()
3574 dev_err(uc->ud->dev, in udma_prep_dma_cyclic_pkt()
3577 udma_free_hwdesc(uc, d); in udma_prep_dma_cyclic_pkt()
3582 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_dma_cyclic_pkt()
3589 cppi5_desc_set_pktids(&h_desc->hdr, uc->id, in udma_prep_dma_cyclic_pkt()
3607 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_dma_cyclic() local
3612 if (dir != uc->config.dir) { in udma_prep_dma_cyclic()
3615 __func__, uc->id, in udma_prep_dma_cyclic()
3616 dmaengine_get_direction_text(uc->config.dir), in udma_prep_dma_cyclic()
3621 uc->cyclic = true; in udma_prep_dma_cyclic()
3624 dev_width = uc->cfg.src_addr_width; in udma_prep_dma_cyclic()
3625 burst = uc->cfg.src_maxburst; in udma_prep_dma_cyclic()
3627 dev_width = uc->cfg.dst_addr_width; in udma_prep_dma_cyclic()
3628 burst = uc->cfg.dst_maxburst; in udma_prep_dma_cyclic()
3630 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_dma_cyclic()
3637 if (uc->config.pkt_mode) in udma_prep_dma_cyclic()
3638 d = udma_prep_dma_cyclic_pkt(uc, buf_addr, buf_len, period_len, in udma_prep_dma_cyclic()
3641 d = udma_prep_dma_cyclic_tr(uc, buf_addr, buf_len, period_len, in udma_prep_dma_cyclic()
3653 if (udma_configure_statictr(uc, d, dev_width, burst)) { in udma_prep_dma_cyclic()
3654 dev_err(uc->ud->dev, in udma_prep_dma_cyclic()
3658 udma_free_hwdesc(uc, d); in udma_prep_dma_cyclic()
3663 if (uc->config.metadata_size) in udma_prep_dma_cyclic()
3666 return vchan_tx_prep(&uc->vc, &d->vd, flags); in udma_prep_dma_cyclic()
3673 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_dma_memcpy() local
3680 if (uc->config.dir != DMA_MEM_TO_MEM) { in udma_prep_dma_memcpy()
3683 __func__, uc->id, in udma_prep_dma_memcpy()
3684 dmaengine_get_direction_text(uc->config.dir), in udma_prep_dma_memcpy()
3692 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_memcpy()
3697 d = udma_alloc_tr_desc(uc, tr_size, num_tr, DMA_MEM_TO_MEM); in udma_prep_dma_memcpy()
3706 if (uc->ud->match_data->type != DMA_TYPE_UDMA) { in udma_prep_dma_memcpy()
3707 src |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3708 dest |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3752 if (uc->config.metadata_size) in udma_prep_dma_memcpy()
3755 return vchan_tx_prep(&uc->vc, &d->vd, tx_flags); in udma_prep_dma_memcpy()
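
Note: memcpy prep is only valid on MEM_TO_MEM channels, and on BCDMA/PKTDMA both endpoints of the copy take the device-level ASEL (uc->ud->asel, not the per-channel uc->config.asel used on the slave paths). A sketch of that setup, with glue code assumed:

	if (uc->config.dir != DMA_MEM_TO_MEM)
		return NULL;	/* memcpy only on MEM_TO_MEM channels */

	d = udma_alloc_tr_desc(uc, tr_size, num_tr, DMA_MEM_TO_MEM);
	if (!d)
		return NULL;

	/* Both source and destination carry the ASEL in the high bits */
	if (uc->ud->match_data->type != DMA_TYPE_UDMA) {
		src |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT;
		dest |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT;
	}
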
3760 struct udma_chan *uc = to_udma_chan(chan); in udma_issue_pending() local
3763 spin_lock_irqsave(&uc->vc.lock, flags); in udma_issue_pending()
3766 if (vchan_issue_pending(&uc->vc) && !uc->desc) { in udma_issue_pending()
3772 if (!(uc->state == UDMA_CHAN_IS_TERMINATING && in udma_issue_pending()
3773 udma_is_chan_running(uc))) in udma_issue_pending()
3774 udma_start(uc); in udma_issue_pending()
3777 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_issue_pending()
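
Note: udma_issue_pending() deliberately does not kick a channel that is still tearing down; the teardown-complete path restarts it once the hardware is quiescent. The full body is nearly what the refs show, joined here as a sketch:

	spin_lock_irqsave(&uc->vc.lock, flags);

	/* If the channel is terminating but the hardware is still running,
	 * leave the restart to the teardown-complete handler.
	 */
	if (vchan_issue_pending(&uc->vc) && !uc->desc) {
		if (!(uc->state == UDMA_CHAN_IS_TERMINATING &&
		      udma_is_chan_running(uc)))
			udma_start(uc);
	}

	spin_unlock_irqrestore(&uc->vc.lock, flags);
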
3784 struct udma_chan *uc = to_udma_chan(chan); in udma_tx_status() local
3788 spin_lock_irqsave(&uc->vc.lock, flags); in udma_tx_status()
3792 if (!udma_is_chan_running(uc)) in udma_tx_status()
3795 if (ret == DMA_IN_PROGRESS && udma_is_chan_paused(uc)) in udma_tx_status()
3801 if (uc->desc && uc->desc->vd.tx.cookie == cookie) { in udma_tx_status()
3804 u32 residue = uc->desc->residue; in udma_tx_status()
3807 if (uc->desc->dir == DMA_MEM_TO_DEV) { in udma_tx_status()
3808 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_tx_status()
3810 if (uc->config.ep_type != PSIL_EP_NATIVE) { in udma_tx_status()
3811 peer_bcnt = udma_tchanrt_read(uc, in udma_tx_status()
3817 } else if (uc->desc->dir == DMA_DEV_TO_MEM) { in udma_tx_status()
3818 bcnt = udma_rchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_tx_status()
3820 if (uc->config.ep_type != PSIL_EP_NATIVE) { in udma_tx_status()
3821 peer_bcnt = udma_rchanrt_read(uc, in udma_tx_status()
3828 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_tx_status()
3831 if (bcnt && !(bcnt % uc->desc->residue)) in udma_tx_status()
3834 residue -= bcnt % uc->desc->residue; in udma_tx_status()
3836 if (!residue && (uc->config.dir == DMA_DEV_TO_MEM || !delay)) { in udma_tx_status()
3849 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_tx_status()
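
Note: the core of udma_tx_status() is the residue arithmetic: the RT byte counter free-runs across descriptors, so progress inside the current descriptor is the counter taken modulo the descriptor's total byte count (uc->desc->residue). A standalone, compilable model of just that arithmetic, as a simplification that ignores the peer-counter delay handling:

	#include <stdio.h>

	/* bcnt:  byte count read from the channel RT counter register
	 * total: total bytes described by the current descriptor
	 * returns bytes still outstanding for the current descriptor
	 */
	static unsigned int residue_model(unsigned int bcnt, unsigned int total)
	{
		if (bcnt && !(bcnt % total))
			return 0;	/* counter landed exactly on a boundary */
		return total - (bcnt % total);
	}

	int main(void)
	{
		/* 4 KiB descriptor, 5 KiB counted since channel start:
		 * 1 KiB into the current descriptor, 3 KiB remaining.
		 */
		printf("%u\n", residue_model(5120, 4096));
		return 0;
	}
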
3855 struct udma_chan *uc = to_udma_chan(chan); in udma_pause() local
3858 switch (uc->config.dir) { in udma_pause()
3860 udma_rchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_pause()
3865 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_pause()
3870 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_CTL_REG, in udma_pause()
3883 struct udma_chan *uc = to_udma_chan(chan); in udma_resume() local
3886 switch (uc->config.dir) { in udma_resume()
3888 udma_rchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_resume()
3893 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_resume()
3897 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_CTL_REG, in udma_resume()
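
Note: udma_pause() and udma_resume() are symmetric. For the slave directions the PAUSE bit lives in the remote peer's RT enable register (rchan side for DEV_TO_MEM, tchan side for MEM_TO_DEV); for MEM_TO_MEM it lives in the channel's own RT control register. A sketch of the pause side; the bit-field names are assumptions:

	switch (uc->config.dir) {
	case DMA_DEV_TO_MEM:
		udma_rchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG,
					 UDMA_PEER_RT_EN_PAUSE,
					 UDMA_PEER_RT_EN_PAUSE);
		break;
	case DMA_MEM_TO_DEV:
		udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG,
					 UDMA_PEER_RT_EN_PAUSE,
					 UDMA_PEER_RT_EN_PAUSE);
		break;
	case DMA_MEM_TO_MEM:
		udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_CTL_REG,
					 UDMA_CHAN_RT_CTL_PAUSE,
					 UDMA_CHAN_RT_CTL_PAUSE);
		break;
	default:
		return -EINVAL;
	}

	/* udma_resume() clears the same bits: same mask, value 0. */
	return 0;
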
3909 struct udma_chan *uc = to_udma_chan(chan); in udma_terminate_all() local
3913 spin_lock_irqsave(&uc->vc.lock, flags); in udma_terminate_all()
3915 if (udma_is_chan_running(uc)) in udma_terminate_all()
3916 udma_stop(uc); in udma_terminate_all()
3918 if (uc->desc) { in udma_terminate_all()
3919 uc->terminated_desc = uc->desc; in udma_terminate_all()
3920 uc->desc = NULL; in udma_terminate_all()
3921 uc->terminated_desc->terminated = true; in udma_terminate_all()
3922 cancel_delayed_work(&uc->tx_drain.work); in udma_terminate_all()
3925 uc->paused = false; in udma_terminate_all()
3927 vchan_get_all_descriptors(&uc->vc, &head); in udma_terminate_all()
3928 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_terminate_all()
3929 vchan_dma_desc_free_list(&uc->vc, &head); in udma_terminate_all()
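
Note: terminate_all stops the hardware, then parks the in-flight descriptor as "terminated" rather than freeing it, because it cannot be released until the hardware teardown completes. Everything still queued on the vchan is reaped after dropping the lock. Joined from the refs above as a sketch:

	if (udma_is_chan_running(uc))
		udma_stop(uc);

	if (uc->desc) {
		/* Keep the in-flight descriptor until teardown finishes;
		 * the completion/synchronize path frees it later.
		 */
		uc->terminated_desc = uc->desc;
		uc->desc = NULL;
		uc->terminated_desc->terminated = true;
		cancel_delayed_work(&uc->tx_drain.work);
	}

	uc->paused = false;

	vchan_get_all_descriptors(&uc->vc, &head);
	spin_unlock_irqrestore(&uc->vc.lock, flags);
	vchan_dma_desc_free_list(&uc->vc, &head);
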
3936 struct udma_chan *uc = to_udma_chan(chan); in udma_synchronize() local
3939 vchan_synchronize(&uc->vc); in udma_synchronize()
3941 if (uc->state == UDMA_CHAN_IS_TERMINATING) { in udma_synchronize()
3942 timeout = wait_for_completion_timeout(&uc->teardown_completed, in udma_synchronize()
3945 dev_warn(uc->ud->dev, "chan%d teardown timeout!\n", in udma_synchronize()
3946 uc->id); in udma_synchronize()
3947 udma_dump_chan_stdata(uc); in udma_synchronize()
3948 udma_reset_chan(uc, true); in udma_synchronize()
3952 udma_reset_chan(uc, false); in udma_synchronize()
3953 if (udma_is_chan_running(uc)) in udma_synchronize()
3954 dev_warn(uc->ud->dev, "chan%d refused to stop!\n", uc->id); in udma_synchronize()
3956 cancel_delayed_work_sync(&uc->tx_drain.work); in udma_synchronize()
3957 udma_reset_rings(uc); in udma_synchronize()
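
Note: synchronize gives the hardware a bounded window to finish teardown and escalates to a register dump plus hard reset if it does not. A sketch; the timeout value is an assumption:

	if (uc->state == UDMA_CHAN_IS_TERMINATING) {
		timeout = wait_for_completion_timeout(&uc->teardown_completed,
						      msecs_to_jiffies(100));
		if (!timeout) {
			dev_warn(uc->ud->dev, "chan%d teardown timeout!\n",
				 uc->id);
			udma_dump_chan_stdata(uc);
			udma_reset_chan(uc, true);	/* hard reset */
		}
	}

	udma_reset_chan(uc, false);
	if (udma_is_chan_running(uc))
		dev_warn(uc->ud->dev, "chan%d refused to stop!\n", uc->id);

	cancel_delayed_work_sync(&uc->tx_drain.work);
	udma_reset_rings(uc);
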
3964 struct udma_chan *uc = to_udma_chan(&vc->chan); in udma_desc_pre_callback() local
3973 udma_fetch_epib(uc, d); in udma_desc_pre_callback()
4035 struct udma_chan *uc = to_udma_chan(chan); in udma_free_chan_resources() local
4039 if (uc->terminated_desc) { in udma_free_chan_resources()
4040 udma_reset_chan(uc, false); in udma_free_chan_resources()
4041 udma_reset_rings(uc); in udma_free_chan_resources()
4044 cancel_delayed_work_sync(&uc->tx_drain.work); in udma_free_chan_resources()
4046 if (uc->irq_num_ring > 0) { in udma_free_chan_resources()
4047 free_irq(uc->irq_num_ring, uc); in udma_free_chan_resources()
4049 uc->irq_num_ring = 0; in udma_free_chan_resources()
4051 if (uc->irq_num_udma > 0) { in udma_free_chan_resources()
4052 free_irq(uc->irq_num_udma, uc); in udma_free_chan_resources()
4054 uc->irq_num_udma = 0; in udma_free_chan_resources()
4058 if (uc->psil_paired) { in udma_free_chan_resources()
4059 navss_psil_unpair(ud, uc->config.src_thread, in udma_free_chan_resources()
4060 uc->config.dst_thread); in udma_free_chan_resources()
4061 uc->psil_paired = false; in udma_free_chan_resources()
4064 vchan_free_chan_resources(&uc->vc); in udma_free_chan_resources()
4065 tasklet_kill(&uc->vc.task); in udma_free_chan_resources()
4067 bcdma_free_bchan_resources(uc); in udma_free_chan_resources()
4068 udma_free_tx_resources(uc); in udma_free_chan_resources()
4069 udma_free_rx_resources(uc); in udma_free_chan_resources()
4070 udma_reset_uchan(uc); in udma_free_chan_resources()
4072 if (uc->use_dma_pool) { in udma_free_chan_resources()
4073 dma_pool_destroy(uc->hdesc_pool); in udma_free_chan_resources()
4074 uc->use_dma_pool = false; in udma_free_chan_resources()
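
Note: the teardown order in udma_free_chan_resources() matters: a leftover terminated descriptor forces a reset first, IRQs are released before the rings they target, the PSI-L thread pairing is undone, and only then are the vchan, the per-type channel resources, and the dma_pool dropped. A sketch of the IRQ and PSI-L portion:

	if (uc->irq_num_ring > 0) {
		free_irq(uc->irq_num_ring, uc);
		uc->irq_num_ring = 0;
	}
	if (uc->irq_num_udma > 0) {
		free_irq(uc->irq_num_udma, uc);
		uc->irq_num_udma = 0;
	}

	/* Undo the src/dst PSI-L thread pairing made at alloc time */
	if (uc->psil_paired) {
		navss_psil_unpair(ud, uc->config.src_thread,
				  uc->config.dst_thread);
		uc->psil_paired = false;
	}
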
4094 struct udma_chan *uc; in udma_dma_filter_fn() local
4102 uc = to_udma_chan(chan); in udma_dma_filter_fn()
4103 ucc = &uc->config; in udma_dma_filter_fn()
4104 ud = uc->ud; in udma_dma_filter_fn()
4189 dev_dbg(ud->dev, "chan%d: Remote thread: 0x%04x (%s)\n", uc->id, in udma_dma_filter_fn()
4195 dev_dbg(ud->dev, "chan%d: triggered channel (type: %u)\n", uc->id, in udma_dma_filter_fn()
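
Note: the filter function is the dmaengine glue used during channel translation: it rejects channels that belong to another driver instance, then loads the per-channel config from the requested PSI-L endpoint before the debug prints above fire. A generic hedged sketch of that pattern; the udma_driver symbol and the early checks are assumptions:

	static bool udma_dma_filter_fn(struct dma_chan *chan, void *param)
	{
		struct udma_chan *uc;
		struct udma_chan_config *ucc;
		struct udma_dev *ud;

		if (chan->device->dev->driver != &udma_driver.driver)
			return false;	/* not one of our channels */

		uc = to_udma_chan(chan);
		ucc = &uc->config;
		ud = uc->ud;
		/* ... resolve the PSI-L endpoint, fill ucc, accept ... */
		return true;
	}
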
5153 struct udma_chan *uc = to_udma_chan(chan); in udma_dbg_summary_show_chan() local
5154 struct udma_chan_config *ucc = &uc->config; in udma_dbg_summary_show_chan()
5162 dmaengine_get_direction_text(uc->config.dir)); in udma_dbg_summary_show_chan()
5164 switch (uc->config.dir) { in udma_dbg_summary_show_chan()
5166 if (uc->ud->match_data->type == DMA_TYPE_BCDMA) { in udma_dbg_summary_show_chan()
5167 seq_printf(s, "bchan%d)\n", uc->bchan->id); in udma_dbg_summary_show_chan()
5171 seq_printf(s, "chan%d pair [0x%04x -> 0x%04x], ", uc->tchan->id, in udma_dbg_summary_show_chan()
5175 seq_printf(s, "rchan%d [0x%04x -> 0x%04x], ", uc->rchan->id, in udma_dbg_summary_show_chan()
5177 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5178 seq_printf(s, "rflow%d, ", uc->rflow->id); in udma_dbg_summary_show_chan()
5181 seq_printf(s, "tchan%d [0x%04x -> 0x%04x], ", uc->tchan->id, in udma_dbg_summary_show_chan()
5183 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5184 seq_printf(s, "tflow%d, ", uc->tchan->tflow_id); in udma_dbg_summary_show_chan()
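
Note: the debugfs summary prints one line per in-use channel and branches on transfer type: BCDMA MEM_TO_MEM reports the bchan, the slave directions report rchan/tchan and their PSI-L thread pair, with PKTDMA additionally naming the rx flow or tx flow. One branch, joined from the refs above as a sketch:

	case DMA_DEV_TO_MEM:
		seq_printf(s, "rchan%d [0x%04x -> 0x%04x], ", uc->rchan->id,
			   ucc->src_thread, ucc->dst_thread);
		if (uc->ud->match_data->type == DMA_TYPE_PKTDMA)
			seq_printf(s, "rflow%d, ", uc->rflow->id);
		break;
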
5465 struct udma_chan *uc = &ud->channels[i]; in udma_probe() local
5467 uc->ud = ud; in udma_probe()
5468 uc->vc.desc_free = udma_desc_free; in udma_probe()
5469 uc->id = i; in udma_probe()
5470 uc->bchan = NULL; in udma_probe()
5471 uc->tchan = NULL; in udma_probe()
5472 uc->rchan = NULL; in udma_probe()
5473 uc->config.remote_thread_id = -1; in udma_probe()
5474 uc->config.mapped_channel_id = -1; in udma_probe()
5475 uc->config.default_flow_id = -1; in udma_probe()
5476 uc->config.dir = DMA_MEM_TO_MEM; in udma_probe()
5477 uc->name = devm_kasprintf(dev, GFP_KERNEL, "%s chan%d", in udma_probe()
5480 vchan_init(&uc->vc, &ud->ddev); in udma_probe()
5482 tasklet_setup(&uc->vc.task, udma_vchan_complete); in udma_probe()
5483 init_completion(&uc->teardown_completed); in udma_probe()
5484 INIT_DELAYED_WORK(&uc->tx_drain.work, udma_check_tx_completion); in udma_probe()
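
Note: probe initializes every channel's virt-dma plumbing before the DMA device is registered: channels start unmapped (no bchan/tchan/rchan), with -1 sentinels for thread/flow ids and a default MEM_TO_MEM direction, and the vchan tasklet is overridden so completion can fetch EPIB/metadata before the client callback runs. A sketch of the loop; the channel-count field name is an assumption:

	for (i = 0; i < ud->ch_count; i++) {
		struct udma_chan *uc = &ud->channels[i];

		uc->ud = ud;
		uc->vc.desc_free = udma_desc_free;
		uc->id = i;
		uc->config.remote_thread_id = -1;	/* not yet routed */
		uc->config.dir = DMA_MEM_TO_MEM;
		uc->name = devm_kasprintf(dev, GFP_KERNEL, "%s chan%d",
					  dev_name(dev), i);

		vchan_init(&uc->vc, &ud->ddev);
		/* Replace the default vchan tasklet with our own */
		tasklet_setup(&uc->vc.task, udma_vchan_complete);
		init_completion(&uc->teardown_completed);
		INIT_DELAYED_WORK(&uc->tx_drain.work,
				  udma_check_tx_completion);
	}
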