Lines matching refs: ud
279 struct udma_dev *ud; member
399 static int navss_psil_pair(struct udma_dev *ud, u32 src_thread, u32 dst_thread) in navss_psil_pair() argument
401 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in navss_psil_pair()
409 static int navss_psil_unpair(struct udma_dev *ud, u32 src_thread, in navss_psil_unpair() argument
412 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in navss_psil_unpair()
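The two helpers above route all PSI-L pairing through the TISCI resource-manager ops. A condensed sketch reconstructed from these fragments (not verbatim; the destination-thread marker constant follows the usual K3 PSI-L convention and is an assumption here):

static int navss_psil_pair(struct udma_dev *ud, u32 src_thread, u32 dst_thread)
{
	struct udma_tisci_rm *tisci_rm = &ud->tisci_rm;

	/* PSI-L destination threads carry a marker bit (assumed 0x8000). */
	dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET;

	return tisci_rm->tisci_psil_ops->pair(tisci_rm->tisci,
					      tisci_rm->tisci_navss_dev_id,
					      src_thread, dst_thread);
}

static int navss_psil_unpair(struct udma_dev *ud, u32 src_thread,
			     u32 dst_thread)
{
	struct udma_tisci_rm *tisci_rm = &ud->tisci_rm;

	dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET;

	return tisci_rm->tisci_psil_ops->unpair(tisci_rm->tisci,
						tisci_rm->tisci_navss_dev_id,
						src_thread, dst_thread);
}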
467 struct device *dev = uc->ud->dev; in udma_dump_chan_stdata()
554 struct udma_dev *ud = container_of(work, typeof(*ud), purge_work); in udma_purge_desc_work() local
559 spin_lock_irqsave(&ud->lock, flags); in udma_purge_desc_work()
560 list_splice_tail_init(&ud->desc_to_purge, &head); in udma_purge_desc_work()
561 spin_unlock_irqrestore(&ud->lock, flags); in udma_purge_desc_work()
573 if (!list_empty(&ud->desc_to_purge)) in udma_purge_desc_work()
574 schedule_work(&ud->purge_work); in udma_purge_desc_work()
579 struct udma_dev *ud = to_udma_dev(vd->tx.chan->device); in udma_desc_free() local
593 spin_lock_irqsave(&ud->lock, flags); in udma_desc_free()
594 list_add_tail(&vd->node, &ud->desc_to_purge); in udma_desc_free()
595 spin_unlock_irqrestore(&ud->lock, flags); in udma_desc_free()
597 schedule_work(&ud->purge_work); in udma_desc_free()
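Together these fragments describe a deferred-free scheme: udma_desc_free() parks the descriptor on ud->desc_to_purge under ud->lock and kicks purge_work; the worker splices the list out under the lock and frees entries outside it. A condensed sketch of the worker under those assumptions (the actual freeing is elided to a comment):

static void udma_purge_desc_work(struct work_struct *work)
{
	struct udma_dev *ud = container_of(work, typeof(*ud), purge_work);
	struct virt_dma_desc *vd, *_vd;
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&ud->lock, flags);
	list_splice_tail_init(&ud->desc_to_purge, &head);
	spin_unlock_irqrestore(&ud->lock, flags);

	list_for_each_entry_safe(vd, _vd, &head, node) {
		/* release the descriptor memory, outside ud->lock */
		list_del(&vd->node);
	}

	/* Re-run if new entries were queued while we were draining. */
	if (!list_empty(&ud->desc_to_purge))
		schedule_work(&ud->purge_work);
}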
645 return uc->ud->rx_flush.hwdescs[uc->config.pkt_mode].cppi5_desc_paddr; in udma_get_rx_flush_hwdesc_paddr()
837 uc->ud->ddev.device_free_chan_resources(&uc->vc.chan); in udma_reset_chan()
841 ret = uc->ud->ddev.device_alloc_chan_resources(&uc->vc.chan); in udma_reset_chan()
864 if (uc->ud->match_data->type == DMA_TYPE_UDMA && ucc->pkt_mode && in udma_start_desc()
926 uc->ud->match_data; in udma_start()
1177 dev_err(uc->ud->dev, "not matching descriptors!\n"); in udma_ring_irq_handler()
1251 static int __udma_alloc_gp_rflow_range(struct udma_dev *ud, int from, int cnt) in __udma_alloc_gp_rflow_range() argument
1258 tmp_from = ud->rchan_cnt; in __udma_alloc_gp_rflow_range()
1260 if (tmp_from < ud->rchan_cnt) in __udma_alloc_gp_rflow_range()
1263 if (tmp_from + cnt > ud->rflow_cnt) in __udma_alloc_gp_rflow_range()
1266 bitmap_or(tmp, ud->rflow_gp_map, ud->rflow_gp_map_allocated, in __udma_alloc_gp_rflow_range()
1267 ud->rflow_cnt); in __udma_alloc_gp_rflow_range()
1270 ud->rflow_cnt, in __udma_alloc_gp_rflow_range()
1272 if (start >= ud->rflow_cnt) in __udma_alloc_gp_rflow_range()
1278 bitmap_set(ud->rflow_gp_map_allocated, start, cnt); in __udma_alloc_gp_rflow_range()
1282 static int __udma_free_gp_rflow_range(struct udma_dev *ud, int from, int cnt) in __udma_free_gp_rflow_range() argument
1284 if (from < ud->rchan_cnt) in __udma_free_gp_rflow_range()
1286 if (from + cnt > ud->rflow_cnt) in __udma_free_gp_rflow_range()
1289 bitmap_clear(ud->rflow_gp_map_allocated, from, cnt); in __udma_free_gp_rflow_range()
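The range allocator above juggles two bitmaps: a set bit in rflow_gp_map means the flow is not usable by this host, and a set bit in rflow_gp_map_allocated means it is already handed out. A reconstruction from the fragments (the on-stack bitmap bound K3_UDMA_MAX_RFLOWS is an assumption):

static int __udma_alloc_gp_rflow_range(struct udma_dev *ud, int from, int cnt)
{
	DECLARE_BITMAP(tmp, K3_UDMA_MAX_RFLOWS);
	int tmp_from, start;

	if (from >= 0)
		tmp_from = from;
	else
		tmp_from = ud->rchan_cnt;
	/* default flows should not be requested through this API */
	if (tmp_from < ud->rchan_cnt)
		return -EINVAL;

	if (tmp_from + cnt > ud->rflow_cnt)
		return -EINVAL;

	/* A clear bit in the OR of both maps marks a usable, free flow. */
	bitmap_or(tmp, ud->rflow_gp_map, ud->rflow_gp_map_allocated,
		  ud->rflow_cnt);

	start = bitmap_find_next_zero_area(tmp, ud->rflow_cnt, tmp_from,
					   cnt, 0);
	if (start >= ud->rflow_cnt)
		return -ENOMEM;

	bitmap_set(ud->rflow_gp_map_allocated, start, cnt);
	return start;
}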
1293 static struct udma_rflow *__udma_get_rflow(struct udma_dev *ud, int id) in __udma_get_rflow() argument
1302 if (id < 0 || id >= ud->rflow_cnt) in __udma_get_rflow()
1305 if (test_bit(id, ud->rflow_in_use)) in __udma_get_rflow()
1308 if (ud->rflow_gp_map) { in __udma_get_rflow()
1310 if (!test_bit(id, ud->rflow_gp_map) && in __udma_get_rflow()
1311 !test_bit(id, ud->rflow_gp_map_allocated)) in __udma_get_rflow()
1315 dev_dbg(ud->dev, "get rflow%d\n", id); in __udma_get_rflow()
1316 set_bit(id, ud->rflow_in_use); in __udma_get_rflow()
1317 return &ud->rflows[id]; in __udma_get_rflow()
1320 static void __udma_put_rflow(struct udma_dev *ud, struct udma_rflow *rflow) in __udma_put_rflow() argument
1322 if (!test_bit(rflow->id, ud->rflow_in_use)) { in __udma_put_rflow()
1323 dev_err(ud->dev, "attempt to put unused rflow%d\n", rflow->id); in __udma_put_rflow()
1327 dev_dbg(ud->dev, "put rflow%d\n", rflow->id); in __udma_put_rflow()
1328 clear_bit(rflow->id, ud->rflow_in_use); in __udma_put_rflow()
1332 static struct udma_##res *__udma_reserve_##res(struct udma_dev *ud, \
1337 if (test_bit(id, ud->res##_map)) { \
1338 dev_err(ud->dev, "%s%d is in use\n", #res, id); \
1344 if (tpl >= ud->res##_tpl.levels) \
1345 tpl = ud->res##_tpl.levels - 1; \
1347 start = ud->res##_tpl.start_idx[tpl]; \
1349 id = find_next_zero_bit(ud->res##_map, ud->res##_cnt, \
1351 if (id == ud->res##_cnt) { \
1356 set_bit(id, ud->res##_map); \
1357 return &ud->res##s[id]; \
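The backslash-continued lines above are one generator macro: res## token pasting stamps out a reservation helper per resource type, each scanning that resource's bitmap from the start index of the requested throughput level. A reconstruction under those assumptions; note that ## does not expand inside string literals, so the diagnostic must stringify the resource name with #res:

#define UDMA_RESERVE_RESOURCE(res)					\
static struct udma_##res *__udma_reserve_##res(struct udma_dev *ud,	\
					       enum udma_tp_level tpl,	\
					       int id)			\
{									\
	if (id >= 0) {							\
		if (test_bit(id, ud->res##_map)) {			\
			dev_err(ud->dev, "%s%d is in use\n", #res, id);	\
			return ERR_PTR(-ENOENT);			\
		}							\
	} else {							\
		int start;						\
									\
		/* Clamp to the lowest provided throughput level. */	\
		if (tpl >= ud->res##_tpl.levels)			\
			tpl = ud->res##_tpl.levels - 1;			\
									\
		start = ud->res##_tpl.start_idx[tpl];			\
									\
		id = find_next_zero_bit(ud->res##_map, ud->res##_cnt,	\
					start);				\
		if (id == ud->res##_cnt)				\
			return ERR_PTR(-ENOENT);			\
	}								\
									\
	set_bit(id, ud->res##_map);					\
	return &ud->res##s[id];						\
}

UDMA_RESERVE_RESOURCE(bchan);
UDMA_RESERVE_RESOURCE(tchan);
UDMA_RESERVE_RESOURCE(rchan);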
1366 struct udma_dev *ud = uc->ud; in bcdma_get_bchan() local
1371 dev_dbg(ud->dev, "chan%d: already have bchan%d allocated\n", in bcdma_get_bchan()
1383 tpl = ud->bchan_tpl.levels - 1; in bcdma_get_bchan()
1385 uc->bchan = __udma_reserve_bchan(ud, tpl, -1); in bcdma_get_bchan()
1399 struct udma_dev *ud = uc->ud; in udma_get_tchan() local
1403 dev_dbg(ud->dev, "chan%d: already have tchan%d allocated\n", in udma_get_tchan()
1413 uc->tchan = __udma_reserve_tchan(ud, uc->config.channel_tpl, in udma_get_tchan()
1421 if (ud->tflow_cnt) { in udma_get_tchan()
1430 if (test_bit(tflow_id, ud->tflow_map)) { in udma_get_tchan()
1431 dev_err(ud->dev, "tflow%d is in use\n", tflow_id); in udma_get_tchan()
1432 clear_bit(uc->tchan->id, ud->tchan_map); in udma_get_tchan()
1438 set_bit(tflow_id, ud->tflow_map); in udma_get_tchan()
1448 struct udma_dev *ud = uc->ud; in udma_get_rchan() local
1452 dev_dbg(ud->dev, "chan%d: already have rchan%d allocated\n", in udma_get_rchan()
1462 uc->rchan = __udma_reserve_rchan(ud, uc->config.channel_tpl, in udma_get_rchan()
1475 struct udma_dev *ud = uc->ud; in udma_get_chan_pair() local
1479 dev_info(ud->dev, "chan%d: already have %d pair allocated\n", in udma_get_chan_pair()
1485 dev_err(ud->dev, "chan%d: already have tchan%d allocated\n", in udma_get_chan_pair()
1489 dev_err(ud->dev, "chan%d: already have rchan%d allocated\n", in udma_get_chan_pair()
1495 end = min(ud->tchan_cnt, ud->rchan_cnt); in udma_get_chan_pair()
1500 chan_id = ud->tchan_tpl.start_idx[ud->tchan_tpl.levels - 1]; in udma_get_chan_pair()
1502 if (!test_bit(chan_id, ud->tchan_map) && in udma_get_chan_pair()
1503 !test_bit(chan_id, ud->rchan_map)) in udma_get_chan_pair()
1510 set_bit(chan_id, ud->tchan_map); in udma_get_chan_pair()
1511 set_bit(chan_id, ud->rchan_map); in udma_get_chan_pair()
1512 uc->tchan = &ud->tchans[chan_id]; in udma_get_chan_pair()
1513 uc->rchan = &ud->rchans[chan_id]; in udma_get_chan_pair()
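MEM-to-MEM on UDMA needs a tchan and an rchan with the same index, so the pairing code scans both bitmaps for a common free id, starting at the start index of the last (normal-capacity) throughput level. A condensed sketch stitched from the fragments:

	end = min(ud->tchan_cnt, ud->rchan_cnt);
	/* Prefer normal-capacity channels, i.e. the highest start index. */
	chan_id = ud->tchan_tpl.start_idx[ud->tchan_tpl.levels - 1];
	for (; chan_id < end; chan_id++) {
		if (!test_bit(chan_id, ud->tchan_map) &&
		    !test_bit(chan_id, ud->rchan_map))
			break;
	}
	if (chan_id == end)
		return -ENOENT;

	set_bit(chan_id, ud->tchan_map);
	set_bit(chan_id, ud->rchan_map);
	uc->tchan = &ud->tchans[chan_id];
	uc->rchan = &ud->rchans[chan_id];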
1523 struct udma_dev *ud = uc->ud; in udma_get_rflow() local
1527 dev_err(ud->dev, "chan%d: does not have rchan??\n", uc->id); in udma_get_rflow()
1532 dev_dbg(ud->dev, "chan%d: already have rflow%d allocated\n", in udma_get_rflow()
1537 uc->rflow = __udma_get_rflow(ud, flow_id); in udma_get_rflow()
1549 struct udma_dev *ud = uc->ud; in bcdma_put_bchan() local
1552 dev_dbg(ud->dev, "chan%d: put bchan%d\n", uc->id, in bcdma_put_bchan()
1554 clear_bit(uc->bchan->id, ud->bchan_map); in bcdma_put_bchan()
1562 struct udma_dev *ud = uc->ud; in udma_put_rchan() local
1565 dev_dbg(ud->dev, "chan%d: put rchan%d\n", uc->id, in udma_put_rchan()
1567 clear_bit(uc->rchan->id, ud->rchan_map); in udma_put_rchan()
1574 struct udma_dev *ud = uc->ud; in udma_put_tchan() local
1577 dev_dbg(ud->dev, "chan%d: put tchan%d\n", uc->id, in udma_put_tchan()
1579 clear_bit(uc->tchan->id, ud->tchan_map); in udma_put_tchan()
1582 clear_bit(uc->tchan->tflow_id, ud->tflow_map); in udma_put_tchan()
1590 struct udma_dev *ud = uc->ud; in udma_put_rflow() local
1593 dev_dbg(ud->dev, "chan%d: put rflow%d\n", uc->id, in udma_put_rflow()
1595 __udma_put_rflow(ud, uc->rflow); in udma_put_rflow()
1617 struct udma_dev *ud = uc->ud; in bcdma_alloc_bchan_resources() local
1624 ret = k3_ringacc_request_rings_pair(ud->ringacc, uc->bchan->id, -1, in bcdma_alloc_bchan_resources()
1637 k3_configure_chan_coherency(&uc->vc.chan, ud->asel); in bcdma_alloc_bchan_resources()
1638 ring_cfg.asel = ud->asel; in bcdma_alloc_bchan_resources()
1675 struct udma_dev *ud = uc->ud; in udma_alloc_tx_resources() local
1687 ring_idx = ud->bchan_cnt + tchan->id; in udma_alloc_tx_resources()
1689 ret = k3_ringacc_request_rings_pair(ud->ringacc, ring_idx, -1, in udma_alloc_tx_resources()
1700 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_alloc_tx_resources()
1750 struct udma_dev *ud = uc->ud; in udma_alloc_rx_resources() local
1775 if (ud->tflow_cnt) in udma_alloc_rx_resources()
1776 fd_ring_id = ud->tflow_cnt + rflow->id; in udma_alloc_rx_resources()
1778 fd_ring_id = ud->bchan_cnt + ud->tchan_cnt + ud->echan_cnt + in udma_alloc_rx_resources()
1781 ret = k3_ringacc_request_rings_pair(ud->ringacc, fd_ring_id, -1, in udma_alloc_rx_resources()
1791 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_alloc_rx_resources()
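The ring-index arithmetic above encodes the global ring layout: with per-channel tx flows (PKTDMA) the rx free-descriptor ring follows the tflow block, otherwise it sits after the bchan, tchan, and echan blocks. A sketch under that assumed layout (the fd_ring/r_ring field names are assumptions):

	if (ud->tflow_cnt)
		fd_ring_id = ud->tflow_cnt + rflow->id;
	else
		fd_ring_id = ud->bchan_cnt + ud->tchan_cnt + ud->echan_cnt +
			     uc->rchan->id;

	ret = k3_ringacc_request_rings_pair(ud->ringacc, fd_ring_id, -1,
					    &rflow->fd_ring, &rflow->r_ring);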
1864 struct udma_dev *ud = uc->ud; in udma_tisci_m2m_channel_config() local
1865 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_m2m_channel_config()
1878 if (ud->match_data->flags & UDMA_FLAG_BURST_SIZE) { in udma_tisci_m2m_channel_config()
1879 tpl = udma_get_chan_tpl_index(&ud->tchan_tpl, tchan->id); in udma_tisci_m2m_channel_config()
1881 burst_size = ud->match_data->burst_size[tpl]; in udma_tisci_m2m_channel_config()
1890 req_tx.tx_atype = ud->atype; in udma_tisci_m2m_channel_config()
1898 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in udma_tisci_m2m_channel_config()
1908 req_rx.rx_atype = ud->atype; in udma_tisci_m2m_channel_config()
1916 dev_err(ud->dev, "rchan%d alloc failed %d\n", rchan->id, ret); in udma_tisci_m2m_channel_config()
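All the *_tisci_*_channel_config helpers that follow share one pattern: fill a ti_sci_msg_rm_udmap_*_ch_cfg request and pass it to the udmap ops. A minimal sketch of the tx side; the valid_params mask, the channel type, and tc_ring (the completion ring index obtained earlier) are assumptions:

	struct udma_tisci_rm *tisci_rm = &ud->tisci_rm;
	struct ti_sci_msg_rm_udmap_tx_ch_cfg req_tx = { 0 };
	int ret;

	req_tx.valid_params = TISCI_UDMA_TCHAN_VALID_PARAMS; /* assumed mask */
	req_tx.nav_id = tisci_rm->tisci_dev_id;
	req_tx.index = tchan->id;
	req_tx.tx_chan_type = TI_SCI_RM_UDMAP_CHAN_TYPE_3RDP_BCOPY_PBRR;
	req_tx.txcq_qnum = tc_ring;
	req_tx.tx_atype = ud->atype;
	if (ud->match_data->flags & UDMA_FLAG_BURST_SIZE)
		req_tx.tx_burst_size = ud->match_data->burst_size[tpl];

	ret = tisci_rm->tisci_udmap_ops->tx_ch_cfg(tisci_rm->tisci, &req_tx);
	if (ret)
		dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret);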
1923 struct udma_dev *ud = uc->ud; in bcdma_tisci_m2m_channel_config() local
1924 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_m2m_channel_config()
1932 if (ud->match_data->flags & UDMA_FLAG_BURST_SIZE) { in bcdma_tisci_m2m_channel_config()
1933 tpl = udma_get_chan_tpl_index(&ud->bchan_tpl, bchan->id); in bcdma_tisci_m2m_channel_config()
1935 burst_size = ud->match_data->burst_size[tpl]; in bcdma_tisci_m2m_channel_config()
1949 dev_err(ud->dev, "bchan%d cfg failed %d\n", bchan->id, ret); in bcdma_tisci_m2m_channel_config()
1956 struct udma_dev *ud = uc->ud; in udma_tisci_tx_channel_config() local
1957 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_tx_channel_config()
1983 ud->match_data->flags & UDMA_FLAG_TDTYPE) { in udma_tisci_tx_channel_config()
1992 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in udma_tisci_tx_channel_config()
1999 struct udma_dev *ud = uc->ud; in bcdma_tisci_tx_channel_config() local
2000 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_tx_channel_config()
2010 if (ud->match_data->flags & UDMA_FLAG_TDTYPE) { in bcdma_tisci_tx_channel_config()
2019 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in bcdma_tisci_tx_channel_config()
2028 struct udma_dev *ud = uc->ud; in udma_tisci_rx_channel_config() local
2029 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_rx_channel_config()
2058 dev_err(ud->dev, "rchan%d cfg failed %d\n", rchan->id, ret); in udma_tisci_rx_channel_config()
2102 dev_err(ud->dev, "flow%d config failed: %d\n", rchan->id, ret); in udma_tisci_rx_channel_config()
2109 struct udma_dev *ud = uc->ud; in bcdma_tisci_rx_channel_config() local
2110 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_rx_channel_config()
2122 dev_err(ud->dev, "rchan%d cfg failed %d\n", rchan->id, ret); in bcdma_tisci_rx_channel_config()
2129 struct udma_dev *ud = uc->ud; in pktdma_tisci_rx_channel_config() local
2130 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in pktdma_tisci_rx_channel_config()
2142 dev_err(ud->dev, "rchan%d cfg failed %d\n", uc->rchan->id, ret); in pktdma_tisci_rx_channel_config()
2167 dev_err(ud->dev, "flow%d config failed: %d\n", uc->rflow->id, in pktdma_tisci_rx_channel_config()
2176 struct udma_dev *ud = to_udma_dev(chan->device); in udma_alloc_chan_resources() local
2177 const struct udma_soc_data *soc_data = ud->soc_data; in udma_alloc_chan_resources()
2182 uc->dma_dev = ud->dev; in udma_alloc_chan_resources()
2195 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in udma_alloc_chan_resources()
2197 ud->desc_align, in udma_alloc_chan_resources()
2200 dev_err(ud->ddev.dev, in udma_alloc_chan_resources()
2219 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in udma_alloc_chan_resources()
2238 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2239 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2249 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in udma_alloc_chan_resources()
2256 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2267 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in udma_alloc_chan_resources()
2275 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2285 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in udma_alloc_chan_resources()
2297 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in udma_alloc_chan_resources()
2300 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in udma_alloc_chan_resources()
2307 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2309 dev_err(ud->dev, "PSI-L pairing failed: 0x%04x -> 0x%04x\n", in udma_alloc_chan_resources()
2318 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in udma_alloc_chan_resources()
2327 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in udma_alloc_chan_resources()
2333 uc->irq_num_udma = msi_get_virq(ud->dev, irq_udma_idx); in udma_alloc_chan_resources()
2335 dev_err(ud->dev, "Failed to get udma irq (index: %u)\n", in udma_alloc_chan_resources()
2345 dev_err(ud->dev, "chan%d: UDMA irq request failed\n", in udma_alloc_chan_resources()
2362 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
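The alloc ordering implied above: channels and rings first, then TISCI channel config, then PSI-L pairing, then MSI IRQs, unwinding in reverse on failure and ending in navss_psil_unpair(). A condensed sketch (the goto label names are placeholders):

	ret = navss_psil_pair(ud, uc->config.src_thread,
			      uc->config.dst_thread);
	if (ret) {
		dev_err(ud->dev, "PSI-L pairing failed: 0x%04x -> 0x%04x\n",
			uc->config.src_thread, uc->config.dst_thread);
		goto err_res_free;
	}

	uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx);
	if (uc->irq_num_ring <= 0) {
		dev_err(ud->dev, "Failed to get ring irq (index: %u)\n",
			irq_ring_idx);
		ret = -EINVAL;
		goto err_psi_free;	/* unwinds via navss_psil_unpair() */
	}

	ret = request_irq(uc->irq_num_ring, udma_ring_irq_handler,
			  IRQF_TRIGGER_HIGH, uc->name, uc);
	if (ret) {
		dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id);
		goto err_psi_free;
	}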
2381 struct udma_dev *ud = to_udma_dev(chan->device); in bcdma_alloc_chan_resources() local
2382 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in bcdma_alloc_chan_resources()
2400 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2414 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in bcdma_alloc_chan_resources()
2423 uc->config.src_thread = ud->psil_base + uc->tchan->id; in bcdma_alloc_chan_resources()
2434 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2444 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in bcdma_alloc_chan_resources()
2454 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in bcdma_alloc_chan_resources()
2464 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in bcdma_alloc_chan_resources()
2467 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in bcdma_alloc_chan_resources()
2478 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in bcdma_alloc_chan_resources()
2480 ud->desc_align, in bcdma_alloc_chan_resources()
2483 dev_err(ud->ddev.dev, in bcdma_alloc_chan_resources()
2493 ret = navss_psil_pair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2496 dev_err(ud->dev, in bcdma_alloc_chan_resources()
2505 uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx); in bcdma_alloc_chan_resources()
2507 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in bcdma_alloc_chan_resources()
2516 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in bcdma_alloc_chan_resources()
2522 uc->irq_num_udma = msi_get_virq(ud->dev, irq_udma_idx); in bcdma_alloc_chan_resources()
2524 dev_err(ud->dev, "Failed to get bcdma irq (index: %u)\n", in bcdma_alloc_chan_resources()
2534 dev_err(ud->dev, "chan%d: BCDMA irq request failed\n", in bcdma_alloc_chan_resources()
2554 navss_psil_unpair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2584 trigger_event = uc->ud->soc_data->bcdma_trigger_event_offset; in bcdma_router_config()
2593 struct udma_dev *ud = to_udma_dev(chan->device); in pktdma_alloc_chan_resources() local
2594 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in pktdma_alloc_chan_resources()
2609 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in pktdma_alloc_chan_resources()
2618 uc->config.src_thread = ud->psil_base + uc->tchan->id; in pktdma_alloc_chan_resources()
2628 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in pktdma_alloc_chan_resources()
2638 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in pktdma_alloc_chan_resources()
2647 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in pktdma_alloc_chan_resources()
2657 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in pktdma_alloc_chan_resources()
2660 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in pktdma_alloc_chan_resources()
2668 uc->config.hdesc_size, ud->desc_align, in pktdma_alloc_chan_resources()
2671 dev_err(ud->ddev.dev, in pktdma_alloc_chan_resources()
2681 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2683 dev_err(ud->dev, "PSI-L pairing failed: 0x%04x -> 0x%04x\n", in pktdma_alloc_chan_resources()
2690 uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx); in pktdma_alloc_chan_resources()
2692 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in pktdma_alloc_chan_resources()
2701 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in pktdma_alloc_chan_resources()
2713 dev_dbg(ud->dev, in pktdma_alloc_chan_resources()
2718 dev_dbg(ud->dev, in pktdma_alloc_chan_resources()
2727 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2768 dev_err(uc->ud->dev, "Unsupported TR size of %zu\n", tr_size); in udma_alloc_tr_desc()
2792 uc->ud->desc_align); in udma_alloc_tr_desc()
2793 hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev, in udma_alloc_tr_desc()
2903 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_tr()
2915 dev_err(uc->ud->dev, "size %u is not supported\n", in udma_prep_slave_sg_tr()
2986 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_slave_sg_triggered_tr()
2995 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3013 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3033 if (uc->ud->match_data->type == DMA_TYPE_UDMA) { in udma_prep_slave_sg_triggered_tr()
3049 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_slave_sg_triggered_tr()
3200 d->static_tr.bstcnt > uc->ud->match_data->statictr_z_mask) in udma_configure_statictr()
3233 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_pkt()
3248 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3280 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA || in udma_prep_slave_sg_pkt()
3286 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3449 dev_err(uc->ud->dev, in udma_prep_slave_sg()
3480 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_cyclic_tr()
3492 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_dma_cyclic_tr()
3562 if (uc->ud->match_data->type != DMA_TYPE_UDMA) in udma_prep_dma_cyclic_pkt()
3574 dev_err(uc->ud->dev, in udma_prep_dma_cyclic_pkt()
3630 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_dma_cyclic()
3654 dev_err(uc->ud->dev, in udma_prep_dma_cyclic()
3692 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_memcpy()
3706 if (uc->ud->match_data->type != DMA_TYPE_UDMA) { in udma_prep_dma_memcpy()
3707 src |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3708 dest |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3945 dev_warn(uc->ud->dev, "chan%d teardown timeout!\n", in udma_synchronize()
3954 dev_warn(uc->ud->dev, "chan%d refused to stop!\n", uc->id); in udma_synchronize()
4036 struct udma_dev *ud = to_udma_dev(chan->device); in udma_free_chan_resources() local
4059 navss_psil_unpair(ud, uc->config.src_thread, in udma_free_chan_resources()
4095 struct udma_dev *ud; in udma_dma_filter_fn() local
4104 ud = uc->ud; in udma_dma_filter_fn()
4108 dev_err(ud->dev, "Invalid channel atype: %u\n", in udma_dma_filter_fn()
4114 dev_err(ud->dev, "Invalid channel asel: %u\n", in udma_dma_filter_fn()
4135 dev_err(ud->dev, "No configuration for psi-l thread 0x%04x\n", in udma_dma_filter_fn()
4144 if (ud->match_data->type == DMA_TYPE_BCDMA && in udma_dma_filter_fn()
4146 dev_err(ud->dev, in udma_dma_filter_fn()
4161 if (ud->match_data->type == DMA_TYPE_PKTDMA && in udma_dma_filter_fn()
4171 const struct udma_match_data *match_data = ud->match_data; in udma_dma_filter_fn()
4187 ucc->metadata_size, ud->desc_align); in udma_dma_filter_fn()
4189 dev_dbg(ud->dev, "chan%d: Remote thread: 0x%04x (%s)\n", uc->id, in udma_dma_filter_fn()
4195 dev_dbg(ud->dev, "chan%d: triggered channel (type: %u)\n", uc->id, in udma_dma_filter_fn()
4205 struct udma_dev *ud = ofdma->of_dma_data; in udma_of_xlate() local
4206 dma_cap_mask_t mask = ud->ddev.cap_mask; in udma_of_xlate()
4210 if (ud->match_data->type == DMA_TYPE_BCDMA) { in udma_of_xlate()
4225 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_of_xlate()
4241 dev_err(ud->dev, "get channel fail in %s.\n", __func__); in udma_of_xlate()
4400 static int udma_get_mmrs(struct platform_device *pdev, struct udma_dev *ud) in udma_get_mmrs() argument
4405 ud->mmrs[MMR_GCFG] = devm_platform_ioremap_resource_byname(pdev, mmr_names[MMR_GCFG]); in udma_get_mmrs()
4406 if (IS_ERR(ud->mmrs[MMR_GCFG])) in udma_get_mmrs()
4407 return PTR_ERR(ud->mmrs[MMR_GCFG]); in udma_get_mmrs()
4409 cap2 = udma_read(ud->mmrs[MMR_GCFG], 0x28); in udma_get_mmrs()
4410 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in udma_get_mmrs()
4412 switch (ud->match_data->type) { in udma_get_mmrs()
4414 ud->rflow_cnt = UDMA_CAP3_RFLOW_CNT(cap3); in udma_get_mmrs()
4415 ud->tchan_cnt = UDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4416 ud->echan_cnt = UDMA_CAP2_ECHAN_CNT(cap2); in udma_get_mmrs()
4417 ud->rchan_cnt = UDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4420 ud->bchan_cnt = BCDMA_CAP2_BCHAN_CNT(cap2); in udma_get_mmrs()
4421 ud->tchan_cnt = BCDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4422 ud->rchan_cnt = BCDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4423 ud->rflow_cnt = ud->rchan_cnt; in udma_get_mmrs()
4426 cap4 = udma_read(ud->mmrs[MMR_GCFG], 0x30); in udma_get_mmrs()
4427 ud->tchan_cnt = UDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4428 ud->rchan_cnt = UDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4429 ud->rflow_cnt = UDMA_CAP3_RFLOW_CNT(cap3); in udma_get_mmrs()
4430 ud->tflow_cnt = PKTDMA_CAP4_TFLOW_CNT(cap4); in udma_get_mmrs()
4437 if (i == MMR_BCHANRT && ud->bchan_cnt == 0) in udma_get_mmrs()
4439 if (i == MMR_TCHANRT && ud->tchan_cnt == 0) in udma_get_mmrs()
4441 if (i == MMR_RCHANRT && ud->rchan_cnt == 0) in udma_get_mmrs()
4444 ud->mmrs[i] = devm_platform_ioremap_resource_byname(pdev, mmr_names[i]); in udma_get_mmrs()
4445 if (IS_ERR(ud->mmrs[i])) in udma_get_mmrs()
4446 return PTR_ERR(ud->mmrs[i]); in udma_get_mmrs()
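The capability decode above unpacks channel counts from the CAP2/CAP3/CAP4 words of the GCFG region. A sketch of the UDMA-type branch; the field positions follow the K3 register layout but are assumptions here:

#define UDMA_CAP2_TCHAN_CNT(val)	((val) & 0x1ff)
#define UDMA_CAP2_ECHAN_CNT(val)	(((val) >> 9) & 0x1ff)
#define UDMA_CAP2_RCHAN_CNT(val)	(((val) >> 18) & 0x1ff)
#define UDMA_CAP3_RFLOW_CNT(val)	((val) & 0x3fff)

	cap2 = udma_read(ud->mmrs[MMR_GCFG], 0x28);
	cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c);

	ud->rflow_cnt = UDMA_CAP3_RFLOW_CNT(cap3);
	ud->tchan_cnt = UDMA_CAP2_TCHAN_CNT(cap2);
	ud->echan_cnt = UDMA_CAP2_ECHAN_CNT(cap2);
	ud->rchan_cnt = UDMA_CAP2_RCHAN_CNT(cap2);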
4452 static void udma_mark_resource_ranges(struct udma_dev *ud, unsigned long *map, in udma_mark_resource_ranges() argument
4458 dev_dbg(ud->dev, "ti_sci resource range for %s: %d:%d | %d:%d\n", name, in udma_mark_resource_ranges()
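A reconstruction of the helper above: the resource maps start fully set ("everything reserved") and udma_mark_resource_ranges() clears the ranges granted to this host by sysfw, leaving only those channels allocatable. The start_sec/num_sec pair covers the secondary range of a TISCI resource descriptor:

static void udma_mark_resource_ranges(struct udma_dev *ud, unsigned long *map,
				      struct ti_sci_resource_desc *rm_desc,
				      char *name)
{
	bitmap_clear(map, rm_desc->start, rm_desc->num);
	bitmap_clear(map, rm_desc->start_sec, rm_desc->num_sec);
	dev_dbg(ud->dev, "ti_sci resource range for %s: %d:%d | %d:%d\n",
		name, rm_desc->start, rm_desc->num, rm_desc->start_sec,
		rm_desc->num_sec);
}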
4471 static int udma_setup_resources(struct udma_dev *ud) in udma_setup_resources() argument
4474 struct device *dev = ud->dev; in udma_setup_resources()
4476 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_setup_resources()
4480 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in udma_setup_resources()
4483 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4484 ud->tchan_tpl.start_idx[0] = 8; in udma_setup_resources()
4487 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4488 ud->tchan_tpl.start_idx[0] = 2; in udma_setup_resources()
4490 ud->tchan_tpl.levels = 3; in udma_setup_resources()
4491 ud->tchan_tpl.start_idx[1] = UDMA_CAP3_UCHAN_CNT(cap3); in udma_setup_resources()
4492 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in udma_setup_resources()
4494 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4495 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in udma_setup_resources()
4497 ud->tchan_tpl.levels = 1; in udma_setup_resources()
4500 ud->rchan_tpl.levels = ud->tchan_tpl.levels; in udma_setup_resources()
4501 ud->rchan_tpl.start_idx[0] = ud->tchan_tpl.start_idx[0]; in udma_setup_resources()
4502 ud->rchan_tpl.start_idx[1] = ud->tchan_tpl.start_idx[1]; in udma_setup_resources()
4504 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in udma_setup_resources()
4506 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in udma_setup_resources()
4508 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in udma_setup_resources()
4510 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in udma_setup_resources()
4512 ud->rflow_gp_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4515 ud->rflow_gp_map_allocated = devm_kcalloc(dev, in udma_setup_resources()
4516 BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4519 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4522 ud->rflows = devm_kcalloc(dev, ud->rflow_cnt, sizeof(*ud->rflows), in udma_setup_resources()
4525 if (!ud->tchan_map || !ud->rchan_map || !ud->rflow_gp_map || in udma_setup_resources()
4526 !ud->rflow_gp_map_allocated || !ud->tchans || !ud->rchans || in udma_setup_resources()
4527 !ud->rflows || !ud->rflow_in_use) in udma_setup_resources()
4535 bitmap_set(ud->rflow_gp_map_allocated, 0, ud->rchan_cnt); in udma_setup_resources()
4538 bitmap_set(ud->rflow_gp_map, 0, ud->rflow_cnt); in udma_setup_resources()
4554 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in udma_setup_resources()
4557 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in udma_setup_resources()
4559 udma_mark_resource_ranges(ud, ud->tchan_map, in udma_setup_resources()
4567 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in udma_setup_resources()
4570 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in udma_setup_resources()
4572 udma_mark_resource_ranges(ud, ud->rchan_map, in udma_setup_resources()
4583 irq_res.desc[0].num = ud->tchan_cnt; in udma_setup_resources()
4596 irq_res.desc[i].num = ud->rchan_cnt; in udma_setup_resources()
4601 ud->soc_data->oes.udma_rchan; in udma_setup_resources()
4606 ud->soc_data->oes.udma_rchan; in udma_setup_resources()
4611 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in udma_setup_resources()
4614 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in udma_setup_resources()
4622 bitmap_clear(ud->rflow_gp_map, ud->rchan_cnt, in udma_setup_resources()
4623 ud->rflow_cnt - ud->rchan_cnt); in udma_setup_resources()
4626 udma_mark_resource_ranges(ud, ud->rflow_gp_map, in udma_setup_resources()
4633 static int bcdma_setup_resources(struct udma_dev *ud) in bcdma_setup_resources() argument
4636 struct device *dev = ud->dev; in bcdma_setup_resources()
4638 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_setup_resources()
4639 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in bcdma_setup_resources()
4643 cap = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in bcdma_setup_resources()
4645 ud->bchan_tpl.levels = 3; in bcdma_setup_resources()
4646 ud->bchan_tpl.start_idx[1] = BCDMA_CAP3_UBCHAN_CNT(cap); in bcdma_setup_resources()
4647 ud->bchan_tpl.start_idx[0] = BCDMA_CAP3_HBCHAN_CNT(cap); in bcdma_setup_resources()
4649 ud->bchan_tpl.levels = 2; in bcdma_setup_resources()
4650 ud->bchan_tpl.start_idx[0] = BCDMA_CAP3_HBCHAN_CNT(cap); in bcdma_setup_resources()
4652 ud->bchan_tpl.levels = 1; in bcdma_setup_resources()
4655 cap = udma_read(ud->mmrs[MMR_GCFG], 0x30); in bcdma_setup_resources()
4657 ud->rchan_tpl.levels = 3; in bcdma_setup_resources()
4658 ud->rchan_tpl.start_idx[1] = BCDMA_CAP4_URCHAN_CNT(cap); in bcdma_setup_resources()
4659 ud->rchan_tpl.start_idx[0] = BCDMA_CAP4_HRCHAN_CNT(cap); in bcdma_setup_resources()
4661 ud->rchan_tpl.levels = 2; in bcdma_setup_resources()
4662 ud->rchan_tpl.start_idx[0] = BCDMA_CAP4_HRCHAN_CNT(cap); in bcdma_setup_resources()
4664 ud->rchan_tpl.levels = 1; in bcdma_setup_resources()
4668 ud->tchan_tpl.levels = 3; in bcdma_setup_resources()
4669 ud->tchan_tpl.start_idx[1] = BCDMA_CAP4_UTCHAN_CNT(cap); in bcdma_setup_resources()
4670 ud->tchan_tpl.start_idx[0] = BCDMA_CAP4_HTCHAN_CNT(cap); in bcdma_setup_resources()
4672 ud->tchan_tpl.levels = 2; in bcdma_setup_resources()
4673 ud->tchan_tpl.start_idx[0] = BCDMA_CAP4_HTCHAN_CNT(cap); in bcdma_setup_resources()
4675 ud->tchan_tpl.levels = 1; in bcdma_setup_resources()
4678 ud->bchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->bchan_cnt), in bcdma_setup_resources()
4680 ud->bchans = devm_kcalloc(dev, ud->bchan_cnt, sizeof(*ud->bchans), in bcdma_setup_resources()
4682 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in bcdma_setup_resources()
4684 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in bcdma_setup_resources()
4686 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in bcdma_setup_resources()
4688 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in bcdma_setup_resources()
4691 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rchan_cnt), in bcdma_setup_resources()
4694 ud->rflows = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rflows), in bcdma_setup_resources()
4697 if (!ud->bchan_map || !ud->tchan_map || !ud->rchan_map || in bcdma_setup_resources()
4698 !ud->rflow_in_use || !ud->bchans || !ud->tchans || !ud->rchans || in bcdma_setup_resources()
4699 !ud->rflows) in bcdma_setup_resources()
4706 if (i == RM_RANGE_BCHAN && ud->bchan_cnt == 0) in bcdma_setup_resources()
4708 if (i == RM_RANGE_TCHAN && ud->tchan_cnt == 0) in bcdma_setup_resources()
4710 if (i == RM_RANGE_RCHAN && ud->rchan_cnt == 0) in bcdma_setup_resources()
4722 if (ud->bchan_cnt) { in bcdma_setup_resources()
4725 bitmap_zero(ud->bchan_map, ud->bchan_cnt); in bcdma_setup_resources()
4728 bitmap_fill(ud->bchan_map, ud->bchan_cnt); in bcdma_setup_resources()
4730 udma_mark_resource_ranges(ud, ud->bchan_map, in bcdma_setup_resources()
4738 if (ud->tchan_cnt) { in bcdma_setup_resources()
4741 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in bcdma_setup_resources()
4744 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in bcdma_setup_resources()
4746 udma_mark_resource_ranges(ud, ud->tchan_map, in bcdma_setup_resources()
4754 if (ud->rchan_cnt) { in bcdma_setup_resources()
4757 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in bcdma_setup_resources()
4760 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in bcdma_setup_resources()
4762 udma_mark_resource_ranges(ud, ud->rchan_map, in bcdma_setup_resources()
4772 if (ud->bchan_cnt) { in bcdma_setup_resources()
4776 irq_res.desc[0].num = ud->bchan_cnt; in bcdma_setup_resources()
4786 if (ud->tchan_cnt) { in bcdma_setup_resources()
4790 irq_res.desc[i].num = ud->tchan_cnt; in bcdma_setup_resources()
4792 irq_res.desc[i + 1].num = ud->tchan_cnt; in bcdma_setup_resources()
4806 if (ud->rchan_cnt) { in bcdma_setup_resources()
4810 irq_res.desc[i].num = ud->rchan_cnt; in bcdma_setup_resources()
4812 irq_res.desc[i + 1].num = ud->rchan_cnt; in bcdma_setup_resources()
4827 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in bcdma_setup_resources()
4830 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in bcdma_setup_resources()
4837 static int pktdma_setup_resources(struct udma_dev *ud) in pktdma_setup_resources() argument
4840 struct device *dev = ud->dev; in pktdma_setup_resources()
4842 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in pktdma_setup_resources()
4843 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in pktdma_setup_resources()
4847 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in pktdma_setup_resources()
4849 ud->tchan_tpl.levels = 3; in pktdma_setup_resources()
4850 ud->tchan_tpl.start_idx[1] = UDMA_CAP3_UCHAN_CNT(cap3); in pktdma_setup_resources()
4851 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in pktdma_setup_resources()
4853 ud->tchan_tpl.levels = 2; in pktdma_setup_resources()
4854 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in pktdma_setup_resources()
4856 ud->tchan_tpl.levels = 1; in pktdma_setup_resources()
4859 ud->rchan_tpl.levels = ud->tchan_tpl.levels; in pktdma_setup_resources()
4860 ud->rchan_tpl.start_idx[0] = ud->tchan_tpl.start_idx[0]; in pktdma_setup_resources()
4861 ud->rchan_tpl.start_idx[1] = ud->tchan_tpl.start_idx[1]; in pktdma_setup_resources()
4863 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in pktdma_setup_resources()
4865 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in pktdma_setup_resources()
4867 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in pktdma_setup_resources()
4869 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in pktdma_setup_resources()
4871 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rflow_cnt), in pktdma_setup_resources()
4874 ud->rflows = devm_kcalloc(dev, ud->rflow_cnt, sizeof(*ud->rflows), in pktdma_setup_resources()
4876 ud->tflow_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tflow_cnt), in pktdma_setup_resources()
4879 if (!ud->tchan_map || !ud->rchan_map || !ud->tflow_map || !ud->tchans || in pktdma_setup_resources()
4880 !ud->rchans || !ud->rflows || !ud->rflow_in_use) in pktdma_setup_resources()
4897 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in pktdma_setup_resources()
4899 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in pktdma_setup_resources()
4901 udma_mark_resource_ranges(ud, ud->tchan_map, in pktdma_setup_resources()
4908 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in pktdma_setup_resources()
4910 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in pktdma_setup_resources()
4912 udma_mark_resource_ranges(ud, ud->rchan_map, in pktdma_setup_resources()
4920 bitmap_zero(ud->rflow_in_use, ud->rflow_cnt); in pktdma_setup_resources()
4923 bitmap_fill(ud->rflow_in_use, ud->rflow_cnt); in pktdma_setup_resources()
4925 udma_mark_resource_ranges(ud, ud->rflow_in_use, in pktdma_setup_resources()
4934 bitmap_zero(ud->tflow_map, ud->tflow_cnt); in pktdma_setup_resources()
4937 bitmap_fill(ud->tflow_map, ud->tflow_cnt); in pktdma_setup_resources()
4939 udma_mark_resource_ranges(ud, ud->tflow_map, in pktdma_setup_resources()
4950 irq_res.desc[0].num = ud->tflow_cnt; in pktdma_setup_resources()
4962 irq_res.desc[i].num = ud->rflow_cnt; in pktdma_setup_resources()
4970 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in pktdma_setup_resources()
4973 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in pktdma_setup_resources()
4980 static int setup_resources(struct udma_dev *ud) in setup_resources() argument
4982 struct device *dev = ud->dev; in setup_resources()
4985 switch (ud->match_data->type) { in setup_resources()
4987 ret = udma_setup_resources(ud); in setup_resources()
4990 ret = bcdma_setup_resources(ud); in setup_resources()
4993 ret = pktdma_setup_resources(ud); in setup_resources()
5002 ch_count = ud->bchan_cnt + ud->tchan_cnt + ud->rchan_cnt; in setup_resources()
5003 if (ud->bchan_cnt) in setup_resources()
5004 ch_count -= bitmap_weight(ud->bchan_map, ud->bchan_cnt); in setup_resources()
5005 ch_count -= bitmap_weight(ud->tchan_map, ud->tchan_cnt); in setup_resources()
5006 ch_count -= bitmap_weight(ud->rchan_map, ud->rchan_cnt); in setup_resources()
5010 ud->channels = devm_kcalloc(dev, ch_count, sizeof(*ud->channels), in setup_resources()
5012 if (!ud->channels) in setup_resources()
5015 switch (ud->match_data->type) { in setup_resources()
5020 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5021 ud->tchan_cnt), in setup_resources()
5022 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5023 ud->rchan_cnt), in setup_resources()
5024 ud->rflow_cnt - bitmap_weight(ud->rflow_gp_map, in setup_resources()
5025 ud->rflow_cnt)); in setup_resources()
5031 ud->bchan_cnt - bitmap_weight(ud->bchan_map, in setup_resources()
5032 ud->bchan_cnt), in setup_resources()
5033 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5034 ud->tchan_cnt), in setup_resources()
5035 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5036 ud->rchan_cnt)); in setup_resources()
5042 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5043 ud->tchan_cnt), in setup_resources()
5044 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5045 ud->rchan_cnt)); in setup_resources()
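Channel accounting at the end of setup_resources(): a bit still set in a map marks a channel this host may not use, so the usable count is the raw total minus the weight of each exclusion bitmap. Condensed from the fragments (the empty-count check is an assumption):

	ch_count = ud->bchan_cnt + ud->tchan_cnt + ud->rchan_cnt;
	if (ud->bchan_cnt)
		ch_count -= bitmap_weight(ud->bchan_map, ud->bchan_cnt);
	ch_count -= bitmap_weight(ud->tchan_map, ud->tchan_cnt);
	ch_count -= bitmap_weight(ud->rchan_map, ud->rchan_cnt);
	if (!ch_count)
		return -ENODEV;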
5054 static int udma_setup_rx_flush(struct udma_dev *ud) in udma_setup_rx_flush() argument
5056 struct udma_rx_flush *rx_flush = &ud->rx_flush; in udma_setup_rx_flush()
5060 struct device *dev = ud->dev; in udma_setup_rx_flush()
5082 ud->desc_align); in udma_setup_rx_flush()
5122 ud->desc_align); in udma_setup_rx_flush()
5166 if (uc->ud->match_data->type == DMA_TYPE_BCDMA) { in udma_dbg_summary_show_chan()
5177 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5183 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5222 static enum dmaengine_alignment udma_get_copy_align(struct udma_dev *ud) in udma_get_copy_align() argument
5224 const struct udma_match_data *match_data = ud->match_data; in udma_get_copy_align()
5231 if (ud->bchan_cnt) in udma_get_copy_align()
5232 tpl = udma_get_chan_tpl_index(&ud->bchan_tpl, 0); in udma_get_copy_align()
5233 else if (ud->tchan_cnt) in udma_get_copy_align()
5234 tpl = udma_get_chan_tpl_index(&ud->tchan_tpl, 0); in udma_get_copy_align()
5261 struct udma_dev *ud; in udma_probe() local
5270 ud = devm_kzalloc(dev, sizeof(*ud), GFP_KERNEL); in udma_probe()
5271 if (!ud) in udma_probe()
5284 ud->match_data = match->data; in udma_probe()
5291 ud->soc_data = soc->data; in udma_probe()
5293 ret = udma_get_mmrs(pdev, ud); in udma_probe()
5297 ud->tisci_rm.tisci = ti_sci_get_by_phandle(dev->of_node, "ti,sci"); in udma_probe()
5298 if (IS_ERR(ud->tisci_rm.tisci)) in udma_probe()
5299 return PTR_ERR(ud->tisci_rm.tisci); in udma_probe()
5302 &ud->tisci_rm.tisci_dev_id); in udma_probe()
5307 pdev->id = ud->tisci_rm.tisci_dev_id; in udma_probe()
5310 &ud->tisci_rm.tisci_navss_dev_id); in udma_probe()
5316 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_probe()
5318 &ud->atype); in udma_probe()
5319 if (!ret && ud->atype > 2) { in udma_probe()
5320 dev_err(dev, "Invalid atype: %u\n", ud->atype); in udma_probe()
5325 &ud->asel); in udma_probe()
5326 if (!ret && ud->asel > 15) { in udma_probe()
5327 dev_err(dev, "Invalid asel: %u\n", ud->asel); in udma_probe()
5332 ud->tisci_rm.tisci_udmap_ops = &ud->tisci_rm.tisci->ops.rm_udmap_ops; in udma_probe()
5333 ud->tisci_rm.tisci_psil_ops = &ud->tisci_rm.tisci->ops.rm_psil_ops; in udma_probe()
5335 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_probe()
5336 ud->ringacc = of_k3_ringacc_get_by_phandle(dev->of_node, "ti,ringacc"); in udma_probe()
5340 ring_init_data.tisci = ud->tisci_rm.tisci; in udma_probe()
5341 ring_init_data.tisci_dev_id = ud->tisci_rm.tisci_dev_id; in udma_probe()
5342 if (ud->match_data->type == DMA_TYPE_BCDMA) { in udma_probe()
5343 ring_init_data.num_rings = ud->bchan_cnt + in udma_probe()
5344 ud->tchan_cnt + in udma_probe()
5345 ud->rchan_cnt; in udma_probe()
5347 ring_init_data.num_rings = ud->rflow_cnt + in udma_probe()
5348 ud->tflow_cnt; in udma_probe()
5351 ud->ringacc = k3_ringacc_dmarings_init(pdev, &ring_init_data); in udma_probe()
5354 if (IS_ERR(ud->ringacc)) in udma_probe()
5355 return PTR_ERR(ud->ringacc); in udma_probe()
5364 dma_cap_set(DMA_SLAVE, ud->ddev.cap_mask); in udma_probe()
5366 if (ud->match_data->type != DMA_TYPE_PKTDMA) { in udma_probe()
5367 dma_cap_set(DMA_CYCLIC, ud->ddev.cap_mask); in udma_probe()
5368 ud->ddev.device_prep_dma_cyclic = udma_prep_dma_cyclic; in udma_probe()
5371 ud->ddev.device_config = udma_slave_config; in udma_probe()
5372 ud->ddev.device_prep_slave_sg = udma_prep_slave_sg; in udma_probe()
5373 ud->ddev.device_issue_pending = udma_issue_pending; in udma_probe()
5374 ud->ddev.device_tx_status = udma_tx_status; in udma_probe()
5375 ud->ddev.device_pause = udma_pause; in udma_probe()
5376 ud->ddev.device_resume = udma_resume; in udma_probe()
5377 ud->ddev.device_terminate_all = udma_terminate_all; in udma_probe()
5378 ud->ddev.device_synchronize = udma_synchronize; in udma_probe()
5380 ud->ddev.dbg_summary_show = udma_dbg_summary_show; in udma_probe()
5383 switch (ud->match_data->type) { in udma_probe()
5385 ud->ddev.device_alloc_chan_resources = in udma_probe()
5389 ud->ddev.device_alloc_chan_resources = in udma_probe()
5391 ud->ddev.device_router_config = bcdma_router_config; in udma_probe()
5394 ud->ddev.device_alloc_chan_resources = in udma_probe()
5400 ud->ddev.device_free_chan_resources = udma_free_chan_resources; in udma_probe()
5402 ud->ddev.src_addr_widths = TI_UDMAC_BUSWIDTHS; in udma_probe()
5403 ud->ddev.dst_addr_widths = TI_UDMAC_BUSWIDTHS; in udma_probe()
5404 ud->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in udma_probe()
5405 ud->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in udma_probe()
5406 ud->ddev.desc_metadata_modes = DESC_METADATA_CLIENT | in udma_probe()
5408 if (ud->match_data->enable_memcpy_support && in udma_probe()
5409 !(ud->match_data->type == DMA_TYPE_BCDMA && ud->bchan_cnt == 0)) { in udma_probe()
5410 dma_cap_set(DMA_MEMCPY, ud->ddev.cap_mask); in udma_probe()
5411 ud->ddev.device_prep_dma_memcpy = udma_prep_dma_memcpy; in udma_probe()
5412 ud->ddev.directions |= BIT(DMA_MEM_TO_MEM); in udma_probe()
5415 ud->ddev.dev = dev; in udma_probe()
5416 ud->dev = dev; in udma_probe()
5417 ud->psil_base = ud->match_data->psil_base; in udma_probe()
5419 INIT_LIST_HEAD(&ud->ddev.channels); in udma_probe()
5420 INIT_LIST_HEAD(&ud->desc_to_purge); in udma_probe()
5422 ch_count = setup_resources(ud); in udma_probe()
5426 spin_lock_init(&ud->lock); in udma_probe()
5427 INIT_WORK(&ud->purge_work, udma_purge_desc_work); in udma_probe()
5429 ud->desc_align = 64; in udma_probe()
5430 if (ud->desc_align < dma_get_cache_alignment()) in udma_probe()
5431 ud->desc_align = dma_get_cache_alignment(); in udma_probe()
5433 ret = udma_setup_rx_flush(ud); in udma_probe()
5437 for (i = 0; i < ud->bchan_cnt; i++) { in udma_probe()
5438 struct udma_bchan *bchan = &ud->bchans[i]; in udma_probe()
5441 bchan->reg_rt = ud->mmrs[MMR_BCHANRT] + i * 0x1000; in udma_probe()
5444 for (i = 0; i < ud->tchan_cnt; i++) { in udma_probe()
5445 struct udma_tchan *tchan = &ud->tchans[i]; in udma_probe()
5448 tchan->reg_rt = ud->mmrs[MMR_TCHANRT] + i * 0x1000; in udma_probe()
5451 for (i = 0; i < ud->rchan_cnt; i++) { in udma_probe()
5452 struct udma_rchan *rchan = &ud->rchans[i]; in udma_probe()
5455 rchan->reg_rt = ud->mmrs[MMR_RCHANRT] + i * 0x1000; in udma_probe()
5458 for (i = 0; i < ud->rflow_cnt; i++) { in udma_probe()
5459 struct udma_rflow *rflow = &ud->rflows[i]; in udma_probe()
5465 struct udma_chan *uc = &ud->channels[i]; in udma_probe()
5467 uc->ud = ud; in udma_probe()
5480 vchan_init(&uc->vc, &ud->ddev); in udma_probe()
5488 ud->ddev.copy_align = udma_get_copy_align(ud); in udma_probe()
5490 ret = dma_async_device_register(&ud->ddev); in udma_probe()
5496 platform_set_drvdata(pdev, ud); in udma_probe()
5498 ret = of_dma_controller_register(dev->of_node, udma_of_xlate, ud); in udma_probe()
5501 dma_async_device_unregister(&ud->ddev); in udma_probe()
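Registration order at the end of probe, as the last fragments show: register the dmaengine device first, then the OF translator, rolling the first back if the second fails. A condensed sketch:

	ret = dma_async_device_register(&ud->ddev);
	if (ret) {
		dev_err(dev, "failed to register slave DMA engine: %d\n", ret);
		return ret;
	}

	platform_set_drvdata(pdev, ud);

	ret = of_dma_controller_register(dev->of_node, udma_of_xlate, ud);
	if (ret) {
		dev_err(dev, "failed to register of_dma controller\n");
		dma_async_device_unregister(&ud->ddev);
	}

	return ret;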