/linux-6.6.21/drivers/dma/ |
D | imx-sdma.c |
      440  struct sdma_engine *sdma;  member
      685  static inline u32 chnenbl_ofs(struct sdma_engine *sdma, unsigned int event)  in chnenbl_ofs() argument
      687  u32 chnenbl0 = sdma->drvdata->chnenbl0;  in chnenbl_ofs()
      694  struct sdma_engine *sdma = sdmac->sdma;  in sdma_config_ownership() local
      701  evt = readl_relaxed(sdma->regs + SDMA_H_EVTOVR);  in sdma_config_ownership()
      702  mcu = readl_relaxed(sdma->regs + SDMA_H_HOSTOVR);  in sdma_config_ownership()
      703  dsp = readl_relaxed(sdma->regs + SDMA_H_DSPOVR);  in sdma_config_ownership()
      720  writel_relaxed(evt, sdma->regs + SDMA_H_EVTOVR);  in sdma_config_ownership()
      721  writel_relaxed(mcu, sdma->regs + SDMA_H_HOSTOVR);  in sdma_config_ownership()
      722  writel_relaxed(dsp, sdma->regs + SDMA_H_DSPOVR);  in sdma_config_ownership()
      [all …]
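The sdma_config_ownership() matches above are a plain read-modify-write over the three ownership override registers (EVTOVR, HOSTOVR, DSPOVR). A minimal sketch of that pattern follows; only the register reads and writes are taken from the listing, while the helper name and the per-channel bit convention are assumptions for illustration.

	/*
	 * Hedged sketch of the ownership read-modify-write seen above
	 * (imx-sdma.c lines 694-722).  The helper name and the bit
	 * handling in the middle are assumptions, not the driver's code.
	 */
	static void sdma_channel_ownership_sketch(struct sdma_engine *sdma,
						  unsigned int channel,
						  bool host_owned)
	{
		unsigned long evt, mcu, dsp;

		evt = readl_relaxed(sdma->regs + SDMA_H_EVTOVR);
		mcu = readl_relaxed(sdma->regs + SDMA_H_HOSTOVR);
		dsp = readl_relaxed(sdma->regs + SDMA_H_DSPOVR);

		/* assumed convention: one override bit per channel */
		if (host_owned)
			__set_bit(channel, &mcu);
		else
			__clear_bit(channel, &mcu);

		writel_relaxed(evt, sdma->regs + SDMA_H_EVTOVR);
		writel_relaxed(mcu, sdma->regs + SDMA_H_HOSTOVR);
		writel_relaxed(dsp, sdma->regs + SDMA_H_DSPOVR);
	}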
|
/linux-6.6.21/drivers/net/ethernet/marvell/prestera/ |
D | prestera_rxtx.c |
      107  struct prestera_sdma sdma;  member
      110  static int prestera_sdma_buf_init(struct prestera_sdma *sdma,  in prestera_sdma_buf_init() argument
      116  desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma);  in prestera_sdma_buf_init()
      128  static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa)  in prestera_sdma_map() argument
      130  return sdma->map_addr + pa;  in prestera_sdma_map()
      133  static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma,  in prestera_sdma_rx_desc_init() argument
      142  desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf));  in prestera_sdma_rx_desc_init()
      150  static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma,  in prestera_sdma_rx_desc_set_next() argument
      154  desc->next = cpu_to_le32(prestera_sdma_map(sdma, next));  in prestera_sdma_rx_desc_set_next()
      157  static int prestera_sdma_rx_skb_alloc(struct prestera_sdma *sdma,  in prestera_sdma_rx_skb_alloc() argument
      [all …]
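The prestera matches show RX descriptors being carved out of a dma_pool and chained with 32-bit bus addresses produced by prestera_sdma_map() (a fixed map_addr + pa offset). A minimal sketch of that allocate-and-link step; the descriptor layout and helper names beyond desc_pool, map_addr, buff and next are assumptions, not the driver's actual definitions.

	/*
	 * Hedged sketch of the allocate-and-link pattern visible above.
	 * Field names not present in the listing are placeholders.
	 */
	struct prestera_sdma_desc_sketch {
		__le32 word1;	/* assumed control word */
		__le32 word2;	/* assumed length/status word */
		__le32 buff;	/* buffer bus address, as on line 142 */
		__le32 next;	/* next descriptor bus address, as on line 154 */
	};

	static int sdma_rx_desc_link_sketch(struct prestera_sdma *sdma,
					    struct prestera_sdma_desc_sketch **descp,
					    dma_addr_t *dmap, dma_addr_t next_dma)
	{
		struct prestera_sdma_desc_sketch *desc;
		dma_addr_t dma;

		/* descriptors come from a coherent dma_pool, as on line 116 */
		desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma);
		if (!desc)
			return -ENOMEM;

		/* chain with the same fixed-offset translation as prestera_sdma_map() */
		desc->next = cpu_to_le32(sdma->map_addr + next_dma);

		*descp = desc;
		*dmap = dma;
		return 0;
	}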
|
/linux-6.6.21/Documentation/devicetree/bindings/dma/ |
D | fsl,imx-sdma.yaml |
      4   $id: http://devicetree.org/schemas/dma/fsl,imx-sdma.yaml#
      20  - fsl,imx50-sdma
      21  - fsl,imx51-sdma
      22  - fsl,imx53-sdma
      23  - fsl,imx6q-sdma
      24  - fsl,imx7d-sdma
      25  - const: fsl,imx35-sdma
      28  - fsl,imx6sx-sdma
      29  - fsl,imx6sl-sdma
      30  - const: fsl,imx6q-sdma
      [all …]
|
/linux-6.6.21/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_sdma.c |
      42   for (i = 0; i < adev->sdma.num_instances; i++)  in amdgpu_sdma_get_instance_from_ring()
      43   if (ring == &adev->sdma.instance[i].ring ||  in amdgpu_sdma_get_instance_from_ring()
      44   ring == &adev->sdma.instance[i].page)  in amdgpu_sdma_get_instance_from_ring()
      45   return &adev->sdma.instance[i];  in amdgpu_sdma_get_instance_from_ring()
      55   for (i = 0; i < adev->sdma.num_instances; i++) {  in amdgpu_sdma_get_index_from_ring()
      56   if (ring == &adev->sdma.instance[i].ring ||  in amdgpu_sdma_get_index_from_ring()
      57   ring == &adev->sdma.instance[i].page) {  in amdgpu_sdma_get_index_from_ring()
      82   sdma[ring->idx].sdma_meta_data);  in amdgpu_sdma_get_csa_mc_addr()
      108  for (i = 0; i < adev->sdma.num_instances; i++) {  in amdgpu_sdma_ras_late_init()
      109  r = amdgpu_irq_get(adev, &adev->sdma.ecc_irq,  in amdgpu_sdma_ras_late_init()
      [all …]
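The lookup helpers above all walk adev->sdma.instance[] and match the ring pointer against both the gfx ring and the page ring of each instance. A condensed sketch of that idiom, using only names visible in the listing (the standalone helper itself is hypothetical):

	/*
	 * Hedged sketch of the instance-lookup idiom seen in
	 * amdgpu_sdma_get_instance_from_ring()/_get_index_from_ring().
	 */
	static struct amdgpu_sdma_instance *
	sdma_instance_from_ring_sketch(struct amdgpu_device *adev,
				       struct amdgpu_ring *ring)
	{
		int i;

		for (i = 0; i < adev->sdma.num_instances; i++) {
			/* each instance owns a gfx ring and an optional page ring */
			if (ring == &adev->sdma.instance[i].ring ||
			    ring == &adev->sdma.instance[i].page)
				return &adev->sdma.instance[i];
		}

		return NULL;	/* ring does not belong to any SDMA instance */
	}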
|
D | sdma_v4_4_2.c |
      105  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_4_2_inst_init_golden_registers()
      134  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_4_2_init_microcode()
      288  struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);  in sdma_v4_4_2_ring_insert_nop() local
      292  if (sdma && sdma->burst_nop && (i == 0))  in sdma_v4_4_2_ring_insert_nop()
      428  struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES];  in sdma_v4_4_2_inst_gfx_stop() local
      433  sdma[i] = &adev->sdma.instance[i].ring;  in sdma_v4_4_2_inst_gfx_stop()
      435  if ((adev->mman.buffer_funcs_ring == sdma[i]) && unset != 1) {  in sdma_v4_4_2_inst_gfx_stop()
      474  struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES];  in sdma_v4_4_2_inst_page_stop() local
      480  sdma[i] = &adev->sdma.instance[i].page;  in sdma_v4_4_2_inst_page_stop()
      482  if ((adev->mman.buffer_funcs_ring == sdma[i]) &&  in sdma_v4_4_2_inst_page_stop()
      [all …]
|
D | sdma_v4_0.c |
      555   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_setup_ulv()
      580   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_init_microcode()
      737   struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);  in sdma_v4_0_ring_insert_nop() local
      741   if (sdma && sdma->burst_nop && (i == 0))  in sdma_v4_0_ring_insert_nop()
      880   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_gfx_enable()
      916   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_page_stop()
      965   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_ctx_switch_enable()
      982   adev->sdma.instance[i].fw_version >= 14)  in sdma_v4_0_ctx_switch_enable()
      1006  if (adev->sdma.has_page_queue)  in sdma_v4_0_enable()
      1010  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_enable()
      [all …]
|
D | sdma_v3_0.c |
      254  for (i = 0; i < adev->sdma.num_instances; i++)  in sdma_v3_0_free_microcode()
      255  amdgpu_ucode_release(&adev->sdma.instance[i].fw);  in sdma_v3_0_free_microcode()
      306  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v3_0_init_microcode()
      311  err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name);  in sdma_v3_0_init_microcode()
      314  hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data;  in sdma_v3_0_init_microcode()
      315  adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version);  in sdma_v3_0_init_microcode()
      316  adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version);  in sdma_v3_0_init_microcode()
      317  if (adev->sdma.instance[i].feature_version >= 20)  in sdma_v3_0_init_microcode()
      318  adev->sdma.instance[i].burst_nop = true;  in sdma_v3_0_init_microcode()
      322  info->fw = adev->sdma.instance[i].fw;  in sdma_v3_0_init_microcode()
      [all …]
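sdma_v3_0_init_microcode() (like sdma_v2_4 and cik_sdma below) requests each instance's firmware blob, parses the v1.0 header, records the ucode and feature versions, and enables burst NOPs for feature_version >= 20. A trimmed sketch of that per-instance step, with error unwinding and the wider ucode bookkeeping omitted and the helper name hypothetical:

	/*
	 * Hedged sketch of the per-instance microcode setup visible in
	 * sdma_v3_0_init_microcode().  PSP/SMU ucode registration and
	 * cleanup on failure are deliberately left out.
	 */
	static int sdma_init_one_instance_fw_sketch(struct amdgpu_device *adev,
						    int i, const char *fw_name)
	{
		const struct sdma_firmware_header_v1_0 *hdr;
		int err;

		err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name);
		if (err)
			return err;

		hdr = (const struct sdma_firmware_header_v1_0 *)
			adev->sdma.instance[i].fw->data;
		adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version);
		adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version);

		/* per the listing, feature_version >= 20 advertises burst NOP support */
		if (adev->sdma.instance[i].feature_version >= 20)
			adev->sdma.instance[i].burst_nop = true;

		return 0;
	}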
|
D | cik_sdma.c |
      77   for (i = 0; i < adev->sdma.num_instances; i++)  in cik_sdma_free_microcode()
      78   amdgpu_ucode_release(&adev->sdma.instance[i].fw);  in cik_sdma_free_microcode()
      134  for (i = 0; i < adev->sdma.num_instances; i++) {  in cik_sdma_init_microcode()
      139  err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name);  in cik_sdma_init_microcode()
      146  for (i = 0; i < adev->sdma.num_instances; i++)  in cik_sdma_init_microcode()
      147  amdgpu_ucode_release(&adev->sdma.instance[i].fw);  in cik_sdma_init_microcode()
      199  struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);  in cik_sdma_ring_insert_nop() local
      203  if (sdma && sdma->burst_nop && (i == 0))  in cik_sdma_ring_insert_nop()
      313  for (i = 0; i < adev->sdma.num_instances; i++) {  in cik_sdma_gfx_stop()
      370  for (i = 0; i < adev->sdma.num_instances; i++) {  in cik_ctx_switch_enable()
      [all …]
|
D | sdma_v2_4.c |
      117  for (i = 0; i < adev->sdma.num_instances; i++)  in sdma_v2_4_free_microcode()
      118  amdgpu_ucode_release(&adev->sdma.instance[i].fw);  in sdma_v2_4_free_microcode()
      148  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v2_4_init_microcode()
      153  err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name);  in sdma_v2_4_init_microcode()
      156  hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data;  in sdma_v2_4_init_microcode()
      157  adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version);  in sdma_v2_4_init_microcode()
      158  adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version);  in sdma_v2_4_init_microcode()
      159  if (adev->sdma.instance[i].feature_version >= 20)  in sdma_v2_4_init_microcode()
      160  adev->sdma.instance[i].burst_nop = true;  in sdma_v2_4_init_microcode()
      165  info->fw = adev->sdma.instance[i].fw;  in sdma_v2_4_init_microcode()
      [all …]
|
D | si_dma.c |
      49   u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1;  in si_dma_ring_get_wptr()
      57   u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1;  in si_dma_ring_set_wptr()
      120  for (i = 0; i < adev->sdma.num_instances; i++) {  in si_dma_stop()
      135  for (i = 0; i < adev->sdma.num_instances; i++) {  in si_dma_start()
      136  ring = &adev->sdma.instance[i].ring;  in si_dma_start()
      469  adev->sdma.num_instances = 2;  in si_dma_early_init()
      487  &adev->sdma.trap_irq);  in si_dma_sw_init()
      493  &adev->sdma.trap_irq);  in si_dma_sw_init()
      497  for (i = 0; i < adev->sdma.num_instances; i++) {  in si_dma_sw_init()
      498  ring = &adev->sdma.instance[i].ring;  in si_dma_sw_init()
      [all …]
|
D | sdma_v5_2.c |
      208  struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);  in sdma_v5_2_ring_insert_nop() local
      212  if (sdma && sdma->burst_nop && (i == 0))  in sdma_v5_2_ring_insert_nop()
      369  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_2_gfx_stop()
      428  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_2_ctx_switch_enable()
      467  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_2_enable()
      495  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_2_gfx_resume()
      496  ring = &adev->sdma.instance[i].ring;  in sdma_v5_2_gfx_resume()
      667  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_2_load_microcode()
      668  if (!adev->sdma.instance[i].fw)  in sdma_v5_2_load_microcode()
      671  hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data;  in sdma_v5_2_load_microcode()
      [all …]
|
D | sdma_v6_0.c |
      226  struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);  in sdma_v6_0_ring_insert_nop() local
      230  if (sdma && sdma->burst_nop && (i == 0))  in sdma_v6_0_ring_insert_nop()
      386  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v6_0_gfx_stop()
      422  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v6_0_ctxempty_int_enable()
      452  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v6_0_enable()
      478  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v6_0_gfx_resume()
      479  ring = &adev->sdma.instance[i].ring;  in sdma_v6_0_gfx_resume()
      548  adev->doorbell_index.sdma_doorbell_range * adev->sdma.num_instances);  in sdma_v6_0_gfx_resume()
      637  if (!adev->sdma.instance[0].fw)  in sdma_v6_0_load_microcode()
      646  hdr = (const struct sdma_firmware_header_v2_0 *)adev->sdma.instance[0].fw->data;  in sdma_v6_0_load_microcode()
      [all …]
|
D | sdma_v5_0.c |
      243  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_0_init_microcode()
      401  struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);  in sdma_v5_0_ring_insert_nop() local
      405  if (sdma && sdma->burst_nop && (i == 0))  in sdma_v5_0_ring_insert_nop()
      564  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_0_gfx_stop()
      623  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_0_ctx_switch_enable()
      665  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_0_enable()
      692  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_0_gfx_resume()
      693  ring = &adev->sdma.instance[i].ring;  in sdma_v5_0_gfx_resume()
      867  for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_0_load_microcode()
      868  if (!adev->sdma.instance[i].fw)  in sdma_v5_0_load_microcode()
      [all …]
|
/linux-6.6.21/arch/arm/boot/dts/ti/omap/ |
D | omap2.dtsi |
      62   dmas = <&sdma 9 &sdma 10>;
      100  sdma: dma-controller@0 {  label
      101  compatible = "ti,omap2420-sdma", "ti,omap-sdma";
      136  dmas = <&sdma 35 &sdma 36 &sdma 37 &sdma 38
      137  &sdma 39 &sdma 40 &sdma 41 &sdma 42>;
      147  dmas = <&sdma 43 &sdma 44 &sdma 45 &sdma 46>;
      163  dmas = <&sdma 13>;
      172  dmas = <&sdma 49 &sdma 50>;
      182  dmas = <&sdma 51 &sdma 52>;
      192  dmas = <&sdma 53 &sdma 54>;
|
D | omap3.dtsi |
      181  dmas = <&sdma 9 &sdma 10>;
      207  dmas = <&sdma 65 &sdma 66>;
      289  sdma: dma-controller@0 {  label
      290  compatible = "ti,omap3430-sdma", "ti,omap-sdma";
      373  dmas = <&sdma 49 &sdma 50>;
      383  dmas = <&sdma 51 &sdma 52>;
      393  dmas = <&sdma 53 &sdma 54>;
      448  dmas = <&sdma 35>,
      449  <&sdma 36>,
      450  <&sdma 37>,
      [all …]
|
D | omap2430.dtsi |
      186  dmas = <&sdma 31>,
      187  <&sdma 32>;
      202  dmas = <&sdma 33>,
      203  <&sdma 34>;
      218  dmas = <&sdma 17>,
      219  <&sdma 18>;
      234  dmas = <&sdma 19>,
      235  <&sdma 20>;
      250  dmas = <&sdma 21>,
      251  <&sdma 22>;
      [all …]
|
/linux-6.6.21/arch/arm/boot/dts/nxp/imx/ |
D | imx6sll.dtsi |
      160  dmas = <&sdma 14 18 0>, <&sdma 15 18 0>;
      184  dmas = <&sdma 3 7 1>, <&sdma 4 7 2>;
      196  dmas = <&sdma 5 7 1>, <&sdma 6 7 2>;
      208  dmas = <&sdma 7 7 1>, <&sdma 8 7 2>;
      220  dmas = <&sdma 9 7 1>, <&sdma 10 7 2>;
      233  dmas = <&sdma 31 4 0>, <&sdma 32 4 0>;
      246  dmas = <&sdma 25 4 0>, <&sdma 26 4 0>;
      259  dmas = <&sdma 27 4 0>, <&sdma 28 4 0>;
      271  dmas = <&sdma 37 22 0>, <&sdma 38 22 0>;
      284  dmas = <&sdma 41 22 0>, <&sdma 42 22 0>;
      [all …]
|
D | imx31.dtsi |
      135  dmas = <&sdma 8 8 0>, <&sdma 9 8 0>;
      182  dmas = <&sdma 20 3 0>;
      193  dmas = <&sdma 21 3 0>;
      213  dmas = <&sdma 6 8 0>, <&sdma 7 8 0>;
      248  dmas = <&sdma 10 8 0>, <&sdma 11 8 0>;
      300  sdma: dma-controller@53fd4000 {  label
      301  compatible = "fsl,imx31-sdma";
      307  fsl,sdma-ram-script-name = "imx/sdma/sdma-imx31.bin";
      348  dmas = <&sdma 30 17 0>;
|
D | imx6qdl.dtsi |
      310  dmas = <&sdma 14 18 0>,
      311  <&sdma 15 18 0>;
      335  dmas = <&sdma 3 7 1>, <&sdma 4 7 2>;
      349  dmas = <&sdma 5 7 1>, <&sdma 6 7 2>;
      363  dmas = <&sdma 7 7 1>, <&sdma 8 7 2>;
      377  dmas = <&sdma 9 7 1>, <&sdma 10 7 2>;
      389  dmas = <&sdma 25 4 0>, <&sdma 26 4 0>;
      405  dmas = <&sdma 23 21 0>, <&sdma 24 21 0>;
      419  dmas = <&sdma 37 1 0>,
      420  <&sdma 38 1 0>;
      [all …]
|
D | imx51.dtsi |
      221  dmas = <&sdma 43 5 1>, <&sdma 44 5 2>;
      246  dmas = <&sdma 24 1 0>,
      247  <&sdma 25 1 0>;
      434  dmas = <&sdma 18 4 1>, <&sdma 19 4 2>;
      446  dmas = <&sdma 16 4 1>, <&sdma 17 4 2>;
      510  sdma: dma-controller@83fb0000 {  label
      511  compatible = "fsl,imx51-sdma", "fsl,imx35-sdma";
      518  fsl,sdma-ram-script-name = "imx/sdma/sdma-imx51.bin";
      561  dmas = <&sdma 28 0 0>,
      562  <&sdma 29 0 0>;
      [all …]
|
D | imx53.dtsi |
      270  dmas = <&sdma 42 4 0>, <&sdma 43 4 0>;
      297  dmas = <&sdma 24 1 0>,
      298  <&sdma 25 1 0>;
      554  dmas = <&sdma 18 4 0>, <&sdma 19 4 0>;
      566  dmas = <&sdma 12 4 0>, <&sdma 13 4 0>;
      652  dmas = <&sdma 2 4 0>, <&sdma 3 4 0>;
      684  dmas = <&sdma 16 4 0>, <&sdma 17 4 0>;
      713  sdma: dma-controller@63fb0000 {  label
      714  compatible = "fsl,imx53-sdma", "fsl,imx35-sdma";
      721  fsl,sdma-ram-script-name = "imx/sdma/sdma-imx53.bin";
      [all …]
|
D | imx6sx.dtsi |
      262  dmas = <&sdma 14 18 0>,
      263  <&sdma 15 18 0>;
      336  dmas = <&sdma 25 4 0>, <&sdma 26 4 0>;
      352  dmas = <&sdma 23 21 0>,
      353  <&sdma 24 21 0>;
      366  dmas = <&sdma 37 1 0>, <&sdma 38 1 0>;
      380  dmas = <&sdma 41 1 0>, <&sdma 42 1 0>;
      394  dmas = <&sdma 45 1 0>, <&sdma 46 1 0>;
      416  dmas = <&sdma 17 23 1>, <&sdma 18 23 1>,
      417  <&sdma 19 23 1>, <&sdma 20 23 1>,
      [all …]
|
D | imx6sl.dtsi |
      162  dmas = <&sdma 14 18 0>,
      163  <&sdma 15 18 0>;
      234  dmas = <&sdma 33 4 0>, <&sdma 34 4 0>;
      247  dmas = <&sdma 25 4 0>, <&sdma 26 4 0>;
      260  dmas = <&sdma 27 4 0>, <&sdma 28 4 0>;
      274  dmas = <&sdma 37 1 0>,
      275  <&sdma 38 1 0>;
      290  dmas = <&sdma 41 1 0>,
      291  <&sdma 42 1 0>;
      306  dmas = <&sdma 45 1 0>,
      [all …]
|
/linux-6.6.21/arch/powerpc/platforms/52xx/ |
D | mpc52xx_pic.c |
      135  static struct mpc52xx_sdma __iomem *sdma;  variable
      270  io_be_setbit(&sdma->IntMask, l2irq);  in mpc52xx_sdma_mask()
      276  io_be_clrbit(&sdma->IntMask, l2irq);  in mpc52xx_sdma_unmask()
      282  out_be32(&sdma->IntPend, 1 << l2irq);  in mpc52xx_sdma_ack()
      417  sdma = of_iomap(np, 0);  in mpc52xx_init_irq()
      419  if (!sdma)  in mpc52xx_init_irq()
      426  out_be32(&sdma->IntPend, 0xffffffff); /* 1 means clear pending */  in mpc52xx_init_irq()
      427  out_be32(&sdma->IntMask, 0xffffffff); /* 1 means disabled */  in mpc52xx_init_irq()
      508  status = in_be32(&sdma->IntPend);  in mpc52xx_get_irq()
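The mpc52xx_pic.c matches show the SDMA interrupt block being masked, unmasked and acked through big-endian MMIO helpers on the IntMask/IntPend registers (a set bit disables, writing 1 to IntPend clears). A minimal sketch of that trio, using the accessors and fields named in the listing; the irq_chip plumbing around them is assumed.

	/*
	 * Hedged sketch of the mask/unmask/ack operations seen above.
	 * struct mpc52xx_sdma is the platform register-block type from
	 * the listing; only IntMask and IntPend are referenced here.
	 */
	static void sdma_irq_mask_sketch(struct mpc52xx_sdma __iomem *sdma, int l2irq)
	{
		io_be_setbit(&sdma->IntMask, l2irq);	/* 1 means disabled */
	}

	static void sdma_irq_unmask_sketch(struct mpc52xx_sdma __iomem *sdma, int l2irq)
	{
		io_be_clrbit(&sdma->IntMask, l2irq);
	}

	static void sdma_irq_ack_sketch(struct mpc52xx_sdma __iomem *sdma, int l2irq)
	{
		out_be32(&sdma->IntPend, 1 << l2irq);	/* 1 means clear pending */
	}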
|
/linux-6.6.21/Documentation/devicetree/bindings/sound/ |
D | fsl,asrc.txt |
      74  dmas = <&sdma 17 23 1>, <&sdma 18 23 1>, <&sdma 19 23 1>,
      75  <&sdma 20 23 1>, <&sdma 21 23 1>, <&sdma 22 23 1>;
|