/linux-6.1.9/drivers/dma/

mxs-dma.c
    143  struct dma_device dma_device;  member
    368  dev_dbg(mxs_dma->dma_device.dev,  in mxs_dma_int_handler()
    402  mxs_chan->ccw = dma_alloc_coherent(mxs_dma->dma_device.dev,  in mxs_dma_alloc_chan_resources()
    432  dma_free_coherent(mxs_dma->dma_device.dev, CCW_BLOCK_SIZE,  in mxs_dma_alloc_chan_resources()
    447  dma_free_coherent(mxs_dma->dma_device.dev, CCW_BLOCK_SIZE,  in mxs_dma_free_chan_resources()
    492  dev_err(mxs_dma->dma_device.dev,  in mxs_dma_prep_slave_sg()
    537  dev_err(mxs_dma->dma_device.dev, "maximum bytes for sg entry exceeded: %d > %d\n",  in mxs_dma_prep_slave_sg()
    592  dev_err(mxs_dma->dma_device.dev,  in mxs_dma_prep_dma_cyclic()
    599  dev_err(mxs_dma->dma_device.dev,  in mxs_dma_prep_dma_cyclic()
    729  dma_cap_mask_t mask = mxs_dma->dma_device.cap_mask;  in mxs_dma_xlate()
    [all …]
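
The mxs-dma hits use the struct device stored in dma_device.dev for coherent allocations and diagnostics in the channel-resource callbacks. Below is a minimal sketch of that pattern for a hypothetical driver; every foo_* name and FOO_RING_SIZE are invented for illustration and are not the mxs-dma code.

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

/* Hypothetical driver state; only the fields used below are shown. */
struct foo_dma_engine {
	struct dma_device dma_device;	/* embedded, registered with the dmaengine core */
};

struct foo_dma_chan {
	struct dma_chan chan;
	struct foo_dma_engine *engine;
	void *desc_ring;		/* CPU address of the descriptor ring */
	dma_addr_t desc_ring_phys;	/* bus address programmed into the hardware */
};

#define FOO_RING_SIZE	4096	/* arbitrary example size */

static int foo_dma_alloc_chan_resources(struct dma_chan *chan)
{
	struct foo_dma_chan *fchan = container_of(chan, struct foo_dma_chan, chan);
	struct device *dev = fchan->engine->dma_device.dev;

	/* dma_device.dev is the platform device assigned at probe time */
	fchan->desc_ring = dma_alloc_coherent(dev, FOO_RING_SIZE,
					      &fchan->desc_ring_phys, GFP_KERNEL);
	if (!fchan->desc_ring)
		return -ENOMEM;

	dev_dbg(dev, "allocated descriptor ring for channel %d\n", chan->chan_id);
	return 0;
}

static void foo_dma_free_chan_resources(struct dma_chan *chan)
{
	struct foo_dma_chan *fchan = container_of(chan, struct foo_dma_chan, chan);
	struct device *dev = fchan->engine->dma_device.dev;

	dma_free_coherent(dev, FOO_RING_SIZE, fchan->desc_ring,
			  fchan->desc_ring_phys);
}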

dmaengine.c
    69  static void dmaengine_debug_register(struct dma_device *dma_dev)  in dmaengine_debug_register()
    77  static void dmaengine_debug_unregister(struct dma_device *dma_dev)  in dmaengine_debug_unregister()
    84  struct dma_device *dma_dev)  in dmaengine_dbg_summary_show()
    104  struct dma_device *dma_dev = NULL;  in dmaengine_summary_show()
    136  static inline int dmaengine_debug_register(struct dma_device *dma_dev)  in dmaengine_debug_register()
    141  static inline void dmaengine_debug_unregister(struct dma_device *dma_dev) { }  in dmaengine_debug_unregister()
    322  struct dma_device *device;  in min_chan()
    364  struct dma_device *device;  in dma_channel_rebalance()
    392  static int dma_device_satisfies_mask(struct dma_device *device,  in dma_device_satisfies_mask()
    425  struct dma_device *device = container_of(ref, struct dma_device, ref);  in dma_device_release()
    [all …]

st_fdma.c
    177  if (fdev->dma_device.dev->of_node != dma_spec->np)  in st_fdma_of_xlate()
    186  chan = dma_get_any_slave_channel(&fdev->dma_device);  in st_fdma_of_xlate()
    793  INIT_LIST_HEAD(&fdev->dma_device.channels);  in st_fdma_probe()
    799  vchan_init(&fchan->vchan, &fdev->dma_device);  in st_fdma_probe()
    805  dma_cap_set(DMA_SLAVE, fdev->dma_device.cap_mask);  in st_fdma_probe()
    806  dma_cap_set(DMA_CYCLIC, fdev->dma_device.cap_mask);  in st_fdma_probe()
    807  dma_cap_set(DMA_MEMCPY, fdev->dma_device.cap_mask);  in st_fdma_probe()
    809  fdev->dma_device.dev = &pdev->dev;  in st_fdma_probe()
    810  fdev->dma_device.device_alloc_chan_resources = st_fdma_alloc_chan_res;  in st_fdma_probe()
    811  fdev->dma_device.device_free_chan_resources = st_fdma_free_chan_res;  in st_fdma_probe()
    [all …]
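
The st_fdma_of_xlate() lines show the simplest device-tree translation: verify the request targets this controller's node, then hand out any free channel of the embedded dma_device via dma_get_any_slave_channel() (declared in drivers/dma/dmaengine.h, line 183 below). A hedged sketch of that shape; the foo_* names are hypothetical.

#include <linux/dmaengine.h>
#include <linux/of_dma.h>
#include "dmaengine.h"		/* private drivers/dma header: dma_get_any_slave_channel() */

struct foo_dma_engine {
	struct dma_device dma_device;
};

static struct dma_chan *foo_of_xlate(struct of_phandle_args *dma_spec,
				     struct of_dma *ofdma)
{
	struct foo_dma_engine *fdev = ofdma->of_dma_data;

	/* Only serve requests aimed at this controller's DT node */
	if (fdev->dma_device.dev->of_node != dma_spec->np)
		return NULL;

	/* Hand back any channel of this dma_device that is not yet in use */
	return dma_get_any_slave_channel(&fdev->dma_device);
}

Such a callback would typically be registered from probe with of_dma_controller_register(pdev->dev.of_node, foo_of_xlate, fdev).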

imx-dma.c
    176  struct dma_device dma_device;  member
    1034  return dma_request_channel(imxdma->dma_device.cap_mask,  in imxdma_xlate()
    1112  INIT_LIST_HEAD(&imxdma->dma_device.channels);  in imxdma_probe()
    1114  dma_cap_set(DMA_SLAVE, imxdma->dma_device.cap_mask);  in imxdma_probe()
    1115  dma_cap_set(DMA_CYCLIC, imxdma->dma_device.cap_mask);  in imxdma_probe()
    1116  dma_cap_set(DMA_MEMCPY, imxdma->dma_device.cap_mask);  in imxdma_probe()
    1117  dma_cap_set(DMA_INTERLEAVE, imxdma->dma_device.cap_mask);  in imxdma_probe()
    1150  imxdmac->chan.device = &imxdma->dma_device;  in imxdma_probe()
    1156  &imxdma->dma_device.channels);  in imxdma_probe()
    1159  imxdma->dma_device.dev = &pdev->dev;  in imxdma_probe()
    [all …]
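
imxdma_xlate() takes the other common route: pass the engine's cap_mask to dma_request_channel() together with a filter callback that matches a channel to the DT cells. A sketch under the same hypothetical foo_* naming; the actual matching logic is hardware specific and only hinted at in a comment.

#include <linux/dmaengine.h>
#include <linux/of_dma.h>

struct foo_dma_engine {
	struct dma_device dma_device;
};

/* Hypothetical: the single DT cell selects a hardware request line. */
struct foo_filter_args {
	struct foo_dma_engine *engine;
	int request_line;
};

static bool foo_dma_filter(struct dma_chan *chan, void *param)
{
	struct foo_filter_args *args = param;

	/* Reject channels that belong to a different controller */
	if (chan->device != &args->engine->dma_device)
		return false;

	/* A real driver would compare args->request_line against the channel here */
	return true;
}

static struct dma_chan *foo_dma_xlate(struct of_phandle_args *dma_spec,
				      struct of_dma *ofdma)
{
	struct foo_dma_engine *engine = ofdma->of_dma_data;
	struct foo_filter_args args;

	if (dma_spec->args_count != 1)
		return NULL;

	args.engine = engine;
	args.request_line = dma_spec->args[0];

	/* dma_request_channel() walks registered devices and calls the filter */
	return dma_request_channel(engine->dma_device.cap_mask,
				   foo_dma_filter, &args);
}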

dmaengine.h
    183  struct dma_chan *dma_get_any_slave_channel(struct dma_device *device);
    189  dmaengine_get_debugfs_root(struct dma_device *dma_dev) {  in dmaengine_get_debugfs_root()
    195  dmaengine_get_debugfs_root(struct dma_device *dma_dev)  in dmaengine_get_debugfs_root()

imx-sdma.c
    524  struct dma_device dma_device;  member
    2141  dma_cap_mask_t mask = sdma->dma_device.cap_mask;  in sdma_xlate()
    2232  dma_cap_set(DMA_SLAVE, sdma->dma_device.cap_mask);  in sdma_probe()
    2233  dma_cap_set(DMA_CYCLIC, sdma->dma_device.cap_mask);  in sdma_probe()
    2234  dma_cap_set(DMA_MEMCPY, sdma->dma_device.cap_mask);  in sdma_probe()
    2236  INIT_LIST_HEAD(&sdma->dma_device.channels);  in sdma_probe()
    2254  vchan_init(&sdmac->vc, &sdma->dma_device);  in sdma_probe()
    2268  sdma->dma_device.dev = &pdev->dev;  in sdma_probe()
    2270  sdma->dma_device.device_alloc_chan_resources = sdma_alloc_chan_resources;  in sdma_probe()
    2271  sdma->dma_device.device_free_chan_resources = sdma_free_chan_resources;  in sdma_probe()
    [all …]
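
The sdma_probe() hits are the canonical probe sequence for a virt-dma based driver: set capability bits, initialise dma_device.channels, attach each channel with vchan_init(), point dma_device.dev at the platform device, hook up the operation callbacks and register. A condensed, hypothetical sketch; the channel-resource callbacks are trivial stubs here (their general shape is the earlier sketch), and the prep/tx_status/issue_pending callbacks are elided.

#include <linux/dmaengine.h>
#include <linux/platform_device.h>
#include "virt-dma.h"		/* private drivers/dma header: struct virt_dma_chan, vchan_init() */

#define FOO_NR_CHANNELS	8	/* arbitrary example channel count */

struct foo_dma_chan {
	struct virt_dma_chan vc;
};

struct foo_dma_engine {
	struct dma_device dma_device;
	struct foo_dma_chan chans[FOO_NR_CHANNELS];
};

/* Trivial stubs so the sketch is self-contained. */
static int foo_dma_alloc_chan_resources(struct dma_chan *chan) { return 0; }
static void foo_dma_free_chan_resources(struct dma_chan *chan) { }

static int foo_dma_probe(struct platform_device *pdev)
{
	struct foo_dma_engine *foo;
	struct dma_device *dd;
	int i;

	foo = devm_kzalloc(&pdev->dev, sizeof(*foo), GFP_KERNEL);
	if (!foo)
		return -ENOMEM;

	dd = &foo->dma_device;

	/* Advertise what this engine can do */
	dma_cap_zero(dd->cap_mask);
	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);

	/* Channels hang off dma_device.channels; vchan_init() links each one in */
	INIT_LIST_HEAD(&dd->channels);
	for (i = 0; i < FOO_NR_CHANNELS; i++) {
		/* a real driver also sets vc.desc_free to its descriptor destructor */
		vchan_init(&foo->chans[i].vc, dd);
	}

	dd->dev = &pdev->dev;
	dd->device_alloc_chan_resources = foo_dma_alloc_chan_resources;
	dd->device_free_chan_resources = foo_dma_free_chan_resources;
	/* ...device_prep_slave_sg, device_prep_dma_cyclic, device_config,
	 * device_tx_status, device_issue_pending, device_terminate_all...
	 */

	platform_set_drvdata(pdev, foo);
	return dma_async_device_register(dd);
}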

dma-jz4780.c
    150  struct dma_device dma_device;  member
    181  dma_device);  in jz4780_dma_chan_parent()
    810  dma_cap_mask_t mask = jzdma->dma_device.cap_mask;  in jz4780_of_dma_xlate()
    827  dev_err(jzdma->dma_device.dev,  in jz4780_of_dma_xlate()
    835  dev_err(jzdma->dma_device.dev,  in jz4780_of_dma_xlate()
    858  struct dma_device *dd;  in jz4780_dma_probe()
    913  dd = &jzdma->dma_device;  in jz4780_dma_probe()

idma64.h
    184  struct dma_device dma;
    193  static inline struct idma64 *to_idma64(struct dma_device *ddev)  in to_idma64()

st_fdma.h
    133  struct dma_device dma_device;  member

fsl-edma-common.h
    157  struct dma_device dma_dev;
    251  void fsl_edma_cleanup_vchan(struct dma_device *dmadev);

/linux-6.1.9/include/linux/

dmaengine.h
    329  struct dma_device *device;
    851  struct dma_device {  struct
    940  void (*device_release)(struct dma_device *dev);  argument
    942  void (*dbg_summary_show)(struct seq_file *s, struct dma_device *dev);  argument
    1222  static inline bool is_dma_copy_aligned(struct dma_device *dev, size_t off1,  in is_dma_copy_aligned()
    1228  static inline bool is_dma_xor_aligned(struct dma_device *dev, size_t off1,  in is_dma_xor_aligned()
    1234  static inline bool is_dma_pq_aligned(struct dma_device *dev, size_t off1,  in is_dma_pq_aligned()
    1240  static inline bool is_dma_fill_aligned(struct dma_device *dev, size_t off1,  in is_dma_fill_aligned()
    1247  dma_set_maxpq(struct dma_device *dma, int maxpq, int has_pq_continue)  in dma_set_maxpq()
    1266  static inline bool dma_dev_has_pq_continue(struct dma_device *dma)  in dma_dev_has_pq_continue()
    [all …]
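
Besides defining struct dma_device itself (line 851), include/linux/dmaengine.h provides the is_dma_*_aligned() helpers, which test offset/length combinations against the alignment a dma_device advertises. A small, hedged example of gating a memcpy offload on them; foo_can_offload_copy() is illustrative only.

#include <linux/dmaengine.h>

static bool foo_can_offload_copy(struct dma_chan *chan,
				 size_t src_off, size_t dst_off, size_t len)
{
	struct dma_device *dev = chan ? chan->device : NULL;

	if (!dev || !dma_has_cap(DMA_MEMCPY, dev->cap_mask))
		return false;

	/* Honour the engine's copy_align constraint recorded in struct dma_device */
	return is_dma_copy_aligned(dev, src_off, dst_off, len);
}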

/linux-6.1.9/drivers/dma/ioat/

sysfs.c
    19  struct dma_device *dma = c->device;  in cap_show()
    33  struct dma_device *dma = c->device;  in version_show()
    77  struct dma_device *dma = &ioat_dma->dma_dev;  in ioat_kobject_add()
    98  struct dma_device *dma = &ioat_dma->dma_dev;  in ioat_kobject_del()

init.c
    305  struct dma_device *dma = &ioat_dma->dma_dev;  in ioat_dma_self_test()
    496  struct dma_device *dma = &ioat_dma->dma_dev;  in ioat_probe()
    552  struct dma_device *dma = &ioat_dma->dma_dev;  in ioat_dma_remove()
    569  struct dma_device *dma = &ioat_dma->dma_dev;  in ioat_enumerate_channels()
    769  struct dma_device *dma = &ioat_dma->dma_dev;  in ioat_init_channel()
    801  struct dma_device *dma = &ioat_dma->dma_dev;  in ioat_xor_val_self_test()
    1063  struct dma_device *dma;  in ioat_intr_quirk()
    1091  struct dma_device *dma;  in ioat3_dma_probe()
    1323  static void release_ioatdma(struct dma_device *device)  in release_ioatdma()

/linux-6.1.9/drivers/dma/hsu/

hsu.h
    115  struct dma_device dma;
    122  static inline struct hsu_dma *to_hsu_dma(struct dma_device *ddev)  in to_hsu_dma()
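
idma64.h and hsu.h show the usual embedding idiom: the driver-private structure contains a struct dma_device, and a container_of() accessor recovers the outer structure from the dma_device pointer that the core hands back. A sketch with hypothetical names:

#include <linux/container_of.h>
#include <linux/dmaengine.h>

struct foo_dma_engine {
	void __iomem *regs;		/* controller registers */
	struct dma_device dma;		/* embedded dmaengine device */
};

static inline struct foo_dma_engine *to_foo_dma(struct dma_device *ddev)
{
	return container_of(ddev, struct foo_dma_engine, dma);
}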

/linux-6.1.9/drivers/dma/dw-edma/

dw-edma-core.h
    101  struct dma_device wr_edma;
    104  struct dma_device rd_edma;

/linux-6.1.9/crypto/async_tx/

async_xor.c
    26  struct dma_device *dma = chan->device;  in do_async_xor()
    141  dma_xor_aligned_offsets(struct dma_device *device, unsigned int offset,  in dma_xor_aligned_offsets()
    189  struct dma_device *device = chan ? chan->device : NULL;  in async_xor_offs()
    321  struct dma_device *device = chan ? chan->device : NULL;  in async_xor_val_offs()

async_tx.c
    69  struct dma_device *device = chan->device;  in async_tx_channel_switch()
    224  struct dma_device *device;  in async_trigger_callback()

async_pq.c
    42  struct dma_device *dma = chan->device;  in do_async_gen_syndrome()
    143  is_dma_pq_aligned_offs(struct dma_device *dev, unsigned int *offs,  in is_dma_pq_aligned_offs()
    184  struct dma_device *device = chan ? chan->device : NULL;  in async_gen_syndrome()
    303  struct dma_device *device = chan ? chan->device : NULL;  in async_syndrome_val()
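
The async_tx code repeatedly evaluates "chan ? chan->device : NULL" so that one code path works whether or not an offload engine was found. A hedged sketch of that offload-or-fallback shape; foo_xor_offload() is illustrative, and the real async_xor() additionally handles page mapping, flags and dependency chaining.

#include <linux/dmaengine.h>

static int foo_xor_offload(struct dma_chan *chan, dma_addr_t dst,
			   dma_addr_t *srcs, unsigned int src_cnt, size_t len)
{
	/* chan may be NULL when no XOR-capable channel was allocated */
	struct dma_device *device = chan ? chan->device : NULL;
	struct dma_async_tx_descriptor *tx = NULL;

	if (device && dma_has_cap(DMA_XOR, device->cap_mask))
		tx = device->device_prep_dma_xor(chan, dst, srcs, src_cnt,
						 len, DMA_PREP_INTERRUPT);
	if (!tx)
		return -ENXIO;	/* caller falls back to a synchronous XOR */

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);
	return 0;
}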

/linux-6.1.9/drivers/dma/qcom/

gpi.c
    468  struct dma_device dma_device;  member
    2201  INIT_LIST_HEAD(&gpi_dev->dma_device.channels);  in gpi_probe()
    2234  vchan_init(&gchan->vc, &gpi_dev->dma_device);  in gpi_probe()
    2253  dma_cap_zero(gpi_dev->dma_device.cap_mask);  in gpi_probe()
    2254  dma_cap_set(DMA_SLAVE, gpi_dev->dma_device.cap_mask);  in gpi_probe()
    2257  gpi_dev->dma_device.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);  in gpi_probe()
    2258  gpi_dev->dma_device.residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;  in gpi_probe()
    2259  gpi_dev->dma_device.src_addr_widths = DMA_SLAVE_BUSWIDTH_8_BYTES;  in gpi_probe()
    2260  gpi_dev->dma_device.dst_addr_widths = DMA_SLAVE_BUSWIDTH_8_BYTES;  in gpi_probe()
    2261  gpi_dev->dma_device.device_alloc_chan_resources = gpi_alloc_chan_resources;  in gpi_probe()
    [all …]
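
Beyond the common setup, gpi_probe() also fills in the slave-capability fields of the dma_device: supported transfer directions, address widths and residue granularity. A generic sketch of that step; the values are placeholders rather than the GPI hardware's.

#include <linux/bits.h>
#include <linux/dmaengine.h>

static void foo_dma_set_caps(struct dma_device *dd)
{
	dma_cap_zero(dd->cap_mask);
	dma_cap_set(DMA_SLAVE, dd->cap_mask);

	/* directions and bus widths the hardware can handle */
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);

	/* how precisely device_tx_status() can report the remaining byte count */
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
}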

/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/fpga/

conn.c
    51  struct device *dma_device;  in mlx5_fpga_conn_map_buf() local
    57  dma_device = mlx5_core_dma_dev(conn->fdev->mdev);  in mlx5_fpga_conn_map_buf()
    58  buf->sg[0].dma_addr = dma_map_single(dma_device, buf->sg[0].data,  in mlx5_fpga_conn_map_buf()
    60  err = dma_mapping_error(dma_device, buf->sg[0].dma_addr);  in mlx5_fpga_conn_map_buf()
    70  buf->sg[1].dma_addr = dma_map_single(dma_device, buf->sg[1].data,  in mlx5_fpga_conn_map_buf()
    72  err = dma_mapping_error(dma_device, buf->sg[1].dma_addr);  in mlx5_fpga_conn_map_buf()
    75  dma_unmap_single(dma_device, buf->sg[0].dma_addr,  in mlx5_fpga_conn_map_buf()
    87  struct device *dma_device;  in mlx5_fpga_conn_unmap_buf() local
    89  dma_device = mlx5_core_dma_dev(conn->fdev->mdev);  in mlx5_fpga_conn_unmap_buf()
    91  dma_unmap_single(dma_device, buf->sg[1].dma_addr,  in mlx5_fpga_conn_unmap_buf()
    [all …]
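
In this file dma_device is simply a local struct device * used with the streaming DMA API rather than a dmaengine object. The map/check/rollback pattern of mlx5_fpga_conn_map_buf() looks roughly like the sketch below; struct foo_buf and foo_map_buf() are hypothetical stand-ins.

#include <linux/dma-mapping.h>

struct foo_frag {
	void *data;
	unsigned int size;
	dma_addr_t dma_addr;
};

struct foo_buf {
	struct foo_frag sg[2];	/* second fragment is optional */
};

static int foo_map_buf(struct device *dma_device, struct foo_buf *buf)
{
	buf->sg[0].dma_addr = dma_map_single(dma_device, buf->sg[0].data,
					     buf->sg[0].size, DMA_TO_DEVICE);
	if (dma_mapping_error(dma_device, buf->sg[0].dma_addr))
		return -ENOMEM;

	if (!buf->sg[1].data)
		return 0;

	buf->sg[1].dma_addr = dma_map_single(dma_device, buf->sg[1].data,
					     buf->sg[1].size, DMA_TO_DEVICE);
	if (dma_mapping_error(dma_device, buf->sg[1].dma_addr)) {
		/* roll back the first mapping so nothing leaks */
		dma_unmap_single(dma_device, buf->sg[0].dma_addr,
				 buf->sg[0].size, DMA_TO_DEVICE);
		return -ENOMEM;
	}

	return 0;
}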

/linux-6.1.9/drivers/dma/idxd/

dma.c
    195  static void idxd_dma_release(struct dma_device *device)  in idxd_dma_release()
    205  struct dma_device *dma;  in idxd_register_dma_device()
    256  struct dma_device *dma = &idxd->idxd_dma->dma;  in idxd_register_dma_channel()
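
idxd_dma_release() is a device_release callback, the hook struct dma_device provides (line 940 in include/linux/dmaengine.h above) so a driver can free the containing object once the dmaengine core drops its last reference. A hypothetical sketch; struct foo_dma plays the role of idxd's wrapper structure.

#include <linux/container_of.h>
#include <linux/dmaengine.h>
#include <linux/slab.h>

struct foo_dma {
	struct dma_device dma;
	/* driver-private state lives alongside the embedded dma_device */
};

static void foo_dma_release(struct dma_device *device)
{
	struct foo_dma *foo = container_of(device, struct foo_dma, dma);

	kfree(foo);
}

/* Installed before dma_async_device_register() in the driver's setup path: */
static void foo_dma_init_release(struct foo_dma *foo)
{
	foo->dma.device_release = foo_dma_release;
}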

/linux-6.1.9/drivers/infiniband/hw/hns/

hns_roce_db.c
    71  struct device *dma_device)  in hns_roce_alloc_db_pgdir() argument
    83  pgdir->page = dma_alloc_coherent(dma_device, PAGE_SIZE,  in hns_roce_alloc_db_pgdir()

/linux-6.1.9/include/linux/platform_data/

dma-iop32x.h
    37  struct dma_device common;

/linux-6.1.9/drivers/dma/sf-pdma/

sf-pdma.h
    113  struct dma_device dma_dev;

/linux-6.1.9/include/rdma/

ib_verbs.h
    2692  struct device *dma_device;  member
    2855  struct device *dma_device);
    4013  return IS_ENABLED(CONFIG_INFINIBAND_VIRT_DMA) && !dev->dma_device;  in ib_uses_virt_dma()
    4024  return dma_pci_p2pdma_supported(dev->dma_device);  in ib_dma_pci_p2p_dma_supported()
    4036  return dma_mapping_error(dev->dma_device, dma_addr);  in ib_dma_mapping_error()
    4052  return dma_map_single(dev->dma_device, cpu_addr, size, direction);  in ib_dma_map_single()
    4067  dma_unmap_single(dev->dma_device, addr, size, direction);  in ib_dma_unmap_single()
    4086  return dma_map_page(dev->dma_device, page, offset, size, direction);  in ib_dma_map_page()
    4101  dma_unmap_page(dev->dma_device, addr, size, direction);  in ib_dma_unmap_page()
    4112  return dma_map_sg_attrs(dev->dma_device, sg, nents, direction,  in ib_dma_map_sg_attrs()
    [all …]
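
In the RDMA core, dma_device is the struct device * that the ib_dma_* wrappers forward to unless the device does virtual DMA (ib_uses_virt_dma()). A short, hedged example of an upper-layer protocol using those wrappers; the foo_* functions are invented.

#include <rdma/ib_verbs.h>

static int foo_map_for_send(struct ib_device *ibdev, void *buf, size_t len,
			    u64 *dma_addr)
{
	/* forwards to dma_map_single() on ibdev->dma_device for real hardware */
	*dma_addr = ib_dma_map_single(ibdev, buf, len, DMA_TO_DEVICE);
	if (ib_dma_mapping_error(ibdev, *dma_addr))
		return -ENOMEM;

	return 0;
}

static void foo_unmap_for_send(struct ib_device *ibdev, u64 dma_addr,
			       size_t len)
{
	ib_dma_unmap_single(ibdev, dma_addr, len, DMA_TO_DEVICE);
}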