
Searched refs:dma_buf (Results 1 – 25 of 132) sorted by relevance


/linux-5.19.10/include/linux/
dma-buf.h
27 struct dma_buf;
72 int (*attach)(struct dma_buf *, struct dma_buf_attachment *);
83 void (*detach)(struct dma_buf *, struct dma_buf_attachment *);
203 void (*release)(struct dma_buf *);
230 int (*begin_cpu_access)(struct dma_buf *, enum dma_data_direction);
247 int (*end_cpu_access)(struct dma_buf *, enum dma_data_direction);
284 int (*mmap)(struct dma_buf *, struct vm_area_struct *vma);
286 int (*vmap)(struct dma_buf *dmabuf, struct iosys_map *map);
287 void (*vunmap)(struct dma_buf *dmabuf, struct iosys_map *map);
302 struct dma_buf { struct
[all …]
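
The dma_buf_ops callbacks indexed above are what an exporter wires together before calling dma_buf_export(). A minimal sketch of that wiring follows; the my_* names are hypothetical, and the map/unmap/release bodies are stubs that a real exporter replaces with code that builds and DMA-maps a scatter-gather table for its backing storage.

#include <linux/dma-buf.h>
#include <linux/fcntl.h>
#include <linux/module.h>

static struct sg_table *my_map(struct dma_buf_attachment *attach,
                               enum dma_data_direction dir)
{
        return ERR_PTR(-EINVAL);        /* stub: build and dma-map an sg_table here */
}

static void my_unmap(struct dma_buf_attachment *attach, struct sg_table *sgt,
                     enum dma_data_direction dir)
{
        /* stub: dma-unmap and free the sg_table built in my_map() */
}

static void my_release(struct dma_buf *dmabuf)
{
        /* stub: drop the reference on the backing storage in dmabuf->priv */
}

/* .map_dma_buf, .unmap_dma_buf and .release are mandatory; the others
 * (attach, mmap, vmap, begin/end_cpu_access, ...) are optional. */
static const struct dma_buf_ops my_dmabuf_ops = {
        .map_dma_buf   = my_map,
        .unmap_dma_buf = my_unmap,
        .release       = my_release,
};

static struct dma_buf *my_export(void *priv, size_t size)
{
        DEFINE_DMA_BUF_EXPORT_INFO(exp_info);   /* fills .exp_name and .owner */

        exp_info.ops   = &my_dmabuf_ops;
        exp_info.size  = size;
        exp_info.flags = O_RDWR;
        exp_info.priv  = priv;                  /* must be non-NULL */

        return dma_buf_export(&exp_info);       /* ERR_PTR() on failure */
}
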
virtio_dma_buf.h
24 int (*device_attach)(struct dma_buf *dma_buf,
26 int (*get_uuid)(struct dma_buf *dma_buf, uuid_t *uuid);
29 int virtio_dma_buf_attach(struct dma_buf *dma_buf,
32 struct dma_buf *virtio_dma_buf_export
34 bool is_virtio_dma_buf(struct dma_buf *dma_buf);
35 int virtio_dma_buf_get_uuid(struct dma_buf *dma_buf, uuid_t *uuid);
/linux-5.19.10/drivers/gpu/drm/
drm_prime.c
94 struct dma_buf *dma_buf; member
102 struct dma_buf *dma_buf, uint32_t handle) in drm_prime_add_buf_handle() argument
111 get_dma_buf(dma_buf); in drm_prime_add_buf_handle()
112 member->dma_buf = dma_buf; in drm_prime_add_buf_handle()
122 if (dma_buf > pos->dma_buf) in drm_prime_add_buf_handle()
148 static struct dma_buf *drm_prime_lookup_buf_by_handle(struct drm_prime_file_private *prime_fpriv, in drm_prime_lookup_buf_by_handle()
159 return member->dma_buf; in drm_prime_lookup_buf_by_handle()
170 struct dma_buf *dma_buf, in drm_prime_lookup_buf_handle() argument
180 if (member->dma_buf == dma_buf) { in drm_prime_lookup_buf_handle()
183 } else if (member->dma_buf < dma_buf) { in drm_prime_lookup_buf_handle()
[all …]
/linux-5.19.10/include/drm/
drm_prime.h
55 struct dma_buf;
66 struct dma_buf *drm_gem_dmabuf_export(struct drm_device *dev,
68 void drm_gem_dmabuf_release(struct dma_buf *dma_buf);
77 int drm_gem_map_attach(struct dma_buf *dma_buf,
79 void drm_gem_map_detach(struct dma_buf *dma_buf,
86 int drm_gem_dmabuf_vmap(struct dma_buf *dma_buf, struct iosys_map *map);
87 void drm_gem_dmabuf_vunmap(struct dma_buf *dma_buf, struct iosys_map *map);
90 int drm_gem_dmabuf_mmap(struct dma_buf *dma_buf, struct vm_area_struct *vma);
94 struct dma_buf *drm_gem_prime_export(struct drm_gem_object *obj,
101 struct dma_buf *dma_buf,
[all …]
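
The drm_prime.c and drm_prime.h hits above are the kernel side of PRIME buffer sharing; userspace drives it through the libdrm wrappers around DRM_IOCTL_PRIME_HANDLE_TO_FD / DRM_IOCTL_PRIME_FD_TO_HANDLE. A small sketch, assuming card_fd is an open DRM device and handle a GEM handle obtained from the driver's allocation ioctl:

#include <stdint.h>
#include <xf86drm.h>

/* Export a GEM handle as a shareable dma-buf fd (PRIME export). */
int export_gem_as_dmabuf(int card_fd, uint32_t handle)
{
        int prime_fd = -1;

        if (drmPrimeHandleToFD(card_fd, handle, DRM_CLOEXEC | DRM_RDWR, &prime_fd))
                return -1;
        return prime_fd;        /* pass to another device/process, or mmap it */
}

/* Import a dma-buf fd back into a GEM handle on (possibly another) device. */
int import_dmabuf_as_gem(int card_fd, int prime_fd, uint32_t *handle)
{
        return drmPrimeFDToHandle(card_fd, prime_fd, handle);
}
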
/linux-5.19.10/drivers/virtio/
virtio_dma_buf.c
20 struct dma_buf *virtio_dma_buf_export in virtio_dma_buf_export()
40 int virtio_dma_buf_attach(struct dma_buf *dma_buf, in virtio_dma_buf_attach() argument
45 container_of(dma_buf->ops, in virtio_dma_buf_attach()
49 ret = ops->device_attach(dma_buf, attach); in virtio_dma_buf_attach()
61 bool is_virtio_dma_buf(struct dma_buf *dma_buf) in is_virtio_dma_buf() argument
63 return dma_buf->ops->attach == &virtio_dma_buf_attach; in is_virtio_dma_buf()
74 int virtio_dma_buf_get_uuid(struct dma_buf *dma_buf, in virtio_dma_buf_get_uuid() argument
78 container_of(dma_buf->ops, in virtio_dma_buf_get_uuid()
81 if (!is_virtio_dma_buf(dma_buf)) in virtio_dma_buf_get_uuid()
84 return ops->get_uuid(dma_buf, uuid); in virtio_dma_buf_get_uuid()
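
virtio_dma_buf.c identifies its own exports by comparing the buffer's .attach pointer against virtio_dma_buf_attach (the is_virtio_dma_buf() check above), which is what lets an importer safely ask for the exported buffer's UUID. A sketch of that importer-side use, with my_lookup_uuid as a hypothetical caller:

#include <linux/dma-buf.h>
#include <linux/uuid.h>
#include <linux/virtio_dma_buf.h>

static int my_lookup_uuid(struct dma_buf *buf, uuid_t *uuid)
{
        if (!is_virtio_dma_buf(buf))    /* ops-pointer comparison shown above */
                return -EINVAL;         /* exported by someone else */

        return virtio_dma_buf_get_uuid(buf, uuid);
}
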
/linux-5.19.10/drivers/gpu/drm/i915/gem/
i915_gem_dmabuf.c
23 static struct drm_i915_gem_object *dma_buf_to_obj(struct dma_buf *buf)
69 static int i915_gem_dmabuf_vmap(struct dma_buf *dma_buf, in i915_gem_dmabuf_vmap() argument
72 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf); in i915_gem_dmabuf_vmap()
84 static void i915_gem_dmabuf_vunmap(struct dma_buf *dma_buf, in i915_gem_dmabuf_vunmap() argument
87 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf); in i915_gem_dmabuf_vunmap()
93 static int i915_gem_dmabuf_mmap(struct dma_buf *dma_buf, struct vm_area_struct *vma) in i915_gem_dmabuf_mmap() argument
95 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf); in i915_gem_dmabuf_mmap()
117 static int i915_gem_begin_cpu_access(struct dma_buf *dma_buf, enum dma_data_direction direction) in i915_gem_begin_cpu_access() argument
119 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf); in i915_gem_begin_cpu_access()
142 static int i915_gem_end_cpu_access(struct dma_buf *dma_buf, enum dma_data_direction direction) in i915_gem_end_cpu_access() argument
[all …]
i915_gem_dmabuf.h
11 struct dma_buf;
14 struct dma_buf *dma_buf);
16 struct dma_buf *i915_gem_prime_export(struct drm_gem_object *gem_obj, int flags);
/linux-5.19.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_dma_buf.c
53 static int amdgpu_dma_buf_attach(struct dma_buf *dmabuf, in amdgpu_dma_buf_attach()
83 static void amdgpu_dma_buf_detach(struct dma_buf *dmabuf, in amdgpu_dma_buf_detach()
141 struct dma_buf *dma_buf = attach->dmabuf; in amdgpu_dma_buf_map() local
142 struct drm_gem_object *obj = dma_buf->priv; in amdgpu_dma_buf_map()
234 static int amdgpu_dma_buf_begin_cpu_access(struct dma_buf *dma_buf, in amdgpu_dma_buf_begin_cpu_access() argument
237 struct amdgpu_bo *bo = gem_to_amdgpu_bo(dma_buf->priv); in amdgpu_dma_buf_begin_cpu_access()
287 struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj, in amdgpu_gem_prime_export()
291 struct dma_buf *buf; in amdgpu_gem_prime_export()
317 amdgpu_dma_buf_create_obj(struct drm_device *dev, struct dma_buf *dma_buf) in amdgpu_dma_buf_create_obj() argument
319 struct dma_resv *resv = dma_buf->resv; in amdgpu_dma_buf_create_obj()
[all …]
amdgpu_dma_buf.h
28 struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj,
31 struct dma_buf *dma_buf);
/linux-5.19.10/drivers/gpu/drm/vmwgfx/
ttm_object.c
98 void (*dmabuf_release)(struct dma_buf *dma_buf);
132 static void ttm_prime_dmabuf_release(struct dma_buf *dma_buf);
512 static bool __must_check get_dma_buf_unless_doomed(struct dma_buf *dmabuf) in get_dma_buf_unless_doomed()
534 BUG_ON(prime->dma_buf != NULL); in ttm_prime_refcount_release()
550 static void ttm_prime_dmabuf_release(struct dma_buf *dma_buf) in ttm_prime_dmabuf_release() argument
553 (struct ttm_prime_object *) dma_buf->priv; in ttm_prime_dmabuf_release()
558 tdev->dmabuf_release(dma_buf); in ttm_prime_dmabuf_release()
560 if (prime->dma_buf == dma_buf) in ttm_prime_dmabuf_release()
561 prime->dma_buf = NULL; in ttm_prime_dmabuf_release()
581 struct dma_buf *dma_buf; in ttm_prime_fd_to_handle() local
[all …]
vmwgfx_prime.c
42 static int vmw_prime_map_attach(struct dma_buf *dma_buf, in vmw_prime_map_attach() argument
48 static void vmw_prime_map_detach(struct dma_buf *dma_buf, in vmw_prime_map_detach() argument
/linux-5.19.10/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c
40 static int omap_gem_dmabuf_begin_cpu_access(struct dma_buf *buffer, in omap_gem_dmabuf_begin_cpu_access()
55 static int omap_gem_dmabuf_end_cpu_access(struct dma_buf *buffer, in omap_gem_dmabuf_end_cpu_access()
63 static int omap_gem_dmabuf_mmap(struct dma_buf *buffer, in omap_gem_dmabuf_mmap()
85 struct dma_buf *omap_gem_prime_export(struct drm_gem_object *obj, int flags) in omap_gem_prime_export()
103 struct dma_buf *dma_buf) in omap_gem_prime_import() argument
110 if (dma_buf->ops == &omap_dmabuf_ops) { in omap_gem_prime_import()
111 obj = dma_buf->priv; in omap_gem_prime_import()
122 attach = dma_buf_attach(dma_buf, dev->dev); in omap_gem_prime_import()
126 get_dma_buf(dma_buf); in omap_gem_prime_import()
134 obj = omap_gem_new_dmabuf(dev, dma_buf->size, sgt); in omap_gem_prime_import()
[all …]
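
omap_gem_prime_import() above shows the canonical importer flow: short-circuit if the dma_buf is one of the driver's own exports, otherwise attach, map, and take a long-term reference. A reduced sketch of that flow under an assumed helper name my_import:

#include <linux/dma-buf.h>

static struct sg_table *my_import(struct device *dev, struct dma_buf *buf,
                                  struct dma_buf_attachment **out_attach)
{
        struct dma_buf_attachment *attach;
        struct sg_table *sgt;

        attach = dma_buf_attach(buf, dev);
        if (IS_ERR(attach))
                return ERR_CAST(attach);

        sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
        if (IS_ERR(sgt)) {
                dma_buf_detach(buf, attach);
                return sgt;
        }

        get_dma_buf(buf);       /* hold the buffer while the mapping exists */
        *out_attach = attach;
        return sgt;             /* undo with dma_buf_unmap_attachment(),
                                 * dma_buf_detach() and dma_buf_put() */
}
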
/linux-5.19.10/drivers/gpu/drm/i915/gem/selftests/
mock_dmabuf.c
53 static void mock_dmabuf_release(struct dma_buf *dma_buf) in mock_dmabuf_release() argument
55 struct mock_dmabuf *mock = to_mock(dma_buf); in mock_dmabuf_release()
64 static int mock_dmabuf_vmap(struct dma_buf *dma_buf, struct iosys_map *map) in mock_dmabuf_vmap() argument
66 struct mock_dmabuf *mock = to_mock(dma_buf); in mock_dmabuf_vmap()
77 static void mock_dmabuf_vunmap(struct dma_buf *dma_buf, struct iosys_map *map) in mock_dmabuf_vunmap() argument
79 struct mock_dmabuf *mock = to_mock(dma_buf); in mock_dmabuf_vunmap()
84 static int mock_dmabuf_mmap(struct dma_buf *dma_buf, struct vm_area_struct *vma) in mock_dmabuf_mmap() argument
98 static struct dma_buf *mock_dmabuf(int npages) in mock_dmabuf()
102 struct dma_buf *dmabuf; in mock_dmabuf()
/linux-5.19.10/drivers/scsi/lpfc/
lpfc_mem.c
554 struct hbq_dmabuf *dma_buf; in lpfc_sli4_rb_alloc() local
556 dma_buf = kzalloc(sizeof(struct hbq_dmabuf), GFP_KERNEL); in lpfc_sli4_rb_alloc()
557 if (!dma_buf) in lpfc_sli4_rb_alloc()
560 dma_buf->hbuf.virt = dma_pool_alloc(phba->lpfc_hrb_pool, GFP_KERNEL, in lpfc_sli4_rb_alloc()
561 &dma_buf->hbuf.phys); in lpfc_sli4_rb_alloc()
562 if (!dma_buf->hbuf.virt) { in lpfc_sli4_rb_alloc()
563 kfree(dma_buf); in lpfc_sli4_rb_alloc()
566 dma_buf->dbuf.virt = dma_pool_alloc(phba->lpfc_drb_pool, GFP_KERNEL, in lpfc_sli4_rb_alloc()
567 &dma_buf->dbuf.phys); in lpfc_sli4_rb_alloc()
568 if (!dma_buf->dbuf.virt) { in lpfc_sli4_rb_alloc()
[all …]
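
The lpfc hit above is a different kind of dma_buf: a driver-private descriptor whose header and data parts come from DMA-coherent pools. The underlying dma_pool API looks like the following sketch (the my_* names and the 1 KiB/16-byte geometry are illustrative):

#include <linux/dmapool.h>

static int my_rb_demo(struct device *dev)
{
        struct dma_pool *pool;
        dma_addr_t phys;
        void *virt;

        /* pool of fixed-size, 16-byte-aligned DMA-coherent buffers */
        pool = dma_pool_create("my_rb_pool", dev, 1024, 16, 0);
        if (!pool)
                return -ENOMEM;

        virt = dma_pool_alloc(pool, GFP_KERNEL, &phys);
        if (!virt) {
                dma_pool_destroy(pool);
                return -ENOMEM;
        }

        /* ... post {virt, phys} to the hardware receive ring here ... */

        dma_pool_free(pool, virt, phys);
        dma_pool_destroy(pool);
        return 0;
}
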
/linux-5.19.10/drivers/dma-buf/
dma-buf-sysfs-stats.h
16 int dma_buf_stats_setup(struct dma_buf *dmabuf);
18 void dma_buf_stats_teardown(struct dma_buf *dmabuf);
28 static inline int dma_buf_stats_setup(struct dma_buf *dmabuf) in dma_buf_stats_setup()
33 static inline void dma_buf_stats_teardown(struct dma_buf *dmabuf) {} in dma_buf_stats_teardown()
dma-buf.c
45 struct dma_buf *dmabuf; in dmabuffs_dname()
61 struct dma_buf *dmabuf; in dma_buf_release()
90 struct dma_buf *dmabuf; in dma_buf_file_release()
130 struct dma_buf *dmabuf; in dma_buf_mmap_internal()
151 struct dma_buf *dmabuf; in dma_buf_llseek()
200 struct dma_buf *dmabuf = container_of(dcb->poll, struct dma_buf, poll); in dma_buf_poll_cb()
233 struct dma_buf *dmabuf; in dma_buf_poll()
314 static long dma_buf_set_name(struct dma_buf *dmabuf, const char __user *buf) in dma_buf_set_name()
332 struct dma_buf *dmabuf; in dma_buf_ioctl()
379 struct dma_buf *dmabuf = file->private_data; in dma_buf_show_fdinfo()
[all …]
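
dma_buf_ioctl() and the poll/mmap hooks above are the file-operations side of the core; from userspace, CPU access to a mapped dma-buf is bracketed with DMA_BUF_IOCTL_SYNC, which lands in the exporter's begin_cpu_access/end_cpu_access callbacks. A hedged userspace sketch (buf_fd is assumed to be a dma-buf fd obtained elsewhere, error handling trimmed):

#include <linux/dma-buf.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

static int cpu_write(int buf_fd, const void *data, size_t len)
{
        struct dma_buf_sync sync = {
                .flags = DMA_BUF_SYNC_START | DMA_BUF_SYNC_WRITE,
        };
        void *map = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, buf_fd, 0);

        if (map == MAP_FAILED)
                return -1;

        ioctl(buf_fd, DMA_BUF_IOCTL_SYNC, &sync);       /* begin CPU access */
        memcpy(map, data, len);
        sync.flags = DMA_BUF_SYNC_END | DMA_BUF_SYNC_WRITE;
        ioctl(buf_fd, DMA_BUF_IOCTL_SYNC, &sync);       /* end CPU access */

        return munmap(map, len);
}
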
dma-buf-sysfs-stats.c
55 ssize_t (*show)(struct dma_buf *dmabuf,
66 struct dma_buf *dmabuf; in dma_buf_stats_attribute_show()
82 static ssize_t exporter_name_show(struct dma_buf *dmabuf, in exporter_name_show()
89 static ssize_t size_show(struct dma_buf *dmabuf, in size_show()
121 void dma_buf_stats_teardown(struct dma_buf *dmabuf) in dma_buf_stats_teardown()
171 int dma_buf_stats_setup(struct dma_buf *dmabuf) in dma_buf_stats_setup()
/linux-5.19.10/drivers/scsi/csiostor/
csio_scsi.c
206 struct csio_dma_buf *dma_buf; in csio_scsi_init_cmd_wr() local
222 dma_buf = &req->dma_buf; in csio_scsi_init_cmd_wr()
225 wr->rsp_dmalen = cpu_to_be32(dma_buf->len); in csio_scsi_init_cmd_wr()
226 wr->rsp_dmaaddr = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_cmd_wr()
298 struct csio_dma_buf *dma_buf; in csio_scsi_init_ultptx_dsgl() local
329 dma_buf = (struct csio_dma_buf *)tmp; in csio_scsi_init_ultptx_dsgl()
331 sgl->addr0 = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_ultptx_dsgl()
333 min(xfer_len, dma_buf->len)); in csio_scsi_init_ultptx_dsgl()
336 sge_pair->addr[1] = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_ultptx_dsgl()
338 min(xfer_len, dma_buf->len)); in csio_scsi_init_ultptx_dsgl()
[all …]
/linux-5.19.10/drivers/i2c/busses/
i2c-qcom-geni.c
94 void *dma_buf; member
366 if (gi2c->dma_buf) { in geni_i2c_rx_msg_cleanup()
370 i2c_put_dma_safe_msg_buf(gi2c->dma_buf, cur, !gi2c->err); in geni_i2c_rx_msg_cleanup()
378 if (gi2c->dma_buf) { in geni_i2c_tx_msg_cleanup()
382 i2c_put_dma_safe_msg_buf(gi2c->dma_buf, cur, !gi2c->err); in geni_i2c_tx_msg_cleanup()
391 void *dma_buf; in geni_i2c_rx_one_msg() local
396 dma_buf = i2c_get_dma_safe_msg_buf(msg, 32); in geni_i2c_rx_one_msg()
397 if (dma_buf) in geni_i2c_rx_one_msg()
405 if (dma_buf && geni_se_rx_dma_prep(se, dma_buf, len, &rx_dma)) { in geni_i2c_rx_one_msg()
407 i2c_put_dma_safe_msg_buf(dma_buf, msg, false); in geni_i2c_rx_one_msg()
[all …]
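
The geni driver above uses the I2C core's bounce-buffer helpers so that message buffers which cannot be DMA-mapped directly still work with DMA transfers. A sketch of the pattern, with my_xfer_one as a hypothetical transfer routine:

#include <linux/errno.h>
#include <linux/i2c.h>

static int my_xfer_one(struct i2c_msg *msg)
{
        /* returns NULL for messages shorter than 32 bytes (or on OOM) */
        u8 *dma_buf = i2c_get_dma_safe_msg_buf(msg, 32);
        int ret = 0;

        if (!dma_buf)
                return -EINVAL;         /* caller falls back to FIFO mode */

        /* ... hand dma_buf to the DMA engine and wait for completion ... */

        /* last argument true: transfer succeeded, copy read data back to msg->buf */
        i2c_put_dma_safe_msg_buf(dma_buf, msg, ret == 0);
        return ret;
}
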
i2c-stm32.c
87 dma->dma_buf = 0; in stm32_i2c_dma_free()
121 dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len, in stm32_i2c_prep_dma_xfer()
123 if (dma_mapping_error(chan_dev, dma->dma_buf)) { in stm32_i2c_prep_dma_xfer()
128 txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf, in stm32_i2c_prep_dma_xfer()
153 dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len, in stm32_i2c_prep_dma_xfer()
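
The stm32 driver above maps the caller's buffer with the streaming DMA API for the duration of one transfer. The core map/check/unmap pattern, sketched with assumed my_* helpers:

#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int my_map_for_tx(struct device *dev, void *buf, size_t len,
                         dma_addr_t *out)
{
        dma_addr_t addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);

        if (dma_mapping_error(dev, addr))       /* always check before using addr */
                return -ENOMEM;

        *out = addr;
        return 0;
}

static void my_unmap_after_tx(struct device *dev, dma_addr_t addr, size_t len)
{
        dma_unmap_single(dev, addr, len, DMA_TO_DEVICE);
}
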
/linux-5.19.10/drivers/xen/
gntdev-dmabuf.c
29 struct dma_buf *dmabuf;
219 static int dmabuf_exp_ops_attach(struct dma_buf *dma_buf, in dmabuf_exp_ops_attach() argument
234 static void dmabuf_exp_ops_detach(struct dma_buf *dma_buf, in dmabuf_exp_ops_detach() argument
326 static void dmabuf_exp_ops_release(struct dma_buf *dma_buf) in dmabuf_exp_ops_release() argument
328 struct gntdev_dmabuf *gntdev_dmabuf = dma_buf->priv; in dmabuf_exp_ops_release()
576 struct dma_buf *dma_buf; in dmabuf_imp_to_refs() local
582 dma_buf = dma_buf_get(fd); in dmabuf_imp_to_refs()
583 if (IS_ERR(dma_buf)) in dmabuf_imp_to_refs()
584 return ERR_CAST(dma_buf); in dmabuf_imp_to_refs()
595 attach = dma_buf_attach(dma_buf, dev); in dmabuf_imp_to_refs()
[all …]
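
gntdev-dmabuf.c above starts its import path by resolving a dma-buf fd handed in from userspace. That step in isolation, with my_import_fd as a hypothetical helper:

#include <linux/dma-buf.h>

static struct dma_buf *my_import_fd(int fd)
{
        struct dma_buf *buf = dma_buf_get(fd);

        if (IS_ERR(buf))
                return buf;     /* error pointer if fd is not a dma-buf */

        /* ... dma_buf_attach()/dma_buf_map_attachment() as in the omap sketch ... */

        return buf;             /* caller releases with dma_buf_put(buf) */
}
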
/linux-5.19.10/drivers/dma-buf/heaps/
cma_heap.c
50 static int cma_heap_attach(struct dma_buf *dmabuf, in cma_heap_attach()
83 static void cma_heap_detach(struct dma_buf *dmabuf, in cma_heap_detach()
121 static int cma_heap_dma_buf_begin_cpu_access(struct dma_buf *dmabuf, in cma_heap_dma_buf_begin_cpu_access()
142 static int cma_heap_dma_buf_end_cpu_access(struct dma_buf *dmabuf, in cma_heap_dma_buf_end_cpu_access()
181 static int cma_heap_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma) in cma_heap_mmap()
205 static int cma_heap_vmap(struct dma_buf *dmabuf, struct iosys_map *map) in cma_heap_vmap()
232 static void cma_heap_vunmap(struct dma_buf *dmabuf, struct iosys_map *map) in cma_heap_vunmap()
245 static void cma_heap_dma_buf_release(struct dma_buf *dmabuf) in cma_heap_dma_buf_release()
276 static struct dma_buf *cma_heap_allocate(struct dma_heap *heap, in cma_heap_allocate()
288 struct dma_buf *dmabuf; in cma_heap_allocate()
system_heap.c
83 static int system_heap_attach(struct dma_buf *dmabuf, in system_heap_attach()
114 static void system_heap_detach(struct dma_buf *dmabuf, in system_heap_detach()
154 static int system_heap_dma_buf_begin_cpu_access(struct dma_buf *dmabuf, in system_heap_dma_buf_begin_cpu_access()
175 static int system_heap_dma_buf_end_cpu_access(struct dma_buf *dmabuf, in system_heap_dma_buf_end_cpu_access()
196 static int system_heap_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma) in system_heap_mmap()
244 static int system_heap_vmap(struct dma_buf *dmabuf, struct iosys_map *map) in system_heap_vmap()
272 static void system_heap_vunmap(struct dma_buf *dmabuf, struct iosys_map *map) in system_heap_vunmap()
285 static void system_heap_dma_buf_release(struct dma_buf *dmabuf) in system_heap_dma_buf_release()
335 static struct dma_buf *system_heap_allocate(struct dma_heap *heap, in system_heap_allocate()
344 struct dma_buf *dmabuf; in system_heap_allocate()
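
The cma_heap and system_heap callbacks above back the /dev/dma_heap/* character devices, so plain userspace code can allocate dma-bufs without going through a GPU driver. A sketch against the standard system heap node (error handling trimmed):

#include <linux/dma-heap.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <unistd.h>

int main(void)
{
        struct dma_heap_allocation_data alloc = {
                .len = 4096,
                .fd_flags = O_RDWR | O_CLOEXEC,
        };
        int heap = open("/dev/dma_heap/system", O_RDONLY | O_CLOEXEC);

        if (heap < 0 || ioctl(heap, DMA_HEAP_IOCTL_ALLOC, &alloc) < 0) {
                perror("dma-heap alloc");
                return 1;
        }

        printf("allocated dma-buf fd %d\n", alloc.fd);  /* mmap or share this fd */
        close(alloc.fd);
        close(heap);
        return 0;
}
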
/linux-5.19.10/drivers/media/common/videobuf2/
videobuf2-vmalloc.c
32 struct dma_buf *dbuf;
208 static int vb2_vmalloc_dmabuf_ops_attach(struct dma_buf *dbuf, in vb2_vmalloc_dmabuf_ops_attach()
247 static void vb2_vmalloc_dmabuf_ops_detach(struct dma_buf *dbuf, in vb2_vmalloc_dmabuf_ops_detach()
309 static void vb2_vmalloc_dmabuf_ops_release(struct dma_buf *dbuf) in vb2_vmalloc_dmabuf_ops_release()
315 static int vb2_vmalloc_dmabuf_ops_vmap(struct dma_buf *dbuf, in vb2_vmalloc_dmabuf_ops_vmap()
325 static int vb2_vmalloc_dmabuf_ops_mmap(struct dma_buf *dbuf, in vb2_vmalloc_dmabuf_ops_mmap()
341 static struct dma_buf *vb2_vmalloc_get_dmabuf(struct vb2_buffer *vb, in vb2_vmalloc_get_dmabuf()
346 struct dma_buf *dbuf; in vb2_vmalloc_get_dmabuf()
409 struct dma_buf *dbuf, in vb2_vmalloc_attach_dmabuf()
/linux-5.19.10/drivers/mtd/nand/raw/
lpc32xx_mlc.c
201 uint8_t *dma_buf; member
442 uint8_t *dma_buf; in lpc32xx_read_page() local
446 dma_buf = buf; in lpc32xx_read_page()
449 dma_buf = host->dma_buf; in lpc32xx_read_page()
475 res = lpc32xx_xmit_dma(mtd, dma_buf + i * 512, 512, in lpc32xx_read_page()
494 memcpy(buf, dma_buf, mtd->writesize); in lpc32xx_read_page()
506 uint8_t *dma_buf = (uint8_t *)buf; in lpc32xx_write_page_lowlevel() local
511 dma_buf = host->dma_buf; in lpc32xx_write_page_lowlevel()
512 memcpy(dma_buf, buf, mtd->writesize); in lpc32xx_write_page_lowlevel()
523 res = lpc32xx_xmit_dma(mtd, dma_buf + i * 512, 512, in lpc32xx_write_page_lowlevel()
[all …]
