Lines matching refs: umem_dmabuf (drivers/infiniband/core/umem_dmabuf.c)

int ib_umem_dmabuf_map_pages(struct ib_umem_dmabuf *umem_dmabuf)
{
        /* the caller must hold the dma-buf's reservation lock */
        dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);

        if (umem_dmabuf->sgt)           /* already mapped */
                goto wait_fence;

        sgt = dma_buf_map_attachment(umem_dmabuf->attach, DMA_BIDIRECTIONAL);
        ...
        /* clip the sg list in place to the page-aligned umem range */
        start = ALIGN_DOWN(umem_dmabuf->umem.address, PAGE_SIZE);
        end = ALIGN(umem_dmabuf->umem.address + umem_dmabuf->umem.length,
                    PAGE_SIZE);
        ...
        umem_dmabuf->first_sg = sg;             /* first entry, advanced by offset */
        umem_dmabuf->first_sg_offset = offset;
        ...
        umem_dmabuf->last_sg = sg;              /* last entry, shortened by trim */
        umem_dmabuf->last_sg_trim = trim;
        ...
        umem_dmabuf->umem.sgt_append.sgt.sgl = umem_dmabuf->first_sg;
        umem_dmabuf->umem.sgt_append.sgt.nents = nmap;
        umem_dmabuf->sgt = sgt;

wait_fence:
        /* wait for the exporter's fences before HW may touch the pages */
        ret = dma_resv_wait_timeout(umem_dmabuf->attach->dmabuf->resv, ...);
}
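
ib_umem_dmabuf_map_pages() asserts the reservation lock, so every caller has
to take it first. A minimal caller sketch, assuming a hypothetical driver
helper my_mr_revalidate() (only the ib_umem_dmabuf_* and dma_resv_* calls are
from this file):

/* sketch: re-map a dma-buf MR, e.g. after a move_notify invalidation */
static int my_mr_revalidate(struct ib_umem_dmabuf *umem_dmabuf)
{
        int err;

        dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL);
        err = ib_umem_dmabuf_map_pages(umem_dmabuf);
        dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);
        return err;
}
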
void ib_umem_dmabuf_unmap_pages(struct ib_umem_dmabuf *umem_dmabuf)
{
        dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);

        if (!umem_dmabuf->sgt)
                return;

        /* restore the sg list that map_pages trimmed in place */
        if (umem_dmabuf->first_sg) {
                sg_dma_address(umem_dmabuf->first_sg) -=
                        umem_dmabuf->first_sg_offset;
                sg_dma_len(umem_dmabuf->first_sg) +=
                        umem_dmabuf->first_sg_offset;
                umem_dmabuf->first_sg = NULL;
                umem_dmabuf->first_sg_offset = 0;
        }
        if (umem_dmabuf->last_sg) {
                sg_dma_len(umem_dmabuf->last_sg) +=
                        umem_dmabuf->last_sg_trim;
                umem_dmabuf->last_sg = NULL;
                umem_dmabuf->last_sg_trim = 0;
        }

        dma_buf_unmap_attachment(umem_dmabuf->attach, umem_dmabuf->sgt,
                                 DMA_BIDIRECTIONAL);
        umem_dmabuf->sgt = NULL;
}
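
The first_sg/last_sg bookkeeping exists because map_pages edits the
exporter-owned sg list in place; unmap_pages must hand back exactly what
dma_buf_map_attachment() returned. A worked example with illustrative
numbers (not from the source):

/*
 * exporter's first entry:  sg_dma_address = 0x10000, sg_dma_len = 0x4000
 * after map_pages, with first_sg_offset = 0x1000:
 *                          sg_dma_address = 0x11000, sg_dma_len = 0x3000
 * after the restore above:
 *                          sg_dma_address = 0x10000, sg_dma_len = 0x4000
 * last_sg_trim is added back onto the last entry's length the same way.
 */
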
struct ib_umem_dmabuf *ib_umem_dmabuf_get(struct ib_device *device,
                unsigned long offset, size_t size, int fd, int access,
                const struct dma_buf_attach_ops *ops)
{
        struct ib_umem_dmabuf *umem_dmabuf;
        ...
        umem_dmabuf = kzalloc(sizeof(*umem_dmabuf), GFP_KERNEL);
        if (!umem_dmabuf) {
                ...             /* -ENOMEM unwind */
        }
        umem = &umem_dmabuf->umem;
        ...
        /* the exporter invalidates via ops->move_notify; umem_dmabuf is
         * handed back to that callback as attach->importer_priv */
        umem_dmabuf->attach = dma_buf_dynamic_attach(dmabuf, device->dma_device,
                                                     ops, umem_dmabuf);
        if (IS_ERR(umem_dmabuf->attach)) {
                ret = ERR_CAST(umem_dmabuf->attach);
                ...
        }
        return umem_dmabuf;
        ...
        kfree(umem_dmabuf);     /* error unwind */
        ...
}
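
dma_buf_dynamic_attach() stores the caller-supplied ops table and passes
umem_dmabuf back as attach->importer_priv on every callback. A sketch of the
importer side, assuming hypothetical names my_attach_ops and my_move_notify
(the callback shape follows what dynamic importers such as mlx5 do):

/* called by the exporter with the reservation lock already held;
 * a real driver must also quiesce HW access before unmapping */
static void my_move_notify(struct dma_buf_attachment *attach)
{
        struct ib_umem_dmabuf *umem_dmabuf = attach->importer_priv;

        dma_resv_assert_held(attach->dmabuf->resv);
        ib_umem_dmabuf_unmap_pages(umem_dmabuf);
}

static const struct dma_buf_attach_ops my_attach_ops = {
        .allow_peer2peer = true,        /* opt in to PCIe P2P mappings */
        .move_notify = my_move_notify,
};

Passing &my_attach_ops as the ops argument of ib_umem_dmabuf_get() wires this
table into the attachment.
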
static void
ib_umem_dmabuf_unsupported_move_notify(struct dma_buf_attachment *attach)
{
        struct ib_umem_dmabuf *umem_dmabuf = attach->importer_priv;

        /* pinned memory must never move, so any invalidation is a bug */
        ibdev_warn_ratelimited(umem_dmabuf->umem.ibdev, ...);
}
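
This warn-only callback exists because a pinned buffer must never be moved by
the exporter. In the same file it is wired into the pinned-attachment ops
table roughly as follows (reproduced from memory, so treat the exact layout
as an assumption):

static struct dma_buf_attach_ops ib_umem_dmabuf_attach_pinned_ops = {
        .allow_peer2peer = true,
        .move_notify = ib_umem_dmabuf_unsupported_move_notify,
};
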
struct ib_umem_dmabuf *ib_umem_dmabuf_get_pinned(struct ib_device *device,
                unsigned long offset, size_t size, int fd, int access)
{
        struct ib_umem_dmabuf *umem_dmabuf;
        ...
        umem_dmabuf = ib_umem_dmabuf_get(device, offset, size, fd, access,
                                         &ib_umem_dmabuf_attach_pinned_ops);
        if (IS_ERR(umem_dmabuf))
                return umem_dmabuf;

        /* pin first so the buffer cannot move, then map it once */
        dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL);
        err = dma_buf_pin(umem_dmabuf->attach);
        if (err)
                goto err_release;
        umem_dmabuf->pinned = 1;

        err = ib_umem_dmabuf_map_pages(umem_dmabuf);
        if (err)
                goto err_unpin;
        dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);
        return umem_dmabuf;

err_unpin:
        dma_buf_unpin(umem_dmabuf->attach);
err_release:
        dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);
        ib_umem_release(&umem_dmabuf->umem);
        return ERR_PTR(err);
}
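
The pinned variant is meant for devices that cannot handle on-demand
invalidation. A hedged sketch of a driver-side registration path, where
my_reg_dmabuf_mr() and my_create_mr() are hypothetical stand-ins for the
HW-specific MR programming:

static struct ib_mr *my_reg_dmabuf_mr(struct ib_pd *pd, u64 offset,
                                      u64 length, int fd, int access)
{
        struct ib_umem_dmabuf *umem_dmabuf;

        /* pins and maps once; move_notify will never fire */
        umem_dmabuf = ib_umem_dmabuf_get_pinned(pd->device, offset, length,
                                                fd, access);
        if (IS_ERR(umem_dmabuf))
                return ERR_CAST(umem_dmabuf);

        /* hypothetical: build the HW translation from the mapped umem */
        return my_create_mr(pd, &umem_dmabuf->umem, access);
}
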
void ib_umem_dmabuf_release(struct ib_umem_dmabuf *umem_dmabuf)
{
        struct dma_buf *dmabuf = umem_dmabuf->attach->dmabuf;

        /* unmap (and unpin, for the pinned variant) under the resv lock */
        dma_resv_lock(dmabuf->resv, NULL);
        ib_umem_dmabuf_unmap_pages(umem_dmabuf);
        if (umem_dmabuf->pinned)
                dma_buf_unpin(umem_dmabuf->attach);
        dma_resv_unlock(dmabuf->resv);

        dma_buf_detach(dmabuf, umem_dmabuf->attach);
        dma_buf_put(dmabuf);
        kfree(umem_dmabuf);
}
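
Drivers do not normally call ib_umem_dmabuf_release() directly:
ib_umem_release() detects dma-buf-backed umems and routes them here. A
minimal teardown sketch, assuming a hypothetical MR wrapper (struct my_mr and
my_destroy_mkey() are illustrative):

struct my_mr {                          /* hypothetical driver MR wrapper */
        struct ib_mr ibmr;
        struct ib_umem *umem;
};

static int my_dereg_mr(struct my_mr *mr)
{
        my_destroy_mkey(mr);            /* hypothetical: stop HW access first */
        ib_umem_release(mr->umem);      /* unmaps, unpins, detaches, frees */
        kfree(mr);
        return 0;
}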