Lines matching refs: iov (drivers/vhost/vringh.c)

vringh_kiov_advance(): advance a kiov by len bytes, fixing up each fully
consumed element as it goes:

	void vringh_kiov_advance(struct vringh_kiov *iov, size_t len)
	{
		while (len && iov->i < iov->used) {
			size_t partlen = min(iov->iov[iov->i].iov_len, len);

			iov->consumed += partlen;
			iov->iov[iov->i].iov_len -= partlen;
			iov->iov[iov->i].iov_base += partlen;

			if (!iov->iov[iov->i].iov_len) {
				/* Fix up old iov element then increment. */
				iov->iov[iov->i].iov_len = iov->consumed;
				iov->iov[iov->i].iov_base -= iov->consumed;

				iov->consumed = 0;
				iov->i++;
			}

			len -= partlen;
		}
	}
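
The loop consumes elements in place: iov_base advances and iov_len shrinks,
while consumed tracks how far into the current element we are. When an
element empties, its original base and length are restored before i moves
on, so the array survives intact and can be rewound later
(vringh_kiov_reset() relies on this). Below is a minimal userspace sketch
of the same consume-and-restore pattern; struct kiov and kiov_advance()
are illustrative stand-ins, not the kernel types:

	#include <stdio.h>
	#include <sys/uio.h>

	struct kiov {
		struct iovec *iov;
		size_t i, used, consumed;
	};

	static void kiov_advance(struct kiov *k, size_t len)
	{
		while (len && k->i < k->used) {
			size_t part = k->iov[k->i].iov_len < len ?
				      k->iov[k->i].iov_len : len;

			k->consumed += part;
			k->iov[k->i].iov_len -= part;
			k->iov[k->i].iov_base = (char *)k->iov[k->i].iov_base + part;

			if (!k->iov[k->i].iov_len) {
				/* Restore the element before moving on, so
				   the array can be rewound and reused. */
				k->iov[k->i].iov_len = k->consumed;
				k->iov[k->i].iov_base =
					(char *)k->iov[k->i].iov_base - k->consumed;
				k->consumed = 0;
				k->i++;
			}
			len -= part;
		}
	}

	int main(void)
	{
		char a[4], b[8];
		struct iovec v[2] = { { a, sizeof(a) }, { b, sizeof(b) } };
		struct kiov k = { v, 0, 2, 0 };

		kiov_advance(&k, 6);	/* all of a[], then 2 bytes into b[] */
		printf("i=%zu consumed=%zu len[1]=%zu\n",
		       k.i, k.consumed, k.iov[1].iov_len);	/* i=1 consumed=2 len[1]=6 */
		return 0;
	}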

vringh_iov_xfer(): the same walk, but each contiguous chunk is handed to an
xfer() callback:

	static inline ssize_t vringh_iov_xfer(struct vringh *vrh,
					      struct vringh_kiov *iov,
					      void *ptr, size_t len,
					      int (*xfer)(const struct vringh *vrh,
							  void *addr, void *ptr,
							  size_t len))
	{
		int err, done = 0;

		while (len && iov->i < iov->used) {
			size_t partlen;

			partlen = min(iov->iov[iov->i].iov_len, len);
			err = xfer(vrh, iov->iov[iov->i].iov_base, ptr, partlen);
			if (err)
				return err;
			done += partlen;
			len -= partlen;
			ptr += partlen;
			iov->consumed += partlen;
			iov->iov[iov->i].iov_len -= partlen;
			iov->iov[iov->i].iov_base += partlen;

			if (!iov->iov[iov->i].iov_len) {
				/* Fix up old iov element then increment. */
				iov->iov[iov->i].iov_len = iov->consumed;
				iov->iov[iov->i].iov_base -= iov->consumed;

				iov->consumed = 0;
				iov->i++;
			}
		}
		return done;
	}
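
Binding a different accessor to xfer() is how one loop serves every copy
helper (kernel-space memcpy, copy_{to,from}_user-based variants, and the
iotlb ones below). Reusing struct kiov and kiov_advance() from the sketch
above, a simplified callback-driven version might look like this
(kiov_xfer() and memcpy_xfer() are illustrative stand-ins, not the kernel
functions):

	#include <string.h>

	static ssize_t kiov_xfer(struct kiov *k, void *ptr, size_t len,
				 int (*xfer)(void *addr, void *ptr, size_t len))
	{
		ssize_t done = 0;

		while (len && k->i < k->used) {
			size_t part = k->iov[k->i].iov_len < len ?
				      k->iov[k->i].iov_len : len;
			int err = xfer(k->iov[k->i].iov_base, ptr, part);

			if (err)
				return err;
			done += part;
			len -= part;
			ptr = (char *)ptr + part;
			kiov_advance(k, part);	/* same consume/restore as above */
		}
		return done;
	}

	/* Trivial accessor: copy out of the ring into a flat buffer. */
	static int memcpy_xfer(void *addr, void *ptr, size_t len)
	{
		memcpy(ptr, addr, len);
		return 0;
	}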

resize_iovec(): grow the backing array, doubling its size:

	static int resize_iovec(struct vringh_kiov *iov, gfp_t gfp)
	{
		struct kvec *new;
		unsigned int flag, new_num = (iov->max_num & ~VRINGH_IOV_ALLOCATED) * 2;

		if (new_num < 8)
			new_num = 8;

		flag = (iov->max_num & VRINGH_IOV_ALLOCATED);
		if (flag)
			new = krealloc_array(iov->iov, new_num,
					     sizeof(struct iovec), gfp);
		else {
			new = kmalloc_array(new_num, sizeof(struct iovec), gfp);
			if (new) {
				memcpy(new, iov->iov,
				       iov->max_num * sizeof(struct iovec));
				flag = VRINGH_IOV_ALLOCATED;
			}
		}
		if (!new)
			return -ENOMEM;
		iov->iov = new;
		iov->max_num = (new_num | flag);
		return 0;
	}
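
One bit of ownership bookkeeping is folded into max_num itself: while
VRINGH_IOV_ALLOCATED is clear the array still belongs to the caller (often
on-stack), so growing means allocate, copy, adopt; once set, a plain
krealloc_array() suffices. A standalone sketch of that pattern follows;
ALLOCATED_BIT, struct kiov_buf and the libc allocator calls are userspace
stand-ins:

	#include <stdlib.h>
	#include <string.h>
	#include <sys/uio.h>

	#define ALLOCATED_BIT 0x80000000u	/* stand-in for VRINGH_IOV_ALLOCATED */

	struct kiov_buf {
		struct iovec *iov;
		unsigned int used;
		unsigned int max_num;	/* capacity with ALLOCATED_BIT folded in */
	};

	static int kiov_grow(struct kiov_buf *k)
	{
		unsigned int flag = k->max_num & ALLOCATED_BIT;
		unsigned int new_num = (k->max_num & ~ALLOCATED_BIT) * 2;
		struct iovec *new;

		if (new_num < 8)
			new_num = 8;

		if (flag) {
			/* Array already heap-owned: plain realloc. */
			new = realloc(k->iov, new_num * sizeof(*new));
		} else {
			/* Caller-provided array: allocate, copy, then take
			   ownership by setting the flag. */
			new = malloc(new_num * sizeof(*new));
			if (new) {
				memcpy(new, k->iov,
				       (k->max_num & ~ALLOCATED_BIT) * sizeof(*new));
				flag = ALLOCATED_BIT;
			}
		}
		if (!new)
			return -1;	/* the kernel returns -ENOMEM here */

		k->iov = new;
		k->max_num = new_num | flag;
		return 0;
	}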

__vringh_iov() (excerpt): each descriptor is routed to the writable or
readable kiov, the backing array is grown on demand, and the translated
range is appended:

	struct vringh_kiov *iov;
	...
	if (desc.flags & cpu_to_vringh16(vrh, VRING_DESC_F_WRITE))
		iov = wiov;
	else {
		iov = riov;
		...
	}
	...
	if (!iov) {
		vringh_bad("Unexpected %s desc",
			   !wiov ? "writable" : "readable");
		err = -EPROTO;
		goto fail;
	}
	...
	if (unlikely(iov->used == (iov->max_num & ~VRINGH_IOV_ALLOCATED))) {
		err = resize_iovec(iov, gfp);
		if (err)
			goto fail;
	}

	iov->iov[iov->used].iov_base = addr;
	iov->iov[iov->used].iov_len = len;
	iov->used++;
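
With that, the append step is just a capacity check followed by a write at
used. A sketch reusing kiov_buf and kiov_grow() from above (kiov_append()
is a hypothetical helper; the kernel open-codes this inside the
descriptor-walking loop):

	static int kiov_append(struct kiov_buf *k, void *base, size_t len)
	{
		/* Grow exactly when used reaches the masked capacity. */
		if (k->used == (k->max_num & ~ALLOCATED_BIT) && kiov_grow(k) < 0)
			return -1;

		k->iov[k->used].iov_base = base;
		k->iov[k->used].iov_len = len;
		k->used++;
		return 0;
	}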

vringh_getdesc_user(): compile-time layout check between the two kiov
flavours:

	BUILD_BUG_ON(offsetof(struct vringh_kiov, iov) !=
		     offsetof(struct vringh_iov, iov));
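
struct vringh_kiov holds struct kvec entries (kernel-space pointers) while
struct vringh_iov holds struct iovec entries (userspace pointers); the
user-ring code treats one as the other, and these compile-time checks are
what make that cast legal. The same idea in portable C11, with simplified
stand-in structs (the real ones live in include/linux/vringh.h):

	#include <stddef.h>
	#include <sys/uio.h>

	/* Simplified stand-ins for the kernel structs. */
	struct demo_kvec { void *iov_base; size_t iov_len; };

	struct demo_vringh_kiov { struct demo_kvec *iov; size_t consumed, i, used, max_num; };
	struct demo_vringh_iov  { struct iovec     *iov; size_t consumed, i, used, max_num; };

	/* Layout pinning, like BUILD_BUG_ON() in vringh_getdesc_user(). */
	_Static_assert(offsetof(struct demo_vringh_kiov, iov) ==
		       offsetof(struct demo_vringh_iov, iov),
		       "iov member must sit at the same offset");
	_Static_assert(sizeof(struct demo_kvec) == sizeof(struct iovec),
		       "element types must have identical layout");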

struct iotlb_vec: the iotlb path stores translated ranges in a union of
the two possible representations:

	struct iotlb_vec {
		union {
			struct iovec *iovec;
			struct bio_vec *bvec;
		} iov;
		...
	};

iotlb_translate() (excerpt): each translated range lands in one or the
other, depending on the address type the device uses:

	if (vrh->use_va) {
		struct iovec *iovec = ivec->iov.iovec;
		...
	} else {
		struct bio_vec *bvec = ivec->iov.bvec;
		...
	}
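
The union lets one scratch array serve both translation targets: when the
vDPA parent works with guest virtual addresses (vrh->use_va), translated
ranges are stored as iovecs; otherwise they become page-plus-offset
bio_vec entries. A compilable sketch of the shape (struct page left
opaque; demo_bvec and the count field are demo assumptions, not the
kernel definitions):

	#include <sys/uio.h>

	struct page;				/* opaque, as in kernel code */
	struct demo_bvec {
		struct page *bv_page;
		unsigned int bv_len;
		unsigned int bv_offset;
	};

	struct demo_iotlb_vec {
		union {
			struct iovec *iovec;	/* use_va: user virtual addresses */
			struct demo_bvec *bvec;	/* !use_va: page + offset ranges  */
		} iov;
		unsigned int count;		/* assumed capacity field for the demo */
	};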

copy_from_iotlb() / copy_to_iotlb() (excerpt): both keep a scratch array
that can be viewed as either iovecs or bio_vecs, hand it to
iotlb_translate(), and wrap whatever got filled in into an iov_iter:

	struct iotlb_vec ivec;
	union {
		struct iovec iovec[IOTLB_IOV_STRIDE];
		struct bio_vec bvec[IOTLB_IOV_STRIDE];
	} iov;

	ivec.iov.iovec = iov.iovec;
	...
	if (vrh->use_va) {
		iov_iter_init(&iter, ITER_SOURCE, ivec.iov.iovec, ret,
			      translated);
	} else {
		iov_iter_bvec(&iter, ITER_SOURCE, ivec.iov.bvec, ret,
			      translated);
	}

(copy_to_iotlb() is the mirror image, with ITER_DEST.)
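
A bulk copy is therefore a loop: translate as much of the guest range as
fits in the scratch array, wrap the filled entries in an iov_iter
(ITER_SOURCE when reading from the ring, ITER_DEST when writing to it),
copy, and continue from where the translation stopped. A userspace sketch
of that translate-then-copy shape; the identity translate(), the 4 KiB map
granularity, and the memcpy loop are demo assumptions standing in for
iotlb lookups and copy_from_iter():

	#include <string.h>
	#include <sys/types.h>
	#include <sys/uio.h>

	#define STRIDE 16
	#define MAPLEN 4096u

	static char guest_mem[3 * MAPLEN];	/* callers stay within this */

	/* Identity translation, split at MAPLEN boundaries like iotlb maps. */
	static int translate(size_t addr, size_t len, size_t *translated,
			     struct iovec *iov, int count)
	{
		int n = 0;
		size_t done = 0;

		while (done < len && n < count) {
			size_t space = MAPLEN - (addr % MAPLEN);
			size_t part = len - done < space ? len - done : space;

			iov[n].iov_base = guest_mem + addr;
			iov[n].iov_len = part;
			addr += part;
			done += part;
			n++;
		}
		*translated = done;
		return n;
	}

	static ssize_t copy_from_guest(void *dst, size_t addr, size_t len)
	{
		size_t total = 0;

		while (total < len) {
			struct iovec iov[STRIDE];
			size_t translated;
			int n = translate(addr + total, len - total,
					  &translated, iov, STRIDE);

			/* Stands in for copy_from_iter() over the entries. */
			for (int i = 0; i < n; i++) {
				memcpy(dst, iov[i].iov_base, iov[i].iov_len);
				dst = (char *)dst + iov[i].iov_len;
			}
			total += translated;
		}
		return total;
	}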

getu16_iotlb() (excerpt): the accessor for a single 16-bit ring field uses
a one-element scratch union:

	union {
		struct iovec iovec[1];
		struct bio_vec bvec[1];
	} iov;
	...
	ivec.iov.iovec = iov.iovec;
	...
	if (vrh->use_va) {
		ret = __get_user(tmp, (__virtio16 __user *)ivec.iov.iovec[0].iov_base);
		...
	} else {
		void *kaddr = kmap_local_page(ivec.iov.bvec[0].bv_page);
		void *from = kaddr + ivec.iov.bvec[0].bv_offset;

		tmp = READ_ONCE(*(__virtio16 *)from);
		kunmap_local(kaddr);
	}

putu16_iotlb() (excerpt): the same shape, but the scratch union holds a
single element rather than a one-element array, so its address is taken:

	union {
		struct iovec iovec;
		struct bio_vec bvec;
	} iov;
	...
	ivec.iov.iovec = &iov.iovec;
	...
	if (vrh->use_va) {
		ret = __put_user(tmp, (__virtio16 __user *)ivec.iov.iovec[0].iov_base);
		...
	} else {
		void *kaddr = kmap_local_page(ivec.iov.bvec[0].bv_page);
		void *to = kaddr + ivec.iov.bvec[0].bv_offset;

		WRITE_ONCE(*(__virtio16 *)to, tmp);
		kunmap_local(kaddr);
	}
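
Both 16-bit accessors translate exactly one location and then need an
untorn load or store: __get_user()/__put_user() when it is a user VA, or
kmap_local_page() plus READ_ONCE()/WRITE_ONCE() when it is a page. The
requirement is a single aligned 16-bit access rather than a byte-wise
copy; a rough userspace equivalent of that access pattern (volatile here
approximates READ_ONCE/WRITE_ONCE semantics on aligned u16):

	#include <stdint.h>

	static inline uint16_t read_once_u16(const volatile uint16_t *p)
	{
		return *p;	/* one aligned 16-bit load */
	}

	static inline void write_once_u16(volatile uint16_t *p, uint16_t v)
	{
		*p = v;		/* one aligned 16-bit store */
	}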