
Searched refs:iov_offset (Results 1 – 16 of 16) sorted by relevance

/linux-6.1.9/lib/
iov_iter.c 23 base = __p + i->iov_offset; \
25 i->iov_offset += len; \
32 size_t skip = i->iov_offset; \
47 i->iov_offset = skip; \
53 unsigned skip = i->iov_offset; \
75 i->iov_offset = skip; \
83 loff_t start = i->xarray_start + i->iov_offset; \
115 i->iov_offset += __off; \
356 n -= fault_in_readable(i->ubuf + i->iov_offset, n); in fault_in_iov_iter_readable()
364 for (p = i->iov, skip = i->iov_offset; count; p++, skip = 0) { in fault_in_iov_iter_readable()
[all …]
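
The iov_iter.c hits above all follow one pattern: the current read/write position inside the first remaining segment is iov_base plus iov_offset, and after each copy the iterator bumps iov_offset, resetting it to zero once a segment has been fully consumed. Below is a minimal userspace sketch of that walk, assuming a plain struct iovec array rather than the kernel's iterate_* macros; mini_iter and mini_copy_from_iter are illustrative names, not kernel API.

#include <stddef.h>
#include <string.h>
#include <sys/uio.h>   /* struct iovec */

/* Simplified stand-in for struct iov_iter: an iovec array plus the
 * offset already consumed inside the first remaining segment. */
struct mini_iter {
	const struct iovec *iov;
	unsigned long nr_segs;
	size_t iov_offset;
};

/* Copy up to len bytes out of the iterator, advancing iov_offset the
 * way the lib/iov_iter.c snippets above do. */
static size_t mini_copy_from_iter(void *dst, size_t len, struct mini_iter *i)
{
	size_t copied = 0;

	while (len && i->nr_segs) {
		size_t skip = i->iov_offset;
		size_t avail = i->iov->iov_len - skip;
		size_t n = len < avail ? len : avail;

		/* base = iov_base + iov_offset, as in the line 23 hit above */
		memcpy((char *)dst + copied,
		       (const char *)i->iov->iov_base + skip, n);

		copied += n;
		len -= n;
		i->iov_offset += n;

		/* Segment fully consumed: step to the next one and
		 * restart its offset at zero. */
		if (i->iov_offset == i->iov->iov_len) {
			i->iov++;
			i->nr_segs--;
			i->iov_offset = 0;
		}
	}
	return copied;
}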
/linux-6.1.9/include/linux/
uio.h 33 size_t iov_offset; member
44 size_t iov_offset; member
74 state->iov_offset = iter->iov_offset; in iov_iter_save_state()
144 .iov_base = iter->iov->iov_base + iter->iov_offset, in iov_iter_iovec()
146 iter->iov->iov_len - iter->iov_offset), in iov_iter_iovec()
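
The uio.h hits show both the field itself (lines 33 and 44) and the two ways it is consumed: iov_iter_save_state() snapshots iov_offset so a failed operation can be replayed, and iov_iter_iovec() derives the "current" iovec by advancing the base pointer and shrinking the length by iov_offset. A hedged sketch of that derivation follows, using a simplified userspace model rather than the real struct iov_iter.

#include <stddef.h>
#include <sys/uio.h>

/* Simplified model of the iterator state tracked in uio.h. */
struct mini_iter {
	const struct iovec *iov;
	size_t iov_offset;   /* bytes already consumed in *iov */
	size_t count;        /* total bytes left in the iterator */
};

/* Mirror of the iov_iter_iovec() lines above: an iovec that starts at
 * the current position and covers whatever remains of the first
 * segment, capped at the iterator's remaining count. */
static struct iovec mini_iter_iovec(const struct mini_iter *iter)
{
	size_t left = iter->iov->iov_len - iter->iov_offset;

	return (struct iovec){
		.iov_base = (char *)iter->iov->iov_base + iter->iov_offset,
		.iov_len  = left < iter->count ? left : iter->count,
	};
}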
/linux-6.1.9/drivers/net/ethernet/google/gve/
gve_tx.c 96 iov[0].iov_offset = fifo->head; in gve_tx_alloc_fifo()
107 iov[1].iov_offset = 0; /* Start of fifo*/ in gve_tx_alloc_fifo()
430 u64 iov_offset, u64 iov_len) in gve_dma_sync_for_device() argument
432 u64 last_page = (iov_offset + iov_len - 1) / PAGE_SIZE; in gve_dma_sync_for_device()
433 u64 first_page = iov_offset / PAGE_SIZE; in gve_dma_sync_for_device()
478 info->iov[hdr_nfrags - 1].iov_offset); in gve_tx_add_skb_copy()
481 tx->tx_fifo.base + info->iov[hdr_nfrags - 1].iov_offset, in gve_tx_add_skb_copy()
484 info->iov[hdr_nfrags - 1].iov_offset, in gve_tx_add_skb_copy()
499 info->iov[i].iov_offset); in gve_tx_add_skb_copy()
502 tx->tx_fifo.base + info->iov[i].iov_offset, in gve_tx_add_skb_copy()
[all …]
gve.h 238 u32 iov_offset; /* offset into this segment */ member
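
gve_tx.c lines 432-433 convert a byte range inside the TX FIFO into the page range that must be synced for DMA: the first page is iov_offset / PAGE_SIZE and the last page is the one containing the final byte, (iov_offset + iov_len - 1) / PAGE_SIZE. A small standalone rendering of that arithmetic, assuming 4 KiB pages purely for illustration:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096ULL   /* assumed page size for the example */

/* Page range covered by [iov_offset, iov_offset + iov_len), mirroring
 * the first_page/last_page computation in gve_dma_sync_for_device(). */
static void page_range(uint64_t iov_offset, uint64_t iov_len,
		       uint64_t *first_page, uint64_t *last_page)
{
	*first_page = iov_offset / PAGE_SIZE;
	*last_page  = (iov_offset + iov_len - 1) / PAGE_SIZE;
}

int main(void)
{
	uint64_t first, last;

	/* 100 bytes starting 20 bytes before a page boundary touch 2 pages. */
	page_range(PAGE_SIZE - 20, 100, &first, &last);
	printf("pages %llu..%llu\n",
	       (unsigned long long)first, (unsigned long long)last);
	return 0;
}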
/linux-6.1.9/drivers/infiniband/hw/hfi1/
user_sdma.c 708 u64 iov_offset = *iov_offset_ptr; in user_sdma_txadd() local
713 offset = offset_in_page(base + iovec->offset + iov_offset); in user_sdma_txadd()
714 pageidx = (((iovec->offset + iov_offset + base) - (base & PAGE_MASK)) >> in user_sdma_txadd()
725 iov_offset += len; in user_sdma_txadd()
730 iovec->offset += iov_offset; in user_sdma_txadd()
732 iov_offset = 0; in user_sdma_txadd()
737 *iov_offset_ptr = iov_offset; in user_sdma_txadd()
773 u64 iov_offset = 0; in user_sdma_send_pkts() local
873 &queued, &data_sent, &iov_offset); in user_sdma_send_pkts()
886 iovec->offset += iov_offset; in user_sdma_send_pkts()
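
In the hfi1 hits, iov_offset tracks how far the current packet has advanced into the user iovec; lines 713-714 turn that into a pinned-page index and an in-page offset by combining the user base address, the iovec's already-consumed offset, and iov_offset. The following is a hedged userspace rendering of that arithmetic; PAGE_SHIFT, PAGE_MASK, offset_in_page() and the iovec_offset parameter are defined locally for the example and stand in for the driver's fields.

#include <stdint.h>
#include <stddef.h>

#define PAGE_SHIFT 12                    /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

/* Offset of an address within its page, as the kernel helper does. */
static inline size_t offset_in_page(uintptr_t addr)
{
	return addr & ~PAGE_MASK;
}

/* Mirror of the user_sdma_txadd() lines above: find which pinned page
 * holds the next byte of the transfer and where inside that page it
 * starts. */
static void locate(uintptr_t base, uint64_t iovec_offset, uint64_t iov_offset,
		   size_t *pageidx, size_t *offset)
{
	*offset  = offset_in_page(base + iovec_offset + iov_offset);
	*pageidx = ((iovec_offset + iov_offset + base) - (base & PAGE_MASK))
		   >> PAGE_SHIFT;
}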
/linux-6.1.9/net/9p/
trans_virtio.c 351 p = data->kvec->iov_base + data->iov_offset; in p9_get_mapped_pages()
/linux-6.1.9/include/trace/events/
afs.h 965 __entry->offset = msg->msg_iter.xarray_start + msg->msg_iter.iov_offset;
989 __entry->offset = msg->msg_iter.xarray_start + msg->msg_iter.iov_offset;
/linux-6.1.9/io_uring/
net.c 1025 bi.bi_bvec_done = from->iov_offset; in io_sg_from_iter()
1044 from->iov_offset = bi.bi_bvec_done; in io_sg_from_iter()
rsrc.c 1362 iter->iov_offset = offset & ~PAGE_MASK; in io_import_fixed()
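
The io_uring hits show iov_offset doing two different jobs: net.c round-trips it through a bvec_iter's bi_bvec_done in io_sg_from_iter(), and rsrc.c's io_import_fixed() sets it to the sub-page remainder of the requested offset into a registered buffer. Since PAGE_MASK is ~(PAGE_SIZE - 1), the expression offset & ~PAGE_MASK is simply the offset modulo the page size; a tiny sketch of that split, assuming 4 KiB pages:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096UL
#define PAGE_MASK (~(PAGE_SIZE - 1))

int main(void)
{
	uint64_t offset = 10000;                    /* byte offset into the fixed buffer */
	uint64_t whole_pages = offset / PAGE_SIZE;  /* pages skipped before the start */
	uint64_t iov_offset = offset & ~PAGE_MASK;  /* remainder inside the next page */

	printf("skip %llu whole pages, then start %llu bytes in\n",
	       (unsigned long long)whole_pages, (unsigned long long)iov_offset);
	return 0;
}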
/linux-6.1.9/block/
bio.c 1145 bio->bi_iter.bi_bvec_done = iter->iov_offset; in bio_iov_bvec_set()
/linux-6.1.9/drivers/nvme/host/
tcp.c 242 return req->iter.bvec->bv_offset + req->iter.iov_offset; in nvme_tcp_req_cur_offset()
292 req->iter.iov_offset = offset; in nvme_tcp_init_iter()
/linux-6.1.9/net/ceph/
messenger_v2.c 153 bv.bv_offset = it->bvec->bv_offset + it->iov_offset; in do_try_sendpage()
155 it->bvec->bv_len - it->iov_offset); in do_try_sendpage()
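
The nvme/tcp.c and ceph messenger_v2.c hits show the same pattern for bvec-backed iterators: the live position is the fragment's own bv_offset plus the iterator's iov_offset, and the bytes left in that fragment are bv_len minus iov_offset. A minimal sketch of those two computations, using a simplified bvec stand-in rather than the kernel's struct bio_vec:

#include <stddef.h>

/* Simplified stand-in for struct bio_vec: a fragment of a page. */
struct mini_bvec {
	void *page;              /* backing page, opaque here */
	unsigned int bv_len;     /* fragment length */
	unsigned int bv_offset;  /* fragment start within the page */
};

/* Current offset within the page, as in nvme_tcp_req_cur_offset(). */
static unsigned int cur_offset(const struct mini_bvec *bv, size_t iov_offset)
{
	return bv->bv_offset + (unsigned int)iov_offset;
}

/* Bytes still unsent in this fragment, as in the do_try_sendpage() hit. */
static unsigned int cur_len(const struct mini_bvec *bv, size_t iov_offset)
{
	return bv->bv_len - (unsigned int)iov_offset;
}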
/linux-6.1.9/drivers/block/
loop.c 443 iter.iov_offset = offset; in lo_rw_aio()
/linux-6.1.9/fs/afs/
dir.c 198 req->iter->iov_offset, iov_iter_count(req->iter)); in afs_dir_dump()
/linux-6.1.9/fs/cifs/
file.c 3290 from->iov_offset, from->count); in cifs_write_from_iter()
4032 direct_iov.iov_offset, in cifs_send_async_read()
/linux-6.1.9/fs/fuse/
file.c 1383 return (unsigned long)ii->iov->iov_base + ii->iov_offset; in fuse_get_user_addr()