Searched refs: iov_offset (Results 1 – 17 of 17), sorted by relevance
/linux/lib/iov_iter.c
     190  skip = i->iov_offset;                       in copy_page_to_iter_iovec()
     253  i->iov_offset = skip;                       in copy_page_to_iter_iovec()
     352  if (i->iov_offset) {                        in sanity()
     403  off = i->iov_offset;                        in copy_page_to_iter_pipe()
     520  .iov_offset = 0,                            in iov_iter_init()
    1182  .iov_offset = 0,                            in iov_iter_kvec()
    1198  .iov_offset = 0,                            in iov_iter_bvec()
    1216  .iov_offset = 0,                            in iov_iter_pipe()
    1245  .iov_offset = 0                             in iov_iter_xarray()
    1266  .iov_offset = 0                             in iov_iter_discard()
    [all …]

/linux/include/linux/uio.h
      31  size_t iov_offset;                                      member
      40  size_t iov_offset;                                      member
      67  state->iov_offset = iter->iov_offset;                   in iov_iter_save_state()
     127  .iov_base = iter->iov->iov_base + iter->iov_offset,     in iov_iter_iovec()
     129  iter->iov->iov_len - iter->iov_offset),                 in iov_iter_iovec()

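The uio.h hits show iov_offset as a member of struct iov_iter, and the iov_iter_iovec() lines show how the current position is derived from it: the base pointer of the current segment is advanced by iov_offset and the remaining length is clamped against it. Below is a minimal user-space sketch of that bookkeeping, not kernel code; the toy_* names are invented for illustration, and the reset to zero on segment exhaustion mirrors the .iov_offset = 0 initialisers in the iov_iter.c hits above.

/* User-space sketch only: toy_iter stands in for struct iov_iter. */
#include <stddef.h>
#include <stdio.h>
#include <sys/uio.h>

struct toy_iter {
    const struct iovec *iov;  /* current segment */
    size_t nr_segs;           /* segments left, including the current one */
    size_t iov_offset;        /* bytes of the current segment already consumed */
    size_t count;             /* total bytes remaining in the iterator */
};

/* Mirrors the iov_iter_iovec() hits: base advanced by iov_offset, length
 * clamped to whatever is left in this segment and in the iterator. */
static struct iovec toy_iter_iovec(const struct toy_iter *i)
{
    size_t seg_left = i->iov->iov_len - i->iov_offset;
    struct iovec v = {
        .iov_base = (char *)i->iov->iov_base + i->iov_offset,
        .iov_len  = i->count < seg_left ? i->count : seg_left,
    };
    return v;
}

/* Consume 'bytes', stepping to the next segment when one is exhausted. */
static void toy_iter_advance(struct toy_iter *i, size_t bytes)
{
    while (bytes) {
        size_t seg_left = i->iov->iov_len - i->iov_offset;
        size_t step = bytes < seg_left ? bytes : seg_left;

        i->iov_offset += step;
        i->count -= step;
        bytes -= step;
        if (i->iov_offset == i->iov->iov_len) {
            i->iov++;
            i->nr_segs--;
            i->iov_offset = 0;  /* fresh segment, nothing consumed yet */
        }
    }
}

int main(void)
{
    char a[8], b[8];
    struct iovec segs[2] = { { a, sizeof(a) }, { b, sizeof(b) } };
    struct toy_iter it = { segs, 2, 0, sizeof(a) + sizeof(b) };

    toy_iter_advance(&it, 5);
    struct iovec cur = toy_iter_iovec(&it);
    printf("iov_offset=%zu, current run=%zu bytes\n",
           it.iov_offset, cur.iov_len);  /* prints 5 and 3 */
    return 0;
}
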
/linux/drivers/net/ethernet/google/gve/gve_tx.c
      96  iov[0].iov_offset = fifo->head;                              in gve_tx_alloc_fifo()
     107  iov[1].iov_offset = 0; /* Start of fifo*/                    in gve_tx_alloc_fifo()
     414  u64 iov_offset, u64 iov_len)                                 in gve_dma_sync_for_device()  argument
     416  u64 last_page = (iov_offset + iov_len - 1) / PAGE_SIZE;      in gve_dma_sync_for_device()
     417  u64 first_page = iov_offset / PAGE_SIZE;                     in gve_dma_sync_for_device()
     461  info->iov[hdr_nfrags - 1].iov_offset);                       in gve_tx_add_skb_copy()
     464  tx->tx_fifo.base + info->iov[hdr_nfrags - 1].iov_offset,     in gve_tx_add_skb_copy()
     467  info->iov[hdr_nfrags - 1].iov_offset,                        in gve_tx_add_skb_copy()
     477  info->iov[i].iov_offset);                                    in gve_tx_add_skb_copy()
     480  tx->tx_fifo.base + info->iov[i].iov_offset,                  in gve_tx_add_skb_copy()
    [all …]

/linux/drivers/net/ethernet/google/gve/gve.h
     237  u32 iov_offset; /* offset into this segment */               member

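The gve_dma_sync_for_device() hits above contain the whole calculation the driver needs: a FIFO segment described by (iov_offset, iov_len) touches the pages from iov_offset / PAGE_SIZE through (iov_offset + iov_len - 1) / PAGE_SIZE inclusive. A self-contained sketch of that arithmetic, assuming a 4 KiB page size purely for illustration; TOY_PAGE_SIZE and pages_touched() are invented names, not kernel API.

#include <stdint.h>
#include <stdio.h>

#define TOY_PAGE_SIZE 4096u  /* assumed page size for the example */

/* Page range covered by a segment, as in the gve_dma_sync_for_device() hits. */
static void pages_touched(uint64_t iov_offset, uint64_t iov_len,
                          uint64_t *first_page, uint64_t *last_page)
{
    *first_page = iov_offset / TOY_PAGE_SIZE;
    *last_page  = (iov_offset + iov_len - 1) / TOY_PAGE_SIZE;
}

int main(void)
{
    uint64_t first, last;

    /* 100 bytes starting 20 bytes before a page boundary straddle two pages. */
    pages_touched(TOY_PAGE_SIZE - 20, 100, &first, &last);
    printf("pages %llu..%llu\n",
           (unsigned long long)first, (unsigned long long)last);  /* 0..1 */
    return 0;
}
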
/linux/drivers/infiniband/hw/hfi1/user_sdma.c
     710  u64 iov_offset = *iov_offset_ptr;                              in user_sdma_txadd()  local
     715  offset = offset_in_page(base + iovec->offset + iov_offset);    in user_sdma_txadd()
     716  pageidx = (((iovec->offset + iov_offset + base) - (base & PAGE_MASK)) >>    in user_sdma_txadd()
     727  iov_offset += len;                                             in user_sdma_txadd()
     732  iovec->offset += iov_offset;                                   in user_sdma_txadd()
     734  iov_offset = 0;                                                in user_sdma_txadd()
     739  *iov_offset_ptr = iov_offset;                                  in user_sdma_txadd()
     775  u64 iov_offset = 0;                                            in user_sdma_send_pkts()  local
     875  &queued, &data_sent, &iov_offset);                             in user_sdma_send_pkts()
     888  iovec->offset += iov_offset;                                   in user_sdma_send_pkts()

/linux/include/trace/events/afs.h
     965  __entry->offset = msg->msg_iter.xarray_start + msg->msg_iter.iov_offset;
     989  __entry->offset = msg->msg_iter.xarray_start + msg->msg_iter.iov_offset;

/linux/net/9p/trans_virtio.c
     351  p = data->kvec->iov_base + data->iov_offset;                 in p9_get_mapped_pages()

/linux/block/bio.c
    1064  bio->bi_iter.bi_bvec_done = iter->iov_offset;                in bio_iov_bvec_set()

/linux/fs/splice.c
     317  to.iov_offset = 0;                                           in generic_file_splice_read()

/linux/fs/io_uring.c
    3060  iter->iov_offset = offset & ~PAGE_MASK;                      in __io_import_fixed()

/linux/drivers/nvme/host/tcp.c
     201  return req->iter.bvec->bv_offset + req->iter.iov_offset;     in nvme_tcp_req_cur_offset()
     251  req->iter.iov_offset = offset;                               in nvme_tcp_init_iter()

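The two tcp.c hits use iov_offset in both directions: nvme_tcp_init_iter() writes it so a bvec-backed iterator starts partway into its first segment, and nvme_tcp_req_cur_offset() reads it back, adding it to the current segment's bv_offset to find the in-page position of the next byte. A plain user-space sketch of that relationship follows; toy_bvec and toy_iter are invented stand-ins, not the kernel types.

/* User-space sketch only; none of this is the kernel API. */
#include <stddef.h>
#include <stdio.h>

struct toy_bvec {
    size_t bv_offset;  /* where the segment begins within its page */
    size_t bv_len;     /* length of the segment */
};

struct toy_iter {
    const struct toy_bvec *bvec;  /* current segment */
    size_t iov_offset;            /* bytes of it already consumed */
};

/* Start partway into the first segment, like the iov_offset = offset hit. */
static void toy_init_iter(struct toy_iter *it, const struct toy_bvec *bv,
                          size_t offset)
{
    it->bvec = bv;
    it->iov_offset = offset;
}

/* In-page offset of the next byte, like the bv_offset + iov_offset hit. */
static size_t toy_cur_offset(const struct toy_iter *it)
{
    return it->bvec->bv_offset + it->iov_offset;
}

int main(void)
{
    struct toy_bvec bv = { .bv_offset = 512, .bv_len = 2048 };
    struct toy_iter it;

    toy_init_iter(&it, &bv, 300);  /* resume 300 bytes into the segment */
    printf("next byte at page offset %zu\n", toy_cur_offset(&it));  /* 812 */
    return 0;
}
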
/linux/fs/gfs2/file.c
     797  p = i->iov[0].iov_base + i->iov_offset;                      in should_fault_in_pages()

/linux/net/ceph/messenger_v2.c
     152  bv.bv_offset = it->bvec->bv_offset + it->iov_offset;         in do_try_sendpage()
     154  it->bvec->bv_len - it->iov_offset);                          in do_try_sendpage()

/linux/drivers/block/loop.c
     442  iter.iov_offset = offset;                                    in lo_rw_aio()

/linux/fs/afs/dir.c
     197  req->iter->iov_offset, iov_iter_count(req->iter));           in afs_dir_dump()

/linux/fs/cifs/file.c
    3022  from->iov_offset, from->count);                              in cifs_write_from_iter()
    3754  direct_iov.iov_offset,                                       in cifs_send_async_read()

/linux/fs/fuse/file.c
    1361  return (unsigned long)ii->iov->iov_base + ii->iov_offset;    in fuse_get_user_addr()

Completed in 75 milliseconds