Searched refs:iov_len (Results 1 – 25 of 52) sorted by relevance

/drivers/staging/lustre/lnet/klnds/socklnd/
socklnd_lib-linux.c
107 nob += scratchiov[i].iov_len; in ksocknal_lib_send_iov()
172 nob += scratchiov[i].iov_len = kiov[i].kiov_len; in ksocknal_lib_send_kiov()
230 nob += scratchiov[i].iov_len; in ksocknal_lib_recv_iov()
248 fragnob = iov[i].iov_len; in ksocknal_lib_recv_iov()
301 iov->iov_len = nob; in ksocknal_lib_kiov_vmap()
339 nob = scratchiov[0].iov_len; in ksocknal_lib_recv_kiov()
344 nob += scratchiov[i].iov_len = kiov[i].kiov_len; in ksocknal_lib_recv_kiov()
400 tx->tx_iov[0].iov_len); in ksocknal_lib_csum_tx()
414 tx->tx_iov[i].iov_len); in ksocknal_lib_csum_tx()
socklnd_cb.c
133 if (nob < (int) iov->iov_len) { in ksocknal_send_iov()
135 iov->iov_len -= nob; in ksocknal_send_iov()
139 nob -= iov->iov_len; in ksocknal_send_iov()
282 if (nob < (int)iov->iov_len) { in ksocknal_recv_iov()
283 iov->iov_len -= nob; in ksocknal_recv_iov()
288 nob -= iov->iov_len; in ksocknal_recv_iov()
1059 conn->ksnc_rx_iov[0].iov_len = offsetof(ksock_msg_t, ksm_u); in ksocknal_new_packet()
1070 conn->ksnc_rx_iov[0].iov_len = sizeof (lnet_hdr_t); in ksocknal_new_packet()
1097 conn->ksnc_rx_iov[niov].iov_len = nob; in ksocknal_new_packet()
1222 conn->ksnc_rx_iov[0].iov_len = sizeof(ksock_lnet_msg_t); in ksocknal_process_receive()
socklnd_proto.c
722 tx->tx_iov[0].iov_len = sizeof(lnet_hdr_t); in ksocknal_pack_msg_v1()
736 tx->tx_iov[0].iov_len = sizeof(ksock_msg_t); in ksocknal_pack_msg_v2()
741 tx->tx_iov[0].iov_len = offsetof(ksock_msg_t, ksm_u.lnetmsg.ksnm_hdr); in ksocknal_pack_msg_v2()
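
The ksocknal_send_iov() and ksocknal_recv_iov() hits in socklnd_cb.c above show the usual partial-transfer bookkeeping on an iovec array: when fewer bytes moved than the current fragment holds, the fragment is shrunk in place; otherwise the whole fragment is consumed and the loop advances. A minimal userspace sketch of the same pattern, using writev(2) instead of the socklnd transmit path; the iov_advance() helper and the two sample fragments are illustrative, not taken from the driver:

#include <stdio.h>
#include <string.h>
#include <sys/uio.h>
#include <unistd.h>

/* Advance an iovec array by "nob" bytes that were just transmitted,
 * mirroring the bookkeeping in ksocknal_send_iov(): shrink the current
 * fragment on a partial transfer, drop fully-sent fragments otherwise. */
static void iov_advance(struct iovec **piov, int *pniov, size_t nob)
{
    struct iovec *iov = *piov;
    int niov = *pniov;

    while (nob > 0 && niov > 0) {
        if (nob < iov->iov_len) {
            /* partial fragment: move base forward, shrink len */
            iov->iov_base = (char *)iov->iov_base + nob;
            iov->iov_len -= nob;
            break;
        }
        /* whole fragment consumed */
        nob -= iov->iov_len;
        iov++;
        niov--;
    }
    *piov = iov;
    *pniov = niov;
}

int main(void)
{
    char a[] = "first fragment ", b[] = "second fragment\n";
    struct iovec frags[2] = {
        { .iov_base = a, .iov_len = strlen(a) },
        { .iov_base = b, .iov_len = strlen(b) },
    };
    struct iovec *iov = frags;
    int niov = 2;

    while (niov > 0) {
        ssize_t nob = writev(STDOUT_FILENO, iov, niov);
        if (nob < 0) {
            perror("writev");
            return 1;
        }
        iov_advance(&iov, &niov, (size_t)nob);
    }
    return 0;
}
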
/drivers/usb/usbip/
vhci_tx.c
92 iov[0].iov_len = sizeof(pdu_header); in vhci_send_cmd_submit()
98 iov[1].iov_len = urb->transfer_buffer_length; in vhci_send_cmd_submit()
114 iov[2].iov_len = len; in vhci_send_cmd_submit()
184 iov[0].iov_len = sizeof(pdu_header); in vhci_send_cmd_unlink()
stub_tx.c
209 iov[iovnum].iov_len = sizeof(pdu_header); in stub_send_ret_submit()
218 iov[iovnum].iov_len = urb->actual_length; in stub_send_ret_submit()
236 iov[iovnum].iov_len = in stub_send_ret_submit()
267 iov[iovnum].iov_len = len; in stub_send_ret_submit()
344 iov[0].iov_len = sizeof(pdu_header); in stub_send_ret_unlink()
/drivers/hv/
channel.c
611 bufferlist[0].iov_len = sizeof(struct vmpacket_descriptor); in vmbus_sendpacket()
613 bufferlist[1].iov_len = bufferlen; in vmbus_sendpacket()
615 bufferlist[2].iov_len = (packetlen_aligned - packetlen); in vmbus_sendpacket()
674 bufferlist[0].iov_len = descsize; in vmbus_sendpacket_pagebuffer()
676 bufferlist[1].iov_len = bufferlen; in vmbus_sendpacket_pagebuffer()
678 bufferlist[2].iov_len = (packetlen_aligned - packetlen); in vmbus_sendpacket_pagebuffer()
737 bufferlist[0].iov_len = descsize; in vmbus_sendpacket_multipagebuffer()
739 bufferlist[1].iov_len = bufferlen; in vmbus_sendpacket_multipagebuffer()
741 bufferlist[2].iov_len = (packetlen_aligned - packetlen); in vmbus_sendpacket_multipagebuffer()
ring_buffer.c
344 totalbytes_towrite += kv_list[i].iov_len; in hv_ringbuffer_write()
373 kv_list[i].iov_len); in hv_ringbuffer_write()
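
The vmbus_sendpacket() variants in channel.c above always queue three buffers: a fixed descriptor, the caller's payload, and a short pad of (packetlen_aligned - packetlen) bytes, while hv_ringbuffer_write() simply sums kv_list[i].iov_len. A hedged userspace sketch of the same descriptor + payload + pad layout, written to stdout with writev(2) rather than the VMBus ring buffer; struct demo_desc and the 8-byte alignment are illustrative stand-ins, not the real vmpacket_descriptor or ring-buffer rules:

#include <stdint.h>
#include <stdio.h>
#include <sys/uio.h>
#include <unistd.h>

/* Illustrative stand-in for a packet descriptor; not the real
 * struct vmpacket_descriptor layout. */
struct demo_desc {
    uint16_t type;
    uint16_t offset8;
    uint16_t len8;
    uint16_t flags;
};

int main(void)
{
    static uint64_t pad;                   /* zeroed alignment padding */
    const char payload[] = "hello ring";
    struct demo_desc desc = { .type = 1 };

    size_t packetlen = sizeof(desc) + sizeof(payload);
    size_t packetlen_aligned = (packetlen + 7) & ~(size_t)7;  /* round up to 8 */

    struct iovec bufferlist[3];
    bufferlist[0].iov_base = &desc;
    bufferlist[0].iov_len  = sizeof(desc);
    bufferlist[1].iov_base = (void *)payload;
    bufferlist[1].iov_len  = sizeof(payload);
    bufferlist[2].iov_base = &pad;
    bufferlist[2].iov_len  = packetlen_aligned - packetlen;

    ssize_t n = writev(STDOUT_FILENO, bufferlist, 3);
    if (n < 0) {
        perror("writev");
        return 1;
    }
    fprintf(stderr, "wrote %zd of %zu aligned bytes\n", n, packetlen_aligned);
    return 0;
}
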
/drivers/xen/xenbus/
xenbus_xs.c
279 msg.len += iovec[i].iov_len; in xs_talkv()
290 err = xb_write(iovec[i].iov_base, iovec[i].iov_len); in xs_talkv()
328 iovec.iov_len = strlen(string) + 1; in xs_single()
456 iovec[0].iov_len = strlen(path) + 1; in xenbus_write()
458 iovec[1].iov_len = strlen(string); in xenbus_write()
622 iov[0].iov_len = strlen(path) + 1; in xs_watch()
624 iov[1].iov_len = strlen(token) + 1; in xs_watch()
635 iov[0].iov_len = strlen(path) + 1; in xs_unwatch()
637 iov[1].iov_len = strlen(token) + 1; in xs_unwatch()
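
The xenbus hits above show two details worth noting: xs_talkv() derives the message length by summing iovec[i].iov_len before writing each element, and the strings are sized with strlen() + 1 when the trailing NUL must travel with them (path, token) but with plain strlen() for a written value. A small userspace sketch of that sizing convention; iov_from_str() and the example path/value are made up for illustration:

#include <stdio.h>
#include <string.h>
#include <sys/uio.h>

/* Build an iovec over a C string, optionally including its trailing NUL,
 * the way the xenstore client code sizes path/token (with NUL) versus a
 * written value (without). */
static struct iovec iov_from_str(const char *s, int with_nul)
{
    struct iovec iov;

    iov.iov_base = (void *)s;
    iov.iov_len  = strlen(s) + (with_nul ? 1 : 0);
    return iov;
}

int main(void)
{
    const char *path  = "device/vif/0/state";   /* example path  */
    const char *value = "4";                    /* example value */
    struct iovec iovec[2];
    size_t len = 0;
    unsigned int i;

    iovec[0] = iov_from_str(path, 1);   /* strlen(path) + 1 */
    iovec[1] = iov_from_str(value, 0);  /* strlen(value)    */

    /* xs_talkv() computes the header length the same way: sum of iov_len */
    for (i = 0; i < 2; i++)
        len += iovec[i].iov_len;

    printf("message payload length: %zu bytes\n", len);
    return 0;
}
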
/drivers/target/iscsi/
iscsi_target.c
753 iov[i].iov_len = cur_len; in iscsit_map_iovec()
1410 iov[iov_count++].iov_len = padding; in iscsit_get_dataout()
1417 iov[iov_count++].iov_len = ISCSI_CRC_LEN; in iscsit_get_dataout()
1668 iov[niov++].iov_len = payload_length; in iscsit_handle_nop_out()
1675 iov[niov++].iov_len = padding; in iscsit_handle_nop_out()
1680 iov[niov++].iov_len = ISCSI_CRC_LEN; in iscsit_handle_nop_out()
2074 iov[niov++].iov_len = payload_length; in iscsit_handle_text_cmd()
2079 iov[niov++].iov_len = padding; in iscsit_handle_text_cmd()
2086 iov[niov++].iov_len = ISCSI_CRC_LEN; in iscsit_handle_text_cmd()
2413 iov[iov_count++].iov_len = padding; in iscsit_handle_immediate_data()
[all …]
iscsi_target_util.c
1204 iov.iov_len = tx_hdr_size; in iscsit_fe_sendpage_sg()
1331 int data = count->data_length, rx_loop = 0, total_rx = 0, iov_len; in iscsit_do_rx_data() local
1341 iov_len = count->iov_count; in iscsit_do_rx_data()
1344 rx_loop = kernel_recvmsg(conn->sock, &msg, iov_p, iov_len, in iscsit_do_rx_data()
1363 int ret, iov_len; in iscsit_do_tx_data() local
1378 iov_len = count->iov_count; in iscsit_do_tx_data()
1380 ret = kernel_sendmsg(conn->sock, &msg, iov_p, iov_len, in iscsit_do_tx_data()
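
Note the naming overlap in iscsi_target_util.c: the local variable iov_len there holds count->iov_count, i.e. the number of kvec entries handed to kernel_recvmsg()/kernel_sendmsg(), not a byte length. The same distinction exists in the userspace API, where msghdr.msg_iovlen is a vector count; a short sendmsg(2) sketch over a socketpair, with made-up header/payload fragments, shows it:

#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/uio.h>
#include <unistd.h>

int main(void)
{
    int sv[2];
    char hdr[] = "HDR ", body[] = "payload\n";
    char buf[64];

    if (socketpair(AF_UNIX, SOCK_STREAM, 0, sv) < 0) {
        perror("socketpair");
        return 1;
    }

    struct iovec iov[2] = {
        { .iov_base = hdr,  .iov_len = strlen(hdr)  },
        { .iov_base = body, .iov_len = strlen(body) },
    };
    struct msghdr msg = { 0 };
    msg.msg_iov    = iov;
    msg.msg_iovlen = 2;   /* number of vectors, not a byte count */

    if (sendmsg(sv[0], &msg, 0) < 0) {
        perror("sendmsg");
        return 1;
    }

    ssize_t n = read(sv[1], buf, sizeof(buf) - 1);
    if (n < 0) {
        perror("read");
        return 1;
    }
    buf[n] = '\0';
    printf("received %zd bytes: %s", n, buf);
    close(sv[0]);
    close(sv[1]);
    return 0;
}
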
/drivers/staging/lustre/lnet/lnet/
lib-move.c
166 nob += (iov++)->iov_len; in lnet_iov_nob()
185 while (doffset >= diov->iov_len) { in lnet_copy_iov2iov()
186 doffset -= diov->iov_len; in lnet_copy_iov2iov()
194 while (soffset >= siov->iov_len) { in lnet_copy_iov2iov()
195 soffset -= siov->iov_len; in lnet_copy_iov2iov()
204 this_nob = MIN(diov->iov_len - doffset, in lnet_copy_iov2iov()
205 siov->iov_len - soffset); in lnet_copy_iov2iov()
212 if (diov->iov_len > doffset + this_nob) { in lnet_copy_iov2iov()
220 if (siov->iov_len > soffset + this_nob) { in lnet_copy_iov2iov()
246 while (offset >= src->iov_len) { /* skip initial frags */ in lnet_extract_iov()
[all …]
lib-md.c
118 if (lmd->md_iov.iov[i].iov_len <= 0) in lnet_md_build()
121 total_length += lmd->md_iov.iov[i].iov_len; in lnet_md_build()
155 lmd->md_iov.iov[0].iov_len = umd->length; in lnet_md_build()
/drivers/vhost/
vringh.c
80 partlen = min(iov->iov[iov->i].iov_len, len); in vringh_iov_xfer()
88 iov->iov[iov->i].iov_len -= partlen; in vringh_iov_xfer()
91 if (!iov->iov[iov->i].iov_len) { in vringh_iov_xfer()
93 iov->iov[iov->i].iov_len = iov->consumed; in vringh_iov_xfer()
353 iov->iov[iov->used].iov_len = len; in __vringh_iov()
654 BUILD_BUG_ON(offsetof(struct iovec, iov_len) != in vringh_getdesc_user()
655 offsetof(struct kvec, iov_len)); in vringh_getdesc_user()
658 BUILD_BUG_ON(sizeof(((struct iovec *)NULL)->iov_len) in vringh_getdesc_user()
659 != sizeof(((struct kvec *)NULL)->iov_len)); in vringh_getdesc_user()
scsi.c
224 return (PAGE_ALIGN((unsigned long)iov->iov_base + iov->iov_len) - in iov_num_pages()
631 if ((vq->iov[out].iov_len != sizeof(struct virtio_scsi_event))) { in tcm_vhost_do_evt_work()
633 vq->iov[out].iov_len); in tcm_vhost_do_evt_work()
787 size_t len = iov->iov_len; in vhost_scsi_map_to_sgl()
1054 if (unlikely(vq->iov[out].iov_len != in vhost_scsi_handle_vq()
1057 " bytes\n", vq->iov[out].iov_len); in vhost_scsi_handle_vq()
1075 if (unlikely(vq->iov[0].iov_len < req_size)) { in vhost_scsi_handle_vq()
1077 req_size, vq->iov[0].iov_len); in vhost_scsi_handle_vq()
1129 tmp += vq->iov[data_first + i].iov_len; in vhost_scsi_handle_vq()
1150 exp_data_len += vq->iov[data_first + i].iov_len; in vhost_scsi_handle_vq()
net.c
245 size = min(from->iov_len, len); in move_iovec_hdr()
247 to->iov_len = size; in move_iovec_hdr()
248 from->iov_len -= size; in move_iovec_hdr()
265 size = min(from->iov_len, len); in copy_iovec_hdr()
267 to->iov_len = size; in copy_iovec_hdr()
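
The vringh_getdesc_user() hits above (vringh.c lines 654-659) are compile-time layout checks: struct iovec and struct kvec must agree on the offset and size of iov_len so the same code can walk either array. A userspace sketch of the same idea using C11 _Static_assert; struct demo_iovec and struct demo_kvec are illustrative mirrors defined here, not the kernel types:

#include <stddef.h>
#include <stdio.h>

/* Illustrative mirrors of the kernel's struct iovec / struct kvec;
 * only the field order and types matter here. */
struct demo_iovec {
    void  *iov_base;
    size_t iov_len;
};

struct demo_kvec {
    void  *iov_base;
    size_t iov_len;
};

/* Compile-time checks in the spirit of the BUILD_BUG_ON() calls in
 * vringh_getdesc_user(): the two types must agree on where iov_len
 * lives and how big it is, or treating one array as the other breaks. */
_Static_assert(offsetof(struct demo_iovec, iov_len) ==
               offsetof(struct demo_kvec, iov_len),
               "iov_len offset mismatch");
_Static_assert(sizeof(((struct demo_iovec *)NULL)->iov_len) ==
               sizeof(((struct demo_kvec *)NULL)->iov_len),
               "iov_len size mismatch");

int main(void)
{
    printf("demo_iovec and demo_kvec have compatible iov_len layout\n");
    return 0;
}
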
/drivers/infiniband/hw/ipath/
ipath_user_sdma.c
198 iov[i].iov_base, iov[i].iov_len); in ipath_user_sdma_coalesce()
204 mpage += iov[i].iov_len; in ipath_user_sdma_coalesce()
205 len += iov[i].iov_len; in ipath_user_sdma_coalesce()
232 const unsigned long len = iov->iov_len; in ipath_user_sdma_num_pages()
335 addr, iov[idx].iov_len, in ipath_user_sdma_pin_pkt()
420 len = iov[idx].iov_len; in ipath_user_sdma_queue_pkts()
482 const size_t slen = iov[idx].iov_len; in ipath_user_sdma_queue_pkts()
/drivers/misc/mic/host/
mic_virtio.c
198 total += iov->iov[i].iov_len; in mic_vringh_iov_consumed()
217 partlen = min(iov->iov[iov->i].iov_len, len); in mic_vringh_copy()
221 iov->iov[iov->i].iov_len, in mic_vringh_copy()
226 iov->iov[iov->i].iov_len, in mic_vringh_copy()
237 iov->iov[iov->i].iov_len -= partlen; in mic_vringh_copy()
239 if (!iov->iov[iov->i].iov_len) { in mic_vringh_copy()
241 iov->iov[iov->i].iov_len = iov->consumed; in mic_vringh_copy()
291 len = iov.iov_len; in _mic_virtio_copy()
/drivers/mtd/
mtdconcat.c
170 total_len += vecs[i].iov_len; in concat_writev()
199 if (size <= vecs_copy[entry_high].iov_len) in concat_writev()
201 size -= vecs_copy[entry_high++].iov_len; in concat_writev()
204 old_iov_len = vecs_copy[entry_high].iov_len; in concat_writev()
205 vecs_copy[entry_high].iov_len = size; in concat_writev()
210 vecs_copy[entry_high].iov_len = old_iov_len - size; in concat_writev()
mtdcore.c
1094 if (!vecs[i].iov_len) in default_mtd_writev()
1096 ret = mtd_write(mtd, to, vecs[i].iov_len, &thislen, in default_mtd_writev()
1099 if (ret || thislen != vecs[i].iov_len) in default_mtd_writev()
1101 to += vecs[i].iov_len; in default_mtd_writev()
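
default_mtd_writev() in mtdcore.c falls back to writing one vector at a time: empty vectors are skipped, the loop stops on an error or short write, and the destination offset advances by each vector's iov_len. A hedged userspace emulation of that loop using pwrite(2); writev_emulated(), the temporary file path and the sample vectors are illustrative:

#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/uio.h>
#include <unistd.h>

/* Emulate a vectored write with per-vector pwrite() calls, the way
 * default_mtd_writev() falls back to per-vector mtd_write(): skip
 * empty vectors, stop on a failed or short write, and advance the
 * destination offset by each vector's iov_len. */
static int writev_emulated(int fd, off_t to, const struct iovec *vecs,
                           int count, size_t *retlen)
{
    int i;

    *retlen = 0;
    for (i = 0; i < count; i++) {
        ssize_t thislen;

        if (!vecs[i].iov_len)
            continue;
        thislen = pwrite(fd, vecs[i].iov_base, vecs[i].iov_len, to);
        if (thislen < 0)
            return -errno;
        *retlen += (size_t)thislen;
        if ((size_t)thislen != vecs[i].iov_len)
            break;
        to += vecs[i].iov_len;
    }
    return 0;
}

int main(void)
{
    char a[] = "vector one\n", b[] = "vector two\n";
    struct iovec vecs[3] = {
        { .iov_base = a,    .iov_len = strlen(a) },
        { .iov_base = NULL, .iov_len = 0         },  /* skipped */
        { .iov_base = b,    .iov_len = strlen(b) },
    };
    size_t written = 0;
    int fd = open("/tmp/writev_demo.txt", O_CREAT | O_WRONLY | O_TRUNC, 0644);

    if (fd < 0 || writev_emulated(fd, 0, vecs, 3, &written) < 0) {
        perror("writev_emulated");
        return 1;
    }
    printf("wrote %zu bytes\n", written);
    close(fd);
    return 0;
}
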
/drivers/infiniband/hw/qib/
qib_user_sdma.c
584 iov[i].iov_base, iov[i].iov_len); in qib_user_sdma_coalesce()
590 mpage += iov[i].iov_len; in qib_user_sdma_coalesce()
591 len += iov[i].iov_len; in qib_user_sdma_coalesce()
611 const unsigned long len = iov->iov_len; in qib_user_sdma_num_pages()
732 iov[idx].iov_len, npages); in qib_user_sdma_pin_pkt()
835 len = iov[idx].iov_len; in qib_user_sdma_queue_pkts()
880 const size_t slen = iov[idx].iov_len; in qib_user_sdma_queue_pkts()
924 tidsmsize = iov[idx].iov_len; in qib_user_sdma_queue_pkts()
/drivers/mtd/lpddr/
lpddr_cmds.c
427 if (n > vec->iov_len - vec_seek) in do_write_buffer()
428 n = vec->iov_len - vec_seek; in do_write_buffer()
447 if (vec_seek == vec->iov_len) { in do_write_buffer()
625 vec.iov_len = len; in lpddr_write_buffers()
643 len += vecs[i].iov_len; in lpddr_writev()
/drivers/staging/lustre/lustre/libcfs/linux/
linux-tcpip.c
267 .iov_len = nob in libcfs_sock_write()
329 .iov_len = nob in libcfs_sock_read()
/drivers/target/
target_core_file.c
337 iov[i].iov_len = sg->length; in fd_do_rw()
517 iov[i].iov_len = min_t(unsigned int, len_tmp, PAGE_SIZE); in fd_execute_write_same()
518 len_tmp -= iov[i].iov_len; in fd_execute_write_same()
/drivers/media/dvb-core/
dvb_net.c
82 c = crc32_be( c, iov[j].iov_base, iov[j].iov_len ); in iov_crc32()
634 hexdump( iov[0].iov_base, iov[0].iov_len ); in dvb_net_ule()
635 hexdump( iov[1].iov_base, iov[1].iov_len ); in dvb_net_ule()
636 hexdump( iov[2].iov_base, iov[2].iov_len ); in dvb_net_ule()
/drivers/block/
nvme-scsi.c
386 xfer_len = min(remaining, sgl.iov_len); in nvme_trans_copy_to_user()
427 xfer_len = min(remaining, sgl.iov_len); in nvme_trans_copy_from_user()
2069 unit_len = sgl.iov_len; in nvme_trans_do_nvme_io()
2175 sum_iov_len += sgl.iov_len; in nvme_trans_io()
2177 if (sgl.iov_len % (1 << ns->lba_shift) != 0) { in nvme_trans_io()
3048 compat_uint_t iov_len; member
3061 get_user(len, &iov32[i].iov_len) || in sg_build_iovec()
3063 put_user(len, &iov[i].iov_len)) in sg_build_iovec()
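
sg_build_iovec() in nvme-scsi.c converts a 32-bit compat iovec array, whose iov_len is a compat_uint_t, into the native layout by reading each field with get_user() and storing it with put_user(). A rough userspace analogue of that widening copy, operating on plain in-memory arrays instead of user pointers; struct compat_iovec32, iovec_from_compat() and the sample addresses are illustrative:

#include <stdint.h>
#include <stdio.h>
#include <sys/uio.h>

/* Illustrative mirror of a 32-bit compat iovec layout: both fields are
 * 32 bits wide, so each one has to be widened into the native struct iovec. */
struct compat_iovec32 {
    uint32_t iov_base;   /* 32-bit user pointer */
    uint32_t iov_len;
};

static void iovec_from_compat(struct iovec *dst,
                              const struct compat_iovec32 *src, int count)
{
    int i;

    for (i = 0; i < count; i++) {
        dst[i].iov_base = (void *)(uintptr_t)src[i].iov_base;
        dst[i].iov_len  = src[i].iov_len;   /* zero-extended to size_t */
    }
}

int main(void)
{
    struct compat_iovec32 iov32[2] = {
        { .iov_base = 0x1000, .iov_len = 512  },   /* sample values */
        { .iov_base = 0x2000, .iov_len = 4096 },
    };
    struct iovec iov[2];

    iovec_from_compat(iov, iov32, 2);
    printf("native iov_len: %zu, %zu\n", iov[0].iov_len, iov[1].iov_len);
    return 0;
}
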
