/drivers/net/ethernet/google/gve/
D | gve_tx.c |
     97  iov[0].iov_len = bytes;  in gve_tx_alloc_fifo()
    106  iov[0].iov_len -= overflow;  in gve_tx_alloc_fifo()
    108  iov[1].iov_len = overflow;  in gve_tx_alloc_fifo()
    408  u64 iov_offset, u64 iov_len)  in gve_dma_sync_for_device()  (argument)
    410  u64 last_page = (iov_offset + iov_len - 1) / PAGE_SIZE;  in gve_dma_sync_for_device()
    460  info->iov[hdr_nfrags - 1].iov_len);  in gve_tx_add_skb_copy()
    468  info->iov[i].iov_len,  in gve_tx_add_skb_copy()
    473  info->iov[i].iov_len);  in gve_tx_add_skb_copy()
    476  info->iov[i].iov_len);  in gve_tx_add_skb_copy()
    477  copy_offset += info->iov[i].iov_len;  in gve_tx_add_skb_copy()
    [all …]

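The gve_tx_alloc_fifo() hits above (lines 97-108) show the classic wrap-around split: an allocation that runs past the end of a circular FIFO is described by two iovecs, the tail piece and the wrapped head piece. A minimal userspace sketch of that pattern; fifo_alloc_iov() and its parameters are hypothetical stand-ins, not gve code:

    #include <stddef.h>
    #include <sys/uio.h>

    /* Split an allocation of `bytes` at offset `head` in a circular FIFO
     * of `fifo_size` bytes into one or two iovecs; returns the count. */
    static int fifo_alloc_iov(char *fifo_base, size_t fifo_size,
                              size_t head, size_t bytes,
                              struct iovec iov[2])
    {
            size_t overflow = head + bytes > fifo_size ?
                              head + bytes - fifo_size : 0;

            iov[0].iov_base = fifo_base + head;
            iov[0].iov_len = bytes;
            if (!overflow)
                    return 1;               /* fits contiguously */

            iov[0].iov_len -= overflow;     /* piece up to the end */
            iov[1].iov_base = fifo_base;    /* wrapped piece at the start */
            iov[1].iov_len = overflow;
            return 2;
    }
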
/drivers/usb/usbip/
D | stub_tx.c |
    218  iov[iovnum].iov_len = sizeof(pdu_header);  in stub_send_ret_submit()
    232  iov[iovnum].iov_len =  in stub_send_ret_submit()
    254  iov[iovnum].iov_len = size;  in stub_send_ret_submit()
    261  iov[iovnum].iov_len = urb->actual_length;  in stub_send_ret_submit()
    280  iov[iovnum].iov_len =  in stub_send_ret_submit()
    311  iov[iovnum].iov_len = len;  in stub_send_ret_submit()
    388  iov[0].iov_len = sizeof(pdu_header);  in stub_send_ret_unlink()

D | vhci_tx.c |
    100  iov[iovnum].iov_len = sizeof(pdu_header);  in vhci_send_cmd_submit()
    110  iov[iovnum].iov_len = sg->length;  in vhci_send_cmd_submit()
    115  iov[iovnum].iov_len =  in vhci_send_cmd_submit()
    134  iov[iovnum].iov_len = len;  in vhci_send_cmd_submit()
    216  iov.iov_len = sizeof(pdu_header);  in vhci_send_cmd_unlink()

D | vudc_tx.c |
     57  iov[0].iov_len = sizeof(pdu_header);  in v_send_ret_unlink()
    115  iov[iovnum].iov_len = sizeof(pdu_header);  in v_send_ret_submit()
    123  iov[iovnum].iov_len = urb->actual_length;  in v_send_ret_submit()
    134  iov[iovnum].iov_len =  in v_send_ret_submit()
    161  iov[iovnum].iov_len = len;  in v_send_ret_submit()

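All three usbip senders above follow the same shape: iov[0] always carries the fixed-size PDU header, later entries carry URB data or status, and the whole array goes out in one vectored send. A hedged userspace sketch of that shape; struct pdu_header here is a stand-in, not the real usbip header layout:

    #include <sys/socket.h>
    #include <sys/uio.h>

    struct pdu_header { unsigned int command, seqnum; };    /* stand-in */

    /* Header in iov[0], optional payload in iov[1], one vectored send. */
    static ssize_t send_pdu(int fd, struct pdu_header *hdr,
                            void *data, size_t data_len)
    {
            struct iovec iov[2];
            struct msghdr msg = { 0 };

            iov[0].iov_base = hdr;
            iov[0].iov_len = sizeof(*hdr);
            iov[1].iov_base = data;
            iov[1].iov_len = data_len;      /* e.g. urb->actual_length */

            msg.msg_iov = iov;
            msg.msg_iovlen = data_len ? 2 : 1;
            return sendmsg(fd, &msg, 0);
    }
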
/drivers/xen/xenbus/
D | xenbus_xs.c |
    286  vec->iov_len = msg->len;  in xenbus_dev_request_and_reply()
    328  msg.len += iovec[i].iov_len;  in xs_talkv()
    363  iovec.iov_len = strlen(string) + 1;  in xs_single()
    491  iovec[0].iov_len = strlen(path) + 1;  in xenbus_write()
    493  iovec[1].iov_len = strlen(string);  in xenbus_write()
    663  iov[0].iov_len = strlen(path) + 1;  in xs_watch()
    665  iov[1].iov_len = strlen(token) + 1;  in xs_watch()
    676  iov[0].iov_len = strlen(path) + 1;  in xs_unwatch()
    678  iov[1].iov_len = strlen(token) + 1;  in xs_unwatch()

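Note the xenstore packing convention visible in these hits: xs_watch()/xs_unwatch() send each string with its trailing NUL (strlen() + 1), which acts as the field separator on the wire, while xenbus_write() sends the value itself without a terminator (line 493). A small sketch of the two conventions, with hypothetical helper names:

    #include <string.h>
    #include <sys/uio.h>

    /* Watch requests: both strings NUL-terminated on the wire. */
    static void pack_watch(struct iovec iov[2], char *path, char *token)
    {
            iov[0].iov_base = path;
            iov[0].iov_len = strlen(path) + 1;      /* NUL separates fields */
            iov[1].iov_base = token;
            iov[1].iov_len = strlen(token) + 1;
    }

    /* Write requests: the value is sent raw, without a terminator. */
    static void pack_write(struct iovec iov[2], char *path, char *value)
    {
            iov[0].iov_base = path;
            iov[0].iov_len = strlen(path) + 1;
            iov[1].iov_base = value;
            iov[1].iov_len = strlen(value);         /* no trailing NUL */
    }
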
/drivers/infiniband/sw/siw/
D | siw_qp.c |
    427  iov[0].iov_len = sizeof(*term);  in siw_send_terminate()
    491  iov[1].iov_len = sizeof(*rreq);  in siw_send_terminate()
    502  iov[1].iov_len =  in siw_send_terminate()
    505  iov[1].iov_len =  in siw_send_terminate()
    520  iov[1].iov_len =  in siw_send_terminate()
    523  iov[1].iov_len = sizeof(struct iwarp_send);  in siw_send_terminate()
    528  term->ctrl.mpa_len = cpu_to_be16(iov[1].iov_len);  in siw_send_terminate()
    549  iov[1].iov_len = sizeof(struct iwarp_ctrl_tagged);  in siw_send_terminate()
    551  iov[1].iov_len = sizeof(struct iwarp_ctrl_untagged);  in siw_send_terminate()
    562  iov[2].iov_len = sizeof(crc);  in siw_send_terminate()
    [all …]

D | siw_qp_tx.c |
    299  .iov_len = c_tx->ctrl_len - c_tx->ctrl_sent };  in siw_tx_ctrl()
    455  iov[0].iov_len = hdr_len =  in siw_tx_hdt()
    481  iov[seg].iov_len = sge_len;  in siw_tx_hdt()
    521  iov[seg].iov_len = plen;  in siw_tx_hdt()
    581  iov[seg].iov_len = trl_len = MAX_TRAILER - (4 - c_tx->pad);  in siw_tx_hdt()
    584  iov[seg].iov_len = trl_len = MAX_TRAILER - c_tx->ctrl_sent;  in siw_tx_hdt()

/drivers/vhost/
D | vringh.c |
     86  size_t partlen = min(iov->iov[iov->i].iov_len, len);  in vringh_kiov_advance()
     89  iov->iov[iov->i].iov_len -= partlen;  in vringh_kiov_advance()
     92  if (!iov->iov[iov->i].iov_len) {  in vringh_kiov_advance()
     94  iov->iov[iov->i].iov_len = iov->consumed;  in vringh_kiov_advance()
    119  partlen = min(iov->iov[iov->i].iov_len, len);  in vringh_iov_xfer()
    127  iov->iov[iov->i].iov_len -= partlen;  in vringh_iov_xfer()
    130  if (!iov->iov[iov->i].iov_len) {  in vringh_iov_xfer()
    132  iov->iov[iov->i].iov_len = iov->consumed;  in vringh_iov_xfer()
    410  iov->iov[iov->used].iov_len = len;  in __vringh_iov()
    734  BUILD_BUG_ON(offsetof(struct iovec, iov_len) !=  in vringh_getdesc_user()
    [all …]

D | vsock.c |
    115  size_t iov_len, payload_len;  in vhost_transport_do_send_pkt()  (local)
    161  iov_len = iov_length(&vq->iov[out], in);  in vhost_transport_do_send_pkt()
    162  if (iov_len < sizeof(pkt->hdr)) {  in vhost_transport_do_send_pkt()
    164  vq_err(vq, "Buffer len [%zu] too small\n", iov_len);  in vhost_transport_do_send_pkt()
    168  iov_iter_init(&iov_iter, READ, &vq->iov[out], in, iov_len);  in vhost_transport_do_send_pkt()
    174  if (payload_len > iov_len - sizeof(pkt->hdr)) {  in vhost_transport_do_send_pkt()
    175  payload_len = iov_len - sizeof(pkt->hdr);  in vhost_transport_do_send_pkt()

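The vringh_kiov_advance() hits show the standard consume loop for an iovec array: trim min(iov_len, len) from the current entry and step to the next entry when it empties. The kernel version additionally saves the consumed length so a drained entry can be restored later (lines 94 and 132); that bookkeeping is omitted in this minimal userspace sketch, whose kiov type is a stand-in:

    #include <stddef.h>
    #include <sys/uio.h>

    struct kiov { struct iovec *iov; size_t i, used; };     /* stand-in */

    /* Consume `len` bytes from the front of the iovec array, trimming
     * the current entry and stepping past it once it is drained. */
    static void kiov_advance(struct kiov *k, size_t len)
    {
            while (len && k->i < k->used) {
                    size_t partlen = k->iov[k->i].iov_len < len ?
                                     k->iov[k->i].iov_len : len;

                    k->iov[k->i].iov_base =
                            (char *)k->iov[k->i].iov_base + partlen;
                    k->iov[k->i].iov_len -= partlen;
                    len -= partlen;
                    if (!k->iov[k->i].iov_len)
                            k->i++;         /* entry empty, move on */
            }
    }
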
/drivers/hv/
D | channel.c |
    1036  bufferlist[0].iov_len = sizeof(struct vmpacket_descriptor);  in vmbus_sendpacket()
    1038  bufferlist[1].iov_len = bufferlen;  in vmbus_sendpacket()
    1040  bufferlist[2].iov_len = (packetlen_aligned - packetlen);  in vmbus_sendpacket()
    1095  bufferlist[0].iov_len = descsize;  in vmbus_sendpacket_pagebuffer()
    1097  bufferlist[1].iov_len = bufferlen;  in vmbus_sendpacket_pagebuffer()
    1099  bufferlist[2].iov_len = (packetlen_aligned - packetlen);  in vmbus_sendpacket_pagebuffer()
    1133  bufferlist[0].iov_len = desc_size;  in vmbus_sendpacket_mpb_desc()
    1135  bufferlist[1].iov_len = bufferlen;  in vmbus_sendpacket_mpb_desc()
    1137  bufferlist[2].iov_len = (packetlen_aligned - packetlen);  in vmbus_sendpacket_mpb_desc()

D | ring_buffer.c |
    285  totalbytes_towrite += kv_list[i].iov_len;  in hv_ringbuffer_write()
    319  kv_list[i].iov_len);  in hv_ringbuffer_write()

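vmbus_sendpacket() and its page-buffer variants all build the same three-element bufferlist: descriptor, caller payload, then zero padding of (packetlen_aligned - packetlen) bytes. A sketch of that construction, assuming the ring wants u64 (8-byte) alignment; build_packet_iov() and its buffers are hypothetical:

    #include <stdint.h>
    #include <sys/uio.h>

    #define ALIGN8(x) (((x) + 7) & ~(uint64_t)7)

    /* Descriptor, payload, then 0..7 bytes of zero padding so the total
     * packet length is u64-aligned in the ring. */
    static void build_packet_iov(struct iovec bl[3],
                                 void *desc, size_t desclen,
                                 void *payload, size_t buflen)
    {
            static const uint8_t pad[8];    /* zeroed filler */
            uint64_t packetlen = desclen + buflen;

            bl[0].iov_base = desc;
            bl[0].iov_len = desclen;
            bl[1].iov_base = payload;
            bl[1].iov_len = buflen;
            bl[2].iov_base = (void *)pad;
            bl[2].iov_len = ALIGN8(packetlen) - packetlen;
    }
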
/drivers/nvme/target/
D | tcp.c |
     326  u32 iov_len = min_t(u32, length, sg->length - sg_offset);  in nvmet_tcp_map_pdu_iovec()  (local)
     329  iov->iov_len = iov_len;  in nvmet_tcp_map_pdu_iovec()
     331  length -= iov_len;  in nvmet_tcp_map_pdu_iovec()
     413  sg_init_one(&sg, iov->iov_base, iov->iov_len);  in nvmet_tcp_recv_ddgst()
     414  ahash_request_set_crypt(hash, &sg, NULL, iov->iov_len);  in nvmet_tcp_recv_ddgst()
     709  .iov_len = left  in nvmet_try_send_ddgst()
     718  ret = kernel_sendmsg(queue->sock, &msg, &iov, 1, iov.iov_len);  in nvmet_try_send_ddgst()
     896  iov.iov_len = sizeof(*icresp);  in nvmet_tcp_handle_icreq()
     897  ret = kernel_sendmsg(queue->sock, &msg, &iov, 1, iov.iov_len);  in nvmet_tcp_handle_icreq()
    1092  iov.iov_len = queue->left;  in nvmet_tcp_try_recv_pdu()
    [all …]

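nvmet_tcp_map_pdu_iovec() (lines 326-331) clamps each iovec to what remains of the current scatterlist entry, so a PDU that spans entries is split across several iovecs. A userspace sketch of that clamping; the scatterlist type here is a stand-in for the kernel's struct scatterlist:

    #include <stddef.h>
    #include <sys/uio.h>

    struct sg_ent { char *buf; size_t length; };    /* scatterlist stand-in */

    /* Fill iovecs from a scatterlist: each entry contributes at most what
     * remains past sg_offset; the offset only applies to the first one. */
    static size_t map_pdu_iov(struct iovec *iov, size_t max_iov,
                              struct sg_ent *sg, size_t sg_offset,
                              size_t length)
    {
            size_t n = 0;

            while (length && n < max_iov) {
                    size_t iov_len = length < sg->length - sg_offset ?
                                     length : sg->length - sg_offset;

                    iov[n].iov_base = sg->buf + sg_offset;
                    iov[n].iov_len = iov_len;
                    length -= iov_len;
                    sg++;           /* next scatterlist entry */
                    sg_offset = 0;
                    n++;
            }
            return n;
    }
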
/drivers/mtd/
D | mtdconcat.c |
    197  total_len += vecs[i].iov_len;  in concat_writev()
    226  if (size <= vecs_copy[entry_high].iov_len)  in concat_writev()
    228  size -= vecs_copy[entry_high++].iov_len;  in concat_writev()
    231  old_iov_len = vecs_copy[entry_high].iov_len;  in concat_writev()
    232  vecs_copy[entry_high].iov_len = size;  in concat_writev()
    237  vecs_copy[entry_high].iov_len = old_iov_len - size;  in concat_writev()

/drivers/infiniband/hw/qib/
D | qib_user_sdma.c |
    581  iov[i].iov_base, iov[i].iov_len);  in qib_user_sdma_coalesce()
    587  mpage += iov[i].iov_len;  in qib_user_sdma_coalesce()
    588  len += iov[i].iov_len;  in qib_user_sdma_coalesce()
    608  const unsigned long len = iov->iov_len;  in qib_user_sdma_num_pages()
    729  iov[idx].iov_len, npages);  in qib_user_sdma_pin_pkt()
    832  len = iov[idx].iov_len;  in qib_user_sdma_queue_pkts()
    877  const size_t slen = iov[idx].iov_len;  in qib_user_sdma_queue_pkts()
    925  tidsmsize = iov[idx].iov_len;  in qib_user_sdma_queue_pkts()

D | qib_common.h |
    490  __u64 iov_len;  (member)

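qib_user_sdma_num_pages() takes iov->iov_len (line 608) and computes how many pages the user buffer touches, the same arithmetic as gve's last_page calculation earlier in the listing. A sketch of that page-span calculation, assuming 4 KiB pages (the macro name is chosen to avoid clashing with a system PAGE_SIZE definition):

    #include <stddef.h>

    #define MY_PAGE_SIZE 4096UL     /* assumed page size */

    /* Pages touched by [addr, addr + len), len >= 1: compare the page of
     * the first byte with the page of the last byte, inclusive. */
    static size_t num_pages(unsigned long addr, unsigned long len)
    {
            unsigned long first = addr / MY_PAGE_SIZE;
            unsigned long last = (addr + len - 1) / MY_PAGE_SIZE;

            return last - first + 1;
    }
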
/drivers/mtd/lpddr/
D | lpddr_cmds.c |
    441  if (n > vec->iov_len - vec_seek)  in do_write_buffer()
    442  n = vec->iov_len - vec_seek;  in do_write_buffer()
    461  if (vec_seek == vec->iov_len) {  in do_write_buffer()
    639  vec.iov_len = len;  in lpddr_write_buffers()
    657  len += vecs[i].iov_len;  in lpddr_writev()

/drivers/xen/
D | pvcalls-back.c |
    131  vec[0].iov_len = wanted;  in pvcalls_conn_back_read()
    135  vec[0].iov_len = array_size - masked_prod;  in pvcalls_conn_back_read()
    137  vec[1].iov_len = wanted - vec[0].iov_len;  in pvcalls_conn_back_read()
    190  vec[0].iov_len = size;  in pvcalls_conn_back_write()
    194  vec[0].iov_len = array_size - pvcalls_mask(cons, array_size);  in pvcalls_conn_back_write()
    196  vec[1].iov_len = size - vec[0].iov_len;  in pvcalls_conn_back_write()

/drivers/target/iscsi/
D | iscsi_target.c |
    509  iov[niov++].iov_len = ISCSI_HDR_LEN;  in iscsit_xmit_nondatain_pdu()
    518  iov[0].iov_len += ISCSI_CRC_LEN;  in iscsit_xmit_nondatain_pdu()
    529  iov[niov++].iov_len = data_buf_len;  in iscsit_xmit_nondatain_pdu()
    534  iov[niov++].iov_len = padding;  in iscsit_xmit_nondatain_pdu()
    547  iov[niov++].iov_len = ISCSI_CRC_LEN;  in iscsit_xmit_nondatain_pdu()
    582  iov[iov_count++].iov_len = ISCSI_HDR_LEN;  in iscsit_xmit_datain_pdu()
    592  iov[0].iov_len += ISCSI_CRC_LEN;  in iscsit_xmit_datain_pdu()
    611  iov[iov_count++].iov_len = cmd->padding;  in iscsit_xmit_datain_pdu()
    625  iov[iov_count++].iov_len = ISCSI_CRC_LEN;  in iscsit_xmit_datain_pdu()
    917  iov[i].iov_len = cur_len;  in iscsit_map_iovec()
    [all …]

/drivers/infiniband/hw/hfi1/
D | user_sdma.c |
     317  if (iovec[idx].iov_len < sizeof(info) + sizeof(req->hdr)) {  in hfi1_user_sdma_process_request()
     322  iovec[idx].iov_len, sizeof(info) + sizeof(req->hdr));  in hfi1_user_sdma_process_request()
     484  if (req->iovs[i].iov.iov_len == 0) {  in hfi1_user_sdma_process_request()
     488  req->data_len += req->iovs[i].iov.iov_len;  in hfi1_user_sdma_process_request()
     503  u16 ntids = iovec[idx].iov_len / sizeof(*req->tids);  in hfi1_user_sdma_process_request()
     759  if (READ_ONCE(iovec->offset) == iovec->iov.iov_len) {  in user_sdma_send_pkts()
    1597  from_this_iovec = iovec->iov.iov_len - iovec->offset;  in add_system_pages_to_sdma_packet()

/drivers/soc/qcom/
D | qmi_interface.c |
    533  iv.iov_len = qmi->recv_buf_size;  in qmi_data_ready_work()
    538  iv.iov_len, MSG_DONTWAIT);  in qmi_data_ready_work()
    754  iv.iov_len = len;  in qmi_send_message()

/drivers/fsi/
D | fsi-sbefifo.c |
    642  ffdc_iov.iov_len = SBEFIFO_MAX_FFDC_SIZE;  in sbefifo_collect_async_ffdc()
    739  resp_iov.iov_len = rbytes;  in sbefifo_submit()
    819  resp_iov.iov_len = len;  in sbefifo_user_read()

/drivers/media/dvb-core/
D | dvb_net.c |
     64  c = crc32_be( c, iov[j].iov_base, iov[j].iov_len );  in iov_crc32()
    660  hexdump(iov[0].iov_base, iov[0].iov_len);  in dvb_net_ule_check_crc()
    661  hexdump(iov[1].iov_base, iov[1].iov_len);  in dvb_net_ule_check_crc()
    662  hexdump(iov[2].iov_base, iov[2].iov_len);  in dvb_net_ule_check_crc()

/drivers/vdpa/vdpa_sim/
D | vdpa_sim_blk.c |
     84  if (vq->in_iov.iov[vq->in_iov.used - 1].iov_len < 1) {  in vdpasim_blk_handle_req()

/drivers/block/rnbd/
D | rnbd-clt.c |
     480  .iov_len = sizeof(msg)  in send_msg_close()
     559  .iov_len = sizeof(msg)  in send_msg_open()
     605  .iov_len = sizeof(msg)  in send_msg_sess_info()
    1044  .iov_len = sizeof(msg)  in rnbd_client_xfer_request()

/drivers/nvme/host/
D | tcp.c |
    1101  .iov_len = NVME_TCP_DIGEST_LENGTH - req->offset  in nvme_tcp_try_send_ddgst()
    1109  ret = kernel_sendmsg(queue->sock, &msg, &iov, 1, iov.iov_len);  in nvme_tcp_try_send_ddgst()
    1324  iov.iov_len = sizeof(*icreq);  in nvme_tcp_init_connection()
    1325  ret = kernel_sendmsg(queue->sock, &msg, &iov, 1, iov.iov_len);  in nvme_tcp_init_connection()
    1331  iov.iov_len = sizeof(*icresp);  in nvme_tcp_init_connection()
    1333  iov.iov_len, msg.msg_flags);  in nvme_tcp_init_connection()

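nvme_tcp_init_connection() (lines 1324-1333) is the simplest use in the listing: one iovec for a fixed-size ICReq sent with kernel_sendmsg(), then the same iovec repointed at the ICResp for the reply. A userspace sketch of the same handshake shape; the icreq/icresp structs here are stand-ins, not the real NVMe/TCP PDU definitions:

    #include <sys/socket.h>
    #include <sys/uio.h>

    struct icreq { char bytes[128]; };      /* stand-in PDU */
    struct icresp { char bytes[128]; };     /* stand-in PDU */

    /* Send the fixed-size request, then reuse the iovec for the reply. */
    static int connect_handshake(int fd, struct icreq *req,
                                 struct icresp *resp)
    {
            struct iovec iov = { .iov_base = req, .iov_len = sizeof(*req) };
            struct msghdr msg = { .msg_iov = &iov, .msg_iovlen = 1 };

            if (sendmsg(fd, &msg, 0) != (ssize_t)sizeof(*req))
                    return -1;

            iov.iov_base = resp;
            iov.iov_len = sizeof(*resp);    /* repoint at the response */
            if (recvmsg(fd, &msg, MSG_WAITALL) != (ssize_t)sizeof(*resp))
                    return -1;
            return 0;
    }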