/drivers/net/ethernet/google/gve/
  gve_tx.c:
      96  iov[0].iov_len = bytes;  in gve_tx_alloc_fifo()
     105  iov[0].iov_len -= overflow;  in gve_tx_alloc_fifo()
     107  iov[1].iov_len = overflow;  in gve_tx_alloc_fifo()
     398  u64 iov_offset, u64 iov_len)  in gve_dma_sync_for_device() argument
     400  u64 last_page = (iov_offset + iov_len - 1) / PAGE_SIZE;  in gve_dma_sync_for_device()
     456  info->iov[hdr_nfrags - 1].iov_len);  in gve_tx_add_skb()
     464  info->iov[i].iov_len,  in gve_tx_add_skb()
     469  info->iov[i].iov_len);  in gve_tx_add_skb()
     472  info->iov[i].iov_len);  in gve_tx_add_skb()
     473  copy_offset += info->iov[i].iov_len;  in gve_tx_add_skb()
     [all …]

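The gve_tx_alloc_fifo() hits above show the usual wrap-around split: when a requested region runs past the end of the TX FIFO, iov[0] is truncated at the buffer boundary and the overflow is carried into iov[1]. A minimal sketch of that pattern, with illustrative names rather than the driver's real layout:

    #include <linux/uio.h>

    /* Sketch: a region of `bytes` starting at `head` in a circular buffer
     * of `size` bytes lands in at most two iovec segments. */
    static int fifo_iov_fill(struct iovec iov[2], char *base,
                             size_t head, size_t size, size_t bytes)
    {
        size_t overflow = head + bytes > size ? head + bytes - size : 0;

        iov[0].iov_base = base + head;
        iov[0].iov_len = bytes;
        if (overflow) {
            iov[0].iov_len -= overflow;  /* first segment stops at the end */
            iov[1].iov_base = base;      /* second segment wraps to offset 0 */
            iov[1].iov_len = overflow;
            return 2;
        }
        return 1;
    }
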
/drivers/usb/usbip/
  stub_tx.c:
     218  iov[iovnum].iov_len = sizeof(pdu_header);  in stub_send_ret_submit()
     232  iov[iovnum].iov_len =  in stub_send_ret_submit()
     254  iov[iovnum].iov_len = size;  in stub_send_ret_submit()
     261  iov[iovnum].iov_len = urb->actual_length;  in stub_send_ret_submit()
     280  iov[iovnum].iov_len =  in stub_send_ret_submit()
     311  iov[iovnum].iov_len = len;  in stub_send_ret_submit()
     388  iov[0].iov_len = sizeof(pdu_header);  in stub_send_ret_unlink()

  vhci_tx.c:
     100  iov[iovnum].iov_len = sizeof(pdu_header);  in vhci_send_cmd_submit()
     110  iov[iovnum].iov_len = sg->length;  in vhci_send_cmd_submit()
     115  iov[iovnum].iov_len =  in vhci_send_cmd_submit()
     134  iov[iovnum].iov_len = len;  in vhci_send_cmd_submit()
     216  iov.iov_len = sizeof(pdu_header);  in vhci_send_cmd_unlink()

  vudc_tx.c:
      57  iov[0].iov_len = sizeof(pdu_header);  in v_send_ret_unlink()
     115  iov[iovnum].iov_len = sizeof(pdu_header);  in v_send_ret_submit()
     123  iov[iovnum].iov_len = urb->actual_length;  in v_send_ret_submit()
     134  iov[iovnum].iov_len =  in v_send_ret_submit()
     161  iov[iovnum].iov_len = len;  in v_send_ret_submit()

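All three usbip senders build the same message shape: iov[0] carries the fixed pdu_header, later entries carry the URB payload, and the array goes out in one call. A hedged sketch of that header-plus-payload send using the in-kernel kernel_sendmsg() helper; the function name and fields here are illustrative, not usbip's actual code:

    #include <linux/net.h>
    #include <linux/socket.h>
    #include <linux/uio.h>

    /* Hypothetical: send a fixed header followed by a payload as one message. */
    static int send_hdr_and_payload(struct socket *sock,
                                    void *hdr, size_t hdr_len,
                                    void *data, size_t data_len)
    {
        struct msghdr msg = { .msg_flags = MSG_NOSIGNAL };
        struct kvec iov[2] = {
            { .iov_base = hdr,  .iov_len = hdr_len  },
            { .iov_base = data, .iov_len = data_len },
        };

        /* The last argument to kernel_sendmsg() is the total byte count. */
        return kernel_sendmsg(sock, &msg, iov, 2, hdr_len + data_len);
    }
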
/drivers/staging/kpc2000/kpc_dma/
  fileops.c:
      19  unsigned int count_pages(unsigned long iov_base, size_t iov_len)  in count_pages() argument
      22  unsigned long last = ((iov_base + iov_len - 1) & PAGE_MASK) >> PAGE_SHIFT;  in count_pages()
      35  unsigned long iov_base, size_t iov_len)  in kpc_dma_transfer() argument
      65  acd->len = iov_len;  in kpc_dma_transfer()
      66  acd->page_count = count_pages(iov_base, iov_len);  in kpc_dma_transfer()
      92  …ages(&acd->sgt, acd->user_pages, acd->page_count, iov_base & (PAGE_SIZE - 1), iov_len, GFP_KERNEL);  in kpc_dma_transfer()

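count_pages() (line 22 above) computes how many pages the user buffer [iov_base, iov_base + iov_len) touches: the last byte's page index minus the first byte's, plus one. A standalone restatement of that arithmetic; only the `last` line appears in the hits, so the `first` computation is inferred from the usual PAGE_MASK/PAGE_SHIFT definitions:

    #include <linux/mm.h>  /* PAGE_MASK, PAGE_SHIFT */

    /* Pages touched by [iov_base, iov_base + iov_len); assumes iov_len > 0. */
    static unsigned int count_pages(unsigned long iov_base, size_t iov_len)
    {
        unsigned long first = (iov_base & PAGE_MASK) >> PAGE_SHIFT;
        unsigned long last  = ((iov_base + iov_len - 1) & PAGE_MASK) >> PAGE_SHIFT;

        return last - first + 1;
    }

    /* Example: a 2-byte buffer straddling a 4 KiB boundary
     * (iov_base = 0x0fff, iov_len = 2) touches 2 pages. */
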
/drivers/xen/xenbus/
  xenbus_xs.c:
     286  vec->iov_len = msg->len;  in xenbus_dev_request_and_reply()
     328  msg.len += iovec[i].iov_len;  in xs_talkv()
     363  iovec.iov_len = strlen(string) + 1;  in xs_single()
     491  iovec[0].iov_len = strlen(path) + 1;  in xenbus_write()
     493  iovec[1].iov_len = strlen(string);  in xenbus_write()
     663  iov[0].iov_len = strlen(path) + 1;  in xs_watch()
     665  iov[1].iov_len = strlen(token) + 1;  in xs_watch()
     676  iov[0].iov_len = strlen(path) + 1;  in xs_unwatch()
     678  iov[1].iov_len = strlen(token) + 1;  in xs_unwatch()

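Note the asymmetry in xenbus_write(): the path (line 491) is sent with its trailing NUL (strlen() + 1) while the value (line 493) is sent without it, whereas xs_watch()/xs_unwatch() send both path and token NUL-terminated. A sketch of the watch-style vector; the helper name is hypothetical:

    #include <linux/string.h>
    #include <linux/uio.h>

    /* Hypothetical helper: two NUL-terminated strings as a message body. */
    static void fill_watch_iov(struct kvec iov[2],
                               const char *path, const char *token)
    {
        iov[0].iov_base = (void *)path;
        iov[0].iov_len  = strlen(path) + 1;   /* '+ 1' keeps the NUL */
        iov[1].iov_base = (void *)token;
        iov[1].iov_len  = strlen(token) + 1;
    }
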
/drivers/infiniband/sw/siw/
  siw_qp.c:
     427  iov[0].iov_len = sizeof(*term);  in siw_send_terminate()
     491  iov[1].iov_len = sizeof(*rreq);  in siw_send_terminate()
     502  iov[1].iov_len =  in siw_send_terminate()
     505  iov[1].iov_len =  in siw_send_terminate()
     520  iov[1].iov_len =  in siw_send_terminate()
     523  iov[1].iov_len = sizeof(struct iwarp_send);  in siw_send_terminate()
     528  term->ctrl.mpa_len = cpu_to_be16(iov[1].iov_len);  in siw_send_terminate()
     549  iov[1].iov_len = sizeof(struct iwarp_ctrl_tagged);  in siw_send_terminate()
     551  iov[1].iov_len = sizeof(struct iwarp_ctrl_untagged);  in siw_send_terminate()
     562  iov[2].iov_len = sizeof(crc);  in siw_send_terminate()
     [all …]

  siw_qp_tx.c:
     299  .iov_len = c_tx->ctrl_len - c_tx->ctrl_sent };  in siw_tx_ctrl()
     448  iov[0].iov_len = hdr_len =  in siw_tx_hdt()
     474  iov[seg].iov_len = sge_len;  in siw_tx_hdt()
     509  iov[seg].iov_len = plen;  in siw_tx_hdt()
     571  iov[seg].iov_len = trl_len = MAX_TRAILER - (4 - c_tx->pad);  in siw_tx_hdt()
     574  iov[seg].iov_len = trl_len = MAX_TRAILER - c_tx->ctrl_sent;  in siw_tx_hdt()

/drivers/hv/
  channel.c:
     968  bufferlist[0].iov_len = sizeof(struct vmpacket_descriptor);  in vmbus_sendpacket()
     970  bufferlist[1].iov_len = bufferlen;  in vmbus_sendpacket()
     972  bufferlist[2].iov_len = (packetlen_aligned - packetlen);  in vmbus_sendpacket()
    1027  bufferlist[0].iov_len = descsize;  in vmbus_sendpacket_pagebuffer()
    1029  bufferlist[1].iov_len = bufferlen;  in vmbus_sendpacket_pagebuffer()
    1031  bufferlist[2].iov_len = (packetlen_aligned - packetlen);  in vmbus_sendpacket_pagebuffer()
    1065  bufferlist[0].iov_len = desc_size;  in vmbus_sendpacket_mpb_desc()
    1067  bufferlist[1].iov_len = bufferlen;  in vmbus_sendpacket_mpb_desc()
    1069  bufferlist[2].iov_len = (packetlen_aligned - packetlen);  in vmbus_sendpacket_mpb_desc()

  ring_buffer.c:
     282  totalbytes_towrite += kv_list[i].iov_len;  in hv_ringbuffer_write()
     316  kv_list[i].iov_len);  in hv_ringbuffer_write()

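hv_ringbuffer_write() first totals kv_list[i].iov_len over the whole list (line 282) to check for ring space before copying each segment (line 316). That reduction is the stock iov-length sum, sketched here as a hypothetical helper:

    #include <linux/uio.h>

    /* Hypothetical helper: total payload length of a kvec array. */
    static size_t kvec_total_len(const struct kvec *kv, unsigned int count)
    {
        size_t total = 0;
        unsigned int i;

        for (i = 0; i < count; i++)
            total += kv[i].iov_len;
        return total;
    }
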
/drivers/vhost/
  vsock.c:
     105  size_t iov_len, payload_len;  in vhost_transport_do_send_pkt() local
     150  iov_len = iov_length(&vq->iov[out], in);  in vhost_transport_do_send_pkt()
     151  if (iov_len < sizeof(pkt->hdr)) {  in vhost_transport_do_send_pkt()
     153  vq_err(vq, "Buffer len [%zu] too small\n", iov_len);  in vhost_transport_do_send_pkt()
     157  iov_iter_init(&iov_iter, READ, &vq->iov[out], in, iov_len);  in vhost_transport_do_send_pkt()
     163  if (payload_len > iov_len - sizeof(pkt->hdr))  in vhost_transport_do_send_pkt()
     164  payload_len = iov_len - sizeof(pkt->hdr);  in vhost_transport_do_send_pkt()

  vringh.c:
      91  partlen = min(iov->iov[iov->i].iov_len, len);  in vringh_iov_xfer()
      99  iov->iov[iov->i].iov_len -= partlen;  in vringh_iov_xfer()
     102  if (!iov->iov[iov->i].iov_len) {  in vringh_iov_xfer()
     104  iov->iov[iov->i].iov_len = iov->consumed;  in vringh_iov_xfer()
     382  iov->iov[iov->used].iov_len = len;  in __vringh_iov()
     703  BUILD_BUG_ON(offsetof(struct iovec, iov_len) !=  in vringh_getdesc_user()
     704  offsetof(struct kvec, iov_len));  in vringh_getdesc_user()
     707  BUILD_BUG_ON(sizeof(((struct iovec *)NULL)->iov_len)  in vringh_getdesc_user()
     708  != sizeof(((struct kvec *)NULL)->iov_len));  in vringh_getdesc_user()

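vringh_iov_xfer() walks the vector with an in-place consume cursor: each pass moves min(segment length, remaining bytes), shrinks iov_len, and steps to the next entry once a segment empties (the consumed length is restored afterwards, per line 104). A reduced sketch of that loop; the actual transfer is elided and the names are illustrative:

    #include <linux/minmax.h>
    #include <linux/uio.h>

    /* Consume `len` bytes from a kvec array in place, starting at segment *i. */
    static void kvec_consume(struct kvec *iov, unsigned int *i, size_t len)
    {
        while (len) {
            size_t partlen = min(iov[*i].iov_len, len);

            /* ...transfer partlen bytes at iov[*i].iov_base here... */
            iov[*i].iov_base += partlen;
            iov[*i].iov_len  -= partlen;
            len -= partlen;
            if (!iov[*i].iov_len)
                (*i)++;               /* segment drained, move on */
        }
    }
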
/drivers/nvme/target/
  tcp.c:
     314  u32 iov_len = min_t(u32, length, sg->length - sg_offset);  in nvmet_tcp_map_pdu_iovec() local
     317  iov->iov_len = iov_len;  in nvmet_tcp_map_pdu_iovec()
     319  length -= iov_len;  in nvmet_tcp_map_pdu_iovec()
     401  sg_init_one(&sg, iov->iov_base, iov->iov_len);  in nvmet_tcp_recv_ddgst()
     402  ahash_request_set_crypt(hash, &sg, NULL, iov->iov_len);  in nvmet_tcp_recv_ddgst()
     697  .iov_len = left  in nvmet_try_send_ddgst()
     706  ret = kernel_sendmsg(queue->sock, &msg, &iov, 1, iov.iov_len);  in nvmet_try_send_ddgst()
     884  iov.iov_len = sizeof(*icresp);  in nvmet_tcp_handle_icreq()
     885  ret = kernel_sendmsg(queue->sock, &msg, &iov, 1, iov.iov_len);  in nvmet_tcp_handle_icreq()
    1080  iov.iov_len = queue->left;  in nvmet_tcp_try_recv_pdu()
     [all …]

/drivers/mtd/
  mtdconcat.c:
     197  total_len += vecs[i].iov_len;  in concat_writev()
     226  if (size <= vecs_copy[entry_high].iov_len)  in concat_writev()
     228  size -= vecs_copy[entry_high++].iov_len;  in concat_writev()
     231  old_iov_len = vecs_copy[entry_high].iov_len;  in concat_writev()
     232  vecs_copy[entry_high].iov_len = size;  in concat_writev()
     237  vecs_copy[entry_high].iov_len = old_iov_len - size;  in concat_writev()

/drivers/infiniband/hw/qib/
  qib_user_sdma.c:
     581  iov[i].iov_base, iov[i].iov_len);  in qib_user_sdma_coalesce()
     587  mpage += iov[i].iov_len;  in qib_user_sdma_coalesce()
     588  len += iov[i].iov_len;  in qib_user_sdma_coalesce()
     608  const unsigned long len = iov->iov_len;  in qib_user_sdma_num_pages()
     729  iov[idx].iov_len, npages);  in qib_user_sdma_pin_pkt()
     832  len = iov[idx].iov_len;  in qib_user_sdma_queue_pkts()
     877  const size_t slen = iov[idx].iov_len;  in qib_user_sdma_queue_pkts()
     925  tidsmsize = iov[idx].iov_len;  in qib_user_sdma_queue_pkts()

  qib_common.h:
     490  __u64 iov_len;  member

/drivers/mtd/lpddr/
  lpddr_cmds.c:
     441  if (n > vec->iov_len - vec_seek)  in do_write_buffer()
     442  n = vec->iov_len - vec_seek;  in do_write_buffer()
     461  if (vec_seek == vec->iov_len) {  in do_write_buffer()
     639  vec.iov_len = len;  in lpddr_write_buffers()
     657  len += vecs[i].iov_len;  in lpddr_writev()

/drivers/xen/
  pvcalls-back.c:
     131  vec[0].iov_len = wanted;  in pvcalls_conn_back_read()
     135  vec[0].iov_len = array_size - masked_prod;  in pvcalls_conn_back_read()
     137  vec[1].iov_len = wanted - vec[0].iov_len;  in pvcalls_conn_back_read()
     190  vec[0].iov_len = size;  in pvcalls_conn_back_write()
     194  vec[0].iov_len = array_size - pvcalls_mask(cons, array_size);  in pvcalls_conn_back_write()
     196  vec[1].iov_len = size - vec[0].iov_len;  in pvcalls_conn_back_write()

/drivers/target/iscsi/
  iscsi_target.c:
     509  iov[niov++].iov_len = ISCSI_HDR_LEN;  in iscsit_xmit_nondatain_pdu()
     518  iov[0].iov_len += ISCSI_CRC_LEN;  in iscsit_xmit_nondatain_pdu()
     529  iov[niov++].iov_len = data_buf_len;  in iscsit_xmit_nondatain_pdu()
     534  iov[niov++].iov_len = padding;  in iscsit_xmit_nondatain_pdu()
     547  iov[niov++].iov_len = ISCSI_CRC_LEN;  in iscsit_xmit_nondatain_pdu()
     582  iov[iov_count++].iov_len = ISCSI_HDR_LEN;  in iscsit_xmit_datain_pdu()
     592  iov[0].iov_len += ISCSI_CRC_LEN;  in iscsit_xmit_datain_pdu()
     611  iov[iov_count++].iov_len = cmd->padding;  in iscsit_xmit_datain_pdu()
     625  iov[iov_count++].iov_len = ISCSI_CRC_LEN;  in iscsit_xmit_datain_pdu()
     917  iov[i].iov_len = cur_len;  in iscsit_map_iovec()
     [all …]

/drivers/infiniband/hw/hfi1/
  user_sdma.c:
     358  if (iovec[idx].iov_len < sizeof(info) + sizeof(req->hdr)) {  in hfi1_user_sdma_process_request()
     363  iovec[idx].iov_len, sizeof(info) + sizeof(req->hdr));  in hfi1_user_sdma_process_request()
     525  if (req->iovs[i].iov.iov_len == 0) {  in hfi1_user_sdma_process_request()
     529  req->data_len += req->iovs[i].iov.iov_len;  in hfi1_user_sdma_process_request()
     544  u16 ntids = iovec[idx].iov_len / sizeof(*req->tids);  in hfi1_user_sdma_process_request()
     800  if (READ_ONCE(iovec->offset) == iovec->iov.iov_len) {  in user_sdma_send_pkts()
    1638  from_this_iovec = iovec->iov.iov_len - iovec->offset;  in add_system_pages_to_sdma_packet()

/drivers/soc/qcom/
  qmi_interface.c:
     533  iv.iov_len = qmi->recv_buf_size;  in qmi_data_ready_work()
     538  iv.iov_len, MSG_DONTWAIT);  in qmi_data_ready_work()
     754  iv.iov_len = len;  in qmi_send_message()

/drivers/fsi/
  fsi-sbefifo.c:
     642  ffdc_iov.iov_len = SBEFIFO_MAX_FFDC_SIZE;  in sbefifo_collect_async_ffdc()
     739  resp_iov.iov_len = rbytes;  in sbefifo_submit()
     819  resp_iov.iov_len = len;  in sbefifo_user_read()

/drivers/media/dvb-core/
  dvb_net.c:
      64  c = crc32_be( c, iov[j].iov_base, iov[j].iov_len );  in iov_crc32()
     660  hexdump(iov[0].iov_base, iov[0].iov_len);  in dvb_net_ule_check_crc()
     661  hexdump(iov[1].iov_base, iov[1].iov_len);  in dvb_net_ule_check_crc()
     662  hexdump(iov[2].iov_base, iov[2].iov_len);  in dvb_net_ule_check_crc()

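iov_crc32() (line 64) folds a CRC across every segment of the vector, so dvb_net can check the ULE CRC without linearizing the frame first. A sketch of the same fold, assuming crc32_be() from <linux/crc32.h> and a kvec-style vector:

    #include <linux/crc32.h>
    #include <linux/uio.h>

    /* CRC-32/BE folded over every segment of a vector. */
    static u32 iov_crc32(u32 c, const struct kvec *iov, unsigned int cnt)
    {
        unsigned int j;

        for (j = 0; j < cnt; j++)
            c = crc32_be(c, iov[j].iov_base, iov[j].iov_len);
        return c;
    }
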
/drivers/block/rnbd/
  rnbd-clt.c:
     468  .iov_len = sizeof(msg)  in send_msg_close()
     549  .iov_len = sizeof(msg)  in send_msg_open()
     595  .iov_len = sizeof(msg)  in send_msg_sess_info()
    1033  .iov_len = sizeof(msg)  in rnbd_client_xfer_request()

/drivers/nvme/host/
  tcp.c:
    1088  .iov_len = NVME_TCP_DIGEST_LENGTH - req->offset  in nvme_tcp_try_send_ddgst()
    1096  ret = kernel_sendmsg(queue->sock, &msg, &iov, 1, iov.iov_len);  in nvme_tcp_try_send_ddgst()
    1310  iov.iov_len = sizeof(*icreq);  in nvme_tcp_init_connection()
    1311  ret = kernel_sendmsg(queue->sock, &msg, &iov, 1, iov.iov_len);  in nvme_tcp_init_connection()
    1317  iov.iov_len = sizeof(*icresp);  in nvme_tcp_init_connection()
    1319  iov.iov_len, msg.msg_flags);  in nvme_tcp_init_connection()