
Searched refs:iov (Results 1 – 25 of 66) sorted by relevance

/drivers/pci/
iov.c
68 struct pci_sriov *iov = dev->sriov; in virtfn_add() local
71 mutex_lock(&iov->dev->sriov->lock); in virtfn_add()
82 pci_read_config_word(dev, iov->pos + PCI_SRIOV_VF_DID, &virtfn->device); in virtfn_add()
96 do_div(size, iov->total_VFs); in virtfn_add()
107 mutex_unlock(&iov->dev->sriov->lock); in virtfn_add()
127 mutex_lock(&iov->dev->sriov->lock); in virtfn_add()
132 mutex_unlock(&iov->dev->sriov->lock); in virtfn_add()
141 struct pci_sriov *iov = dev->sriov; in virtfn_remove() local
164 mutex_lock(&iov->dev->sriov->lock); in virtfn_remove()
167 mutex_unlock(&iov->dev->sriov->lock); in virtfn_remove()
[all …]
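
The virtfn_add() excerpt sizes each VF's share of the PF's VF BAR with do_div(). A minimal sketch of that arithmetic, with hypothetical names standing in for the kernel types:

/*
 * Hedged sketch of the step behind "do_div(size, iov->total_VFs)": the
 * PF exposes one BAR region covering every VF, so each VF's slice is
 * the total size divided by total_VFs. The kernel uses do_div() because
 * the dividend is a 64-bit resource size; plain division stands in here.
 */
#include <stdint.h>

static uint64_t per_vf_bar_size(uint64_t region_size, uint16_t total_VFs)
{
    return region_size / total_VFs;   /* assumes total_VFs != 0 */
}
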
/drivers/vhost/
vringh.c
70 static inline ssize_t vringh_iov_xfer(struct vringh_kiov *iov, in vringh_iov_xfer() argument
77 while (len && iov->i < iov->used) { in vringh_iov_xfer()
80 partlen = min(iov->iov[iov->i].iov_len, len); in vringh_iov_xfer()
81 err = xfer(iov->iov[iov->i].iov_base, ptr, partlen); in vringh_iov_xfer()
87 iov->consumed += partlen; in vringh_iov_xfer()
88 iov->iov[iov->i].iov_len -= partlen; in vringh_iov_xfer()
89 iov->iov[iov->i].iov_base += partlen; in vringh_iov_xfer()
91 if (!iov->iov[iov->i].iov_len) { in vringh_iov_xfer()
93 iov->iov[iov->i].iov_len = iov->consumed; in vringh_iov_xfer()
94 iov->iov[iov->i].iov_base -= iov->consumed; in vringh_iov_xfer()
[all …]
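
The vringh_iov_xfer() loop above is the canonical pattern for draining a length-bounded transfer across an iovec array. A hedged userspace sketch of the same loop; the names are illustrative, not the vringh API:

/*
 * Walk an iovec array, transfer min(remaining, current segment) bytes,
 * advance base/len of the segment, and track the total moved, exactly
 * as the vringh_iov_xfer() excerpt does for its kiov.
 */
#include <stddef.h>
#include <string.h>
#include <sys/uio.h>

static size_t iov_copy_out(struct iovec *iov, size_t niov, size_t *cur,
                           const void *src, size_t len)
{
    const char *p = src;
    size_t done = 0;

    while (len && *cur < niov) {
        size_t part = iov[*cur].iov_len < len ? iov[*cur].iov_len : len;

        memcpy(iov[*cur].iov_base, p, part);
        p += part;
        len -= part;
        done += part;
        iov[*cur].iov_base = (char *)iov[*cur].iov_base + part;
        iov[*cur].iov_len -= part;
        if (!iov[*cur].iov_len)
            (*cur)++;          /* segment exhausted, move to the next */
    }
    return done;
}
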
scsi.c
222 static int iov_num_pages(struct iovec *iov) in iov_num_pages() argument
224 return (PAGE_ALIGN((unsigned long)iov->iov_base + iov->iov_len) - in iov_num_pages()
225 ((unsigned long)iov->iov_base & PAGE_MASK)) >> PAGE_SHIFT; in iov_num_pages()
617 head = vhost_get_vq_desc(vq, vq->iov, in tcm_vhost_do_evt_work()
618 ARRAY_SIZE(vq->iov), &out, &in, in tcm_vhost_do_evt_work()
631 if ((vq->iov[out].iov_len != sizeof(struct virtio_scsi_event))) { in tcm_vhost_do_evt_work()
633 vq->iov[out].iov_len); in tcm_vhost_do_evt_work()
643 eventp = vq->iov[out].iov_base; in tcm_vhost_do_evt_work()
780 struct iovec *iov, in vhost_scsi_map_to_sgl() argument
786 void __user *ptr = iov->iov_base; in vhost_scsi_map_to_sgl()
[all …]
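
iov_num_pages() at line 222 is the standard page-span calculation: the page-aligned end of the buffer minus the page-aligned start, in page units. The same arithmetic restated for userspace, assuming a 4 KiB page size:

#include <stdint.h>

#define PAGE_SIZE  4096UL
#define PAGE_MASK  (~(PAGE_SIZE - 1))
#define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & PAGE_MASK)

/* Number of pages touched by [base, base + len). */
static unsigned long buf_num_pages(const void *base, unsigned long len)
{
    unsigned long start = (unsigned long)base;

    return (PAGE_ALIGN(start + len) - (start & PAGE_MASK)) / PAGE_SIZE;
}
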
net.c
345 .msg_iov = vq->iov, in handle_tx()
377 head = vhost_get_vq_desc(vq, vq->iov, in handle_tx()
378 ARRAY_SIZE(vq->iov), in handle_tx()
398 s = move_iovec_hdr(vq->iov, nvq->hdr, hdr_size, out); in handle_tx()
400 len = iov_length(vq->iov, out); in handle_tx()
509 r = vhost_get_vq_desc(vq, vq->iov + seg, in get_rx_bufs()
510 ARRAY_SIZE(vq->iov) - seg, &out, in get_rx_bufs()
531 heads[headcount].len = iov_length(vq->iov + seg, in); in get_rx_bufs()
565 .msg_iov = vq->iov, in handle_rx()
624 move_iovec_hdr(vq->iov, nvq->hdr, vhost_hlen, in); in handle_rx()
[all …]
/drivers/misc/mic/host/
mic_virtio.c
192 static inline u32 mic_vringh_iov_consumed(struct vringh_kiov *iov) in mic_vringh_iov_consumed() argument
195 u32 total = iov->consumed; in mic_vringh_iov_consumed()
197 for (i = 0; i < iov->i; i++) in mic_vringh_iov_consumed()
198 total += iov->iov[i].iov_len; in mic_vringh_iov_consumed()
209 static int mic_vringh_copy(struct mic_vdev *mvdev, struct vringh_kiov *iov, in mic_vringh_copy() argument
216 while (len && iov->i < iov->used) { in mic_vringh_copy()
217 partlen = min(iov->iov[iov->i].iov_len, len); in mic_vringh_copy()
220 (u64)iov->iov[iov->i].iov_base, in mic_vringh_copy()
221 iov->iov[iov->i].iov_len, in mic_vringh_copy()
225 (u64)iov->iov[iov->i].iov_base, in mic_vringh_copy()
[all …]
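
mic_vringh_iov_consumed() totals the bytes already drained from a vringh_kiov: the partial count in the current segment plus the full length of each completed one. A sketch with illustrative types (not the kernel's vringh_kiov):

#include <sys/uio.h>

struct kiov_state {
    struct iovec *iov;      /* segment array */
    unsigned int  i;        /* index of the current segment */
    unsigned int  consumed; /* bytes consumed in the current segment */
};

static unsigned int kiov_consumed(const struct kiov_state *st)
{
    unsigned int total = st->consumed;
    unsigned int k;

    for (k = 0; k < st->i; k++)
        total += st->iov[k].iov_len;   /* completed segments in full */
    return total;
}
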
/drivers/usb/usbip/
vhci_tx.c
69 struct kvec iov[3]; in vhci_send_cmd_submit() local
83 memset(&iov, 0, sizeof(iov)); in vhci_send_cmd_submit()
91 iov[0].iov_base = &pdu_header; in vhci_send_cmd_submit()
92 iov[0].iov_len = sizeof(pdu_header); in vhci_send_cmd_submit()
97 iov[1].iov_base = urb->transfer_buffer; in vhci_send_cmd_submit()
98 iov[1].iov_len = urb->transfer_buffer_length; in vhci_send_cmd_submit()
113 iov[2].iov_base = iso_buffer; in vhci_send_cmd_submit()
114 iov[2].iov_len = len; in vhci_send_cmd_submit()
118 ret = kernel_sendmsg(vdev->ud.tcp_socket, &msg, iov, 3, txsize); in vhci_send_cmd_submit()
158 struct kvec iov[3]; in vhci_send_cmd_unlink() local
[all …]
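
vhci_send_cmd_submit() gathers the PDU header, the URB transfer buffer, and an optional ISO descriptor buffer into a 3-element kvec for a single kernel_sendmsg(). A hedged userspace analogue using writev(2); the struct and field names are made up for illustration:

#include <string.h>
#include <sys/uio.h>

struct pdu_header { char bytes[48]; };   /* stand-in for the USB/IP header */

static ssize_t send_submit(int sock, struct pdu_header *hdr,
                           void *buf, size_t buflen,
                           void *iso, size_t isolen)
{
    struct iovec iov[3];

    memset(iov, 0, sizeof(iov));
    iov[0].iov_base = hdr;
    iov[0].iov_len  = sizeof(*hdr);
    iov[1].iov_base = buf;
    iov[1].iov_len  = buflen;
    iov[2].iov_base = iso;
    iov[2].iov_len  = isolen;    /* may be 0 when there is no ISO data */

    return writev(sock, iov, 3); /* one syscall, no intermediate copy */
}
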
stub_tx.c
174 struct kvec *iov = NULL; in stub_send_ret_submit() local
193 iov = kcalloc(iovnum, sizeof(struct kvec), GFP_KERNEL); in stub_send_ret_submit()
195 if (!iov) { in stub_send_ret_submit()
208 iov[iovnum].iov_base = &pdu_header; in stub_send_ret_submit()
209 iov[iovnum].iov_len = sizeof(pdu_header); in stub_send_ret_submit()
217 iov[iovnum].iov_base = urb->transfer_buffer; in stub_send_ret_submit()
218 iov[iovnum].iov_len = urb->actual_length; in stub_send_ret_submit()
234 iov[iovnum].iov_base = urb->transfer_buffer + in stub_send_ret_submit()
236 iov[iovnum].iov_len = in stub_send_ret_submit()
247 kfree(iov); in stub_send_ret_submit()
[all …]
/drivers/infiniband/hw/ipath/
ipath_user_sdma.c
177 const struct iovec *iov, in ipath_user_sdma_coalesce() argument
198 iov[i].iov_base, iov[i].iov_len); in ipath_user_sdma_coalesce()
204 mpage += iov[i].iov_len; in ipath_user_sdma_coalesce()
205 len += iov[i].iov_len; in ipath_user_sdma_coalesce()
229 static int ipath_user_sdma_num_pages(const struct iovec *iov) in ipath_user_sdma_num_pages() argument
231 const unsigned long addr = (unsigned long) iov->iov_base; in ipath_user_sdma_num_pages()
232 const unsigned long len = iov->iov_len; in ipath_user_sdma_num_pages()
324 const struct iovec *iov, in ipath_user_sdma_pin_pkt() argument
331 const int npages = ipath_user_sdma_num_pages(iov + idx); in ipath_user_sdma_pin_pkt()
332 const unsigned long addr = (unsigned long) iov[idx].iov_base; in ipath_user_sdma_pin_pkt()
[all …]
/drivers/net/ethernet/intel/fm10k/
fm10k_iov.c
84 hw->iov.ops.reset_resources(hw, vf_info); in fm10k_iov_event()
153 hw->iov.ops.reset_lport(hw, vf_info); in fm10k_iov_mbx()
157 hw->iov.ops.reset_resources(hw, vf_info); in fm10k_iov_mbx()
207 hw->iov.ops.reset_resources(hw, vf_info); in fm10k_iov_suspend()
208 hw->iov.ops.reset_lport(hw, vf_info); in fm10k_iov_suspend()
228 hw->iov.ops.assign_resources(hw, num_vfs, num_vfs); in fm10k_iov_resume()
236 dglort.vsi_l = fls(hw->iov.total_vfs - 1); in fm10k_iov_resume()
250 hw->iov.ops.set_lport(hw, vf_info, i, in fm10k_iov_resume()
257 hw->iov.ops.assign_default_mac_vlan(hw, vf_info); in fm10k_iov_resume()
285 hw->iov.ops.assign_default_mac_vlan(hw, vf_info); in fm10k_iov_update_pvid()
[all …]
fm10k_pf.c
182 hw->iov.total_vfs = fm10k_is_ari_hierarchy_pf(hw) ? 64 : 7; in fm10k_init_hw_pf()
464 if (!hw->iov.num_vfs) in fm10k_update_int_moderator_pf()
594 u16 num_pools = hw->iov.num_pools; in fm10k_queues_per_pool()
602 u16 num_vfs = hw->iov.num_vfs; in fm10k_vf_queue_index()
612 u16 num_pools = hw->iov.num_pools; in fm10k_vectors_per_pool()
648 if ((num_vfs > num_pools) || (num_vfs > hw->iov.total_vfs)) in fm10k_iov_assign_resources_pf()
652 hw->iov.num_vfs = num_vfs; in fm10k_iov_assign_resources_pf()
653 hw->iov.num_pools = num_pools; in fm10k_iov_assign_resources_pf()
757 if (vf_idx >= hw->iov.num_vfs) in fm10k_iov_configure_tc_pf()
815 if (vf_idx >= hw->iov.num_vfs) in fm10k_iov_assign_int_moderator_pf()
[all …]
/drivers/target/iscsi/
iscsi_target.c
732 struct kvec *iov, in iscsit_map_iovec() argument
752 iov[i].iov_base = kmap(sg_page(sg)) + sg->offset + page_off; in iscsit_map_iovec()
753 iov[i].iov_len = cur_len; in iscsit_map_iovec()
1392 struct kvec *iov; in iscsit_get_dataout() local
1398 iov = &cmd->iov_data[0]; in iscsit_get_dataout()
1400 iov_ret = iscsit_map_iovec(cmd, iov, be32_to_cpu(hdr->offset), in iscsit_get_dataout()
1409 iov[iov_count].iov_base = cmd->pad_bytes; in iscsit_get_dataout()
1410 iov[iov_count++].iov_len = padding; in iscsit_get_dataout()
1416 iov[iov_count].iov_base = &checksum; in iscsit_get_dataout()
1417 iov[iov_count++].iov_len = ISCSI_CRC_LEN; in iscsit_get_dataout()
[all …]
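
After mapping the scatterlist, iscsit_get_dataout() appends a 4-byte-alignment pad and a CRC slot to the vector so the digest can be received in place. A small sketch of that tail-building step; the names are illustrative:

#include <stdint.h>
#include <sys/uio.h>

#define ISCSI_CRC_LEN 4

static int add_pad_and_crc(struct iovec *iov, int iov_count,
                           uint32_t data_len, uint8_t *pad_bytes,
                           uint32_t *checksum)
{
    uint32_t padding = ((data_len + 3) & ~3u) - data_len;

    if (padding) {
        iov[iov_count].iov_base = pad_bytes;
        iov[iov_count++].iov_len = padding;
    }
    iov[iov_count].iov_base = checksum;       /* digest read in place */
    iov[iov_count++].iov_len = ISCSI_CRC_LEN;
    return iov_count;
}
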
iscsi_target_util.c
1162 struct kvec *iov; in iscsit_send_tx_data() local
1168 iov = &cmd->iov_data[0]; in iscsit_send_tx_data()
1171 iov = &cmd->iov_misc[0]; in iscsit_send_tx_data()
1175 tx_sent = tx_data(conn, &iov[0], iov_count, tx_size); in iscsit_send_tx_data()
1193 struct kvec iov; in iscsit_fe_sendpage_sg() local
1203 iov.iov_base = cmd->pdu; in iscsit_fe_sendpage_sg()
1204 iov.iov_len = tx_hdr_size; in iscsit_fe_sendpage_sg()
1206 tx_sent = tx_data(conn, &iov, 1, tx_hdr_size); in iscsit_fe_sendpage_sg()
1340 iov_p = count->iov; in iscsit_do_rx_data()
1377 iov_p = count->iov; in iscsit_do_tx_data()
[all …]
iscsi_target_erl1.c
53 struct kvec iov; in iscsit_dump_data_payload() local
66 memset(&iov, 0, sizeof(struct kvec)); in iscsit_dump_data_payload()
72 iov.iov_len = size; in iscsit_dump_data_payload()
73 iov.iov_base = buf; in iscsit_dump_data_payload()
75 rx_got = rx_data(conn, &iov, 1, size); in iscsit_dump_data_payload()
89 iov.iov_len = padding; in iscsit_dump_data_payload()
90 iov.iov_base = pad_bytes; in iscsit_dump_data_payload()
92 rx_got = rx_data(conn, &iov, 1, padding); in iscsit_dump_data_payload()
102 iov.iov_len = ISCSI_CRC_LEN; in iscsit_dump_data_payload()
103 iov.iov_base = &data_crc; in iscsit_dump_data_payload()
[all …]
/drivers/staging/lustre/lnet/lnet/
lo.c
50 struct iovec *iov, lnet_kiov_t *kiov, in lolnd_recv() argument
57 if (iov != NULL) in lolnd_recv()
58 lnet_copy_iov2iov(niov, iov, offset, in lolnd_recv()
68 if (iov != NULL) in lolnd_recv()
69 lnet_copy_kiov2iov(niov, iov, offset, in lolnd_recv()
lib-md.c
112 memcpy(lmd->md_iov.iov, umd->start, in lnet_md_build()
113 niov * sizeof(lmd->md_iov.iov[0])); in lnet_md_build()
118 if (lmd->md_iov.iov[i].iov_len <= 0) in lnet_md_build()
121 total_length += lmd->md_iov.iov[i].iov_len; in lnet_md_build()
154 lmd->md_iov.iov[0].iov_base = umd->start; in lnet_md_build()
155 lmd->md_iov.iov[0].iov_len = umd->length; in lnet_md_build()
lib-move.c
161 lnet_iov_nob(unsigned int niov, struct iovec *iov) in lnet_iov_nob() argument
166 nob += (iov++)->iov_len; in lnet_iov_nob()
374 lnet_copy_kiov2iov(unsigned int niov, struct iovec *iov, unsigned int iovoffset, in lnet_copy_kiov2iov() argument
388 while (iovoffset >= iov->iov_len) { in lnet_copy_kiov2iov()
389 iovoffset -= iov->iov_len; in lnet_copy_kiov2iov()
390 iov++; in lnet_copy_kiov2iov()
406 this_nob = MIN(iov->iov_len - iovoffset, in lnet_copy_kiov2iov()
414 memcpy((char *)iov->iov_base + iovoffset, addr, this_nob); in lnet_copy_kiov2iov()
417 if (iov->iov_len > iovoffset + this_nob) { in lnet_copy_kiov2iov()
420 iov++; in lnet_copy_kiov2iov()
[all …]
/drivers/infiniband/hw/qib/
qib_user_sdma.c
563 const struct iovec *iov, in qib_user_sdma_coalesce() argument
584 iov[i].iov_base, iov[i].iov_len); in qib_user_sdma_coalesce()
590 mpage += iov[i].iov_len; in qib_user_sdma_coalesce()
591 len += iov[i].iov_len; in qib_user_sdma_coalesce()
608 static int qib_user_sdma_num_pages(const struct iovec *iov) in qib_user_sdma_num_pages() argument
610 const unsigned long addr = (unsigned long) iov->iov_base; in qib_user_sdma_num_pages()
611 const unsigned long len = iov->iov_len; in qib_user_sdma_num_pages()
721 const struct iovec *iov, in qib_user_sdma_pin_pkt() argument
728 const int npages = qib_user_sdma_num_pages(iov + idx); in qib_user_sdma_pin_pkt()
729 const unsigned long addr = (unsigned long) iov[idx].iov_base; in qib_user_sdma_pin_pkt()
[all …]
/drivers/target/
target_core_file.c
325 struct iovec *iov; in fd_do_rw() local
330 iov = kzalloc(sizeof(struct iovec) * sgl_nents, GFP_KERNEL); in fd_do_rw()
331 if (!iov) { in fd_do_rw()
337 iov[i].iov_len = sg->length; in fd_do_rw()
338 iov[i].iov_base = kmap(sg_page(sg)) + sg->offset; in fd_do_rw()
345 ret = vfs_writev(fd, &iov[0], sgl_nents, &pos); in fd_do_rw()
347 ret = vfs_readv(fd, &iov[0], sgl_nents, &pos); in fd_do_rw()
354 kfree(iov); in fd_do_rw()
476 struct iovec *iov; in fd_execute_write_same() local
505 iov = vzalloc(sizeof(struct iovec) * iov_num); in fd_execute_write_same()
[all …]
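
fd_do_rw() builds one iovec per scatter-gather entry and then issues a single vfs_readv()/vfs_writev(). A hedged userspace analogue, with a made-up segment type standing in for the kernel scatterlist:

#include <stdlib.h>
#include <sys/uio.h>

struct seg { void *addr; size_t len; };   /* hypothetical SG entry */

static ssize_t rw_segments(int fd, struct seg *sgl, int nents, int is_write)
{
    struct iovec *iov = calloc(nents, sizeof(*iov));
    ssize_t ret;
    int i;

    if (!iov)
        return -1;
    for (i = 0; i < nents; i++) {
        iov[i].iov_base = sgl[i].addr;
        iov[i].iov_len  = sgl[i].len;
    }
    ret = is_write ? writev(fd, iov, nents) : readv(fd, iov, nents);
    free(iov);
    return ret;
}
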
target_core_user.c
285 struct iovec *iov; in tcmu_queue_cmd_ring() local
301 req.iov[se_cmd->t_data_nents + 2]), in tcmu_queue_cmd_ring()
363 iov = &entry->req.iov[0]; in tcmu_queue_cmd_ring()
376 iov->iov_len = copy_bytes; in tcmu_queue_cmd_ring()
377 iov->iov_base = (void *) udev->data_off + udev->data_head; in tcmu_queue_cmd_ring()
379 iov++; in tcmu_queue_cmd_ring()
388 iov->iov_len = copy_bytes; in tcmu_queue_cmd_ring()
389 iov->iov_base = (void *) udev->data_off + udev->data_head; in tcmu_queue_cmd_ring()
398 iov++; in tcmu_queue_cmd_ring()
/drivers/xen/xenbus/
xenbus_xs.c
619 struct kvec iov[2]; in xs_watch() local
621 iov[0].iov_base = (void *)path; in xs_watch()
622 iov[0].iov_len = strlen(path) + 1; in xs_watch()
623 iov[1].iov_base = (void *)token; in xs_watch()
624 iov[1].iov_len = strlen(token) + 1; in xs_watch()
626 return xs_error(xs_talkv(XBT_NIL, XS_WATCH, iov, in xs_watch()
627 ARRAY_SIZE(iov), NULL)); in xs_watch()
632 struct kvec iov[2]; in xs_unwatch() local
634 iov[0].iov_base = (char *)path; in xs_unwatch()
635 iov[0].iov_len = strlen(path) + 1; in xs_unwatch()
[all …]
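
xs_watch() and xs_unwatch() frame two NUL-terminated strings (path, token) as a 2-element vector for one xenstore message. The same framing sketched below; xs_talkv() itself is not reproduced:

#include <string.h>
#include <sys/uio.h>

static void build_watch_iov(struct iovec iov[2],
                            const char *path, const char *token)
{
    iov[0].iov_base = (void *)path;
    iov[0].iov_len  = strlen(path) + 1;  /* include the trailing NUL */
    iov[1].iov_base = (void *)token;
    iov[1].iov_len  = strlen(token) + 1;
}
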
/drivers/net/ethernet/broadcom/bnx2x/
bnx2x_sriov.c
1093 struct bnx2x_sriov *iov = &bp->vfdb->sriov; in bnx2x_vf_bus() local
1095 return dev->bus->number + ((dev->devfn + iov->offset + in bnx2x_vf_bus()
1096 iov->stride * vfid) >> 8); in bnx2x_vf_bus()
1102 struct bnx2x_sriov *iov = &bp->vfdb->sriov; in bnx2x_vf_devfn() local
1104 return (dev->devfn + iov->offset + iov->stride * vfid) & 0xff; in bnx2x_vf_devfn()
1111 struct bnx2x_sriov *iov = &bp->vfdb->sriov; in bnx2x_vf_set_bars() local
1117 size /= iov->total; in bnx2x_vf_set_bars()
1166 static int bnx2x_sriov_pci_cfg_info(struct bnx2x *bp, struct bnx2x_sriov *iov) in bnx2x_sriov_pci_cfg_info() argument
1177 iov->pos = pos; in bnx2x_sriov_pci_cfg_info()
1179 pci_read_config_word(dev, pos + PCI_SRIOV_CTRL, &iov->ctrl); in bnx2x_sriov_pci_cfg_info()
[all …]
/drivers/staging/lustre/lnet/klnds/socklnd/
socklnd_cb.c
82 tx->tx_iov = tx->tx_frags.virt.iov; in ksocknal_alloc_tx_noop()
113 struct iovec *iov = tx->tx_iov; in ksocknal_send_iov() local
133 if (nob < (int) iov->iov_len) { in ksocknal_send_iov()
134 iov->iov_base = (void *)((char *)iov->iov_base + nob); in ksocknal_send_iov()
135 iov->iov_len -= nob; in ksocknal_send_iov()
139 nob -= iov->iov_len; in ksocknal_send_iov()
140 tx->tx_iov = ++iov; in ksocknal_send_iov()
254 struct iovec *iov = conn->ksnc_rx_iov; in ksocknal_recv_iov() local
282 if (nob < (int)iov->iov_len) { in ksocknal_recv_iov()
283 iov->iov_len -= nob; in ksocknal_recv_iov()
[all …]
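
ksocknal_send_iov() shows the canonical fixup after a short write: drop segments that were fully sent, then advance the base and shrink the length of the first partially-sent one. A sketch:

#include <sys/uio.h>

static struct iovec *advance_iov(struct iovec *iov, long nob)
{
    while (nob > 0) {
        if ((unsigned long)nob < iov->iov_len) {
            /* partially sent: bump base, shrink length, stop here */
            iov->iov_base = (char *)iov->iov_base + nob;
            iov->iov_len -= nob;
            break;
        }
        nob -= iov->iov_len;  /* this segment fully sent, drop it */
        iov++;
    }
    return iov;               /* first segment with data still pending */
}
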
socklnd_lib-linux.c
213 struct iovec *iov = conn->ksnc_rx_iov; in ksocknal_lib_recv_iov() local
229 scratchiov[i] = iov[i]; in ksocknal_lib_recv_iov()
248 fragnob = iov[i].iov_len; in ksocknal_lib_recv_iov()
253 iov[i].iov_base, fragnob); in ksocknal_lib_recv_iov()
272 struct iovec *iov, struct page **pages) in ksocknal_lib_kiov_vmap() argument
300 iov->iov_base = addr + kiov[0].kiov_offset; in ksocknal_lib_kiov_vmap()
301 iov->iov_len = nob; in ksocknal_lib_kiov_vmap()
/drivers/media/dvb-core/
dvb_net.c
78 static inline __u32 iov_crc32( __u32 c, struct kvec *iov, unsigned int cnt ) in iov_crc32() argument
82 c = crc32_be( c, iov[j].iov_base, iov[j].iov_len ); in iov_crc32()
611 struct kvec iov[3] = { in dvb_net_ule() local
623 ule_crc = iov_crc32(ule_crc, iov, 3); in dvb_net_ule()
634 hexdump( iov[0].iov_base, iov[0].iov_len ); in dvb_net_ule()
635 hexdump( iov[1].iov_base, iov[1].iov_len ); in dvb_net_ule()
636 hexdump( iov[2].iov_base, iov[2].iov_len ); in dvb_net_ule()
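
iov_crc32() folds a running CRC across every segment of a kvec in order. A userspace sketch using zlib's crc32() in place of the kernel's crc32_be() (different polynomial convention, same folding pattern):

#include <sys/uio.h>
#include <zlib.h>

static unsigned long iov_crc(unsigned long c,
                             const struct iovec *iov, unsigned int cnt)
{
    unsigned int j;

    for (j = 0; j < cnt; j++)
        c = crc32(c, iov[j].iov_base, iov[j].iov_len);
    return c;
}
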
/drivers/staging/lustre/include/linux/lnet/
lib-lnet.h
392 size = offsetof(lnet_libmd_t, md_iov.iov[niov]); in lnet_md_alloc()
416 size = offsetof(lnet_libmd_t, md_iov.iov[md->md_niov]); in lnet_md_free()
755 unsigned int lnet_iov_nob(unsigned int niov, struct iovec *iov);
760 unsigned int lnet_kiov_nob(unsigned int niov, lnet_kiov_t *iov);
769 void lnet_copy_kiov2iov(unsigned int niov, struct iovec *iov,
775 unsigned int niov, struct iovec *iov,
