/drivers/staging/usbip/ |
D | usbip_event.c |
    25  static int event_handler(struct usbip_device *ud)   in event_handler() argument
    32  while (usbip_event_happened(ud)) {   in event_handler()
    33  usbip_dbg_eh("pending event %lx\n", ud->event);   in event_handler()
    39  if (ud->event & USBIP_EH_SHUTDOWN) {   in event_handler()
    40  ud->eh_ops.shutdown(ud);   in event_handler()
    41  ud->event &= ~USBIP_EH_SHUTDOWN;   in event_handler()
    45  if (ud->event & USBIP_EH_RESET) {   in event_handler()
    46  ud->eh_ops.reset(ud);   in event_handler()
    47  ud->event &= ~USBIP_EH_RESET;   in event_handler()
    51  if (ud->event & USBIP_EH_UNUSABLE) {   in event_handler()
    [all …]
|
D | stub_dev.c |
    70  spin_lock_irq(&sdev->ud.lock);   in show_status()
    71  status = sdev->ud.status;   in show_status()
    72  spin_unlock_irq(&sdev->ud.lock);   in show_status()
    101  spin_lock_irq(&sdev->ud.lock);   in store_sockfd()
    103  if (sdev->ud.status != SDEV_ST_AVAILABLE) {   in store_sockfd()
    112  sdev->ud.tcp_socket = socket;   in store_sockfd()
    114  spin_unlock_irq(&sdev->ud.lock);   in store_sockfd()
    116  sdev->ud.tcp_rx = kthread_get_run(stub_rx_loop, &sdev->ud,   in store_sockfd()
    118  sdev->ud.tcp_tx = kthread_get_run(stub_tx_loop, &sdev->ud,   in store_sockfd()
    121  spin_lock_irq(&sdev->ud.lock);   in store_sockfd()
    [all …]
|
D | vhci_sysfs.c |
    56  spin_lock(&vdev->ud.lock);   in show_status()
    57  out += sprintf(out, "%03u %03u ", i, vdev->ud.status);   in show_status()
    59  if (vdev->ud.status == VDEV_ST_USED) {   in show_status()
    62  out += sprintf(out, "%16p ", vdev->ud.tcp_socket);   in show_status()
    70  spin_unlock(&vdev->ud.lock);   in show_status()
    91  spin_lock(&vdev->ud.lock);   in vhci_port_disconnect()
    92  if (vdev->ud.status == VDEV_ST_NULL) {   in vhci_port_disconnect()
    93  pr_err("not connected %d\n", vdev->ud.status);   in vhci_port_disconnect()
    96  spin_unlock(&vdev->ud.lock);   in vhci_port_disconnect()
    103  spin_unlock(&vdev->ud.lock);   in vhci_port_disconnect()
    [all …]
|
D | vhci_hcd.c |
    341  if (dum->vdev[rhport].ud.status ==   in vhci_hub_control()
    346  dum->vdev[rhport].ud.status);   in vhci_hub_control()
    438  usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_MALLOC);   in vhci_tx_urb()
    483  spin_lock(&vdev->ud.lock);   in vhci_urb_enqueue()
    484  if (vdev->ud.status == VDEV_ST_NULL ||   in vhci_urb_enqueue()
    485  vdev->ud.status == VDEV_ST_ERROR) {   in vhci_urb_enqueue()
    487  spin_unlock(&vdev->ud.lock);   in vhci_urb_enqueue()
    491  spin_unlock(&vdev->ud.lock);   in vhci_urb_enqueue()
    527  spin_lock(&vdev->ud.lock);   in vhci_urb_enqueue()
    528  vdev->ud.status = VDEV_ST_USED;   in vhci_urb_enqueue()
    [all …]
|
D | vhci_rx.c |
    73  struct usbip_device *ud = &vdev->ud;   in vhci_recv_ret_submit() local
    84  usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);   in vhci_recv_ret_submit()
    92  if (usbip_recv_xbuff(ud, urb) < 0)   in vhci_recv_ret_submit()
    96  if (usbip_recv_iso(ud, urb) < 0)   in vhci_recv_ret_submit()
    100  usbip_pad_iso(ud, urb);   in vhci_recv_ret_submit()
    199  static void vhci_rx_pdu(struct usbip_device *ud)   in vhci_rx_pdu() argument
    203  struct vhci_device *vdev = container_of(ud, struct vhci_device, ud);   in vhci_rx_pdu()
    210  ret = usbip_recv(ud->tcp_socket, &pdu, sizeof(pdu));   in vhci_rx_pdu()
    222  usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);   in vhci_rx_pdu()
    227  usbip_event_add(ud, VDEV_EVENT_DOWN);   in vhci_rx_pdu()
    [all …]
|
D | stub_rx.c |
    306  struct usbip_device *ud = &sdev->ud;   in valid_request() local
    310  spin_lock_irq(&ud->lock);   in valid_request()
    311  if (ud->status == SDEV_ST_USED) {   in valid_request()
    315  spin_unlock_irq(&ud->lock);   in valid_request()
    325  struct usbip_device *ud = &sdev->ud;   in stub_priv_alloc() local
    334  usbip_event_add(ud, SDEV_EVENT_ERROR_MALLOC);   in stub_priv_alloc()
    461  struct usbip_device *ud = &sdev->ud;   in stub_recv_cmd_submit() local
    478  usbip_event_add(ud, SDEV_EVENT_ERROR_MALLOC);   in stub_recv_cmd_submit()
    488  usbip_event_add(ud, SDEV_EVENT_ERROR_MALLOC);   in stub_recv_cmd_submit()
    498  usbip_event_add(ud, SDEV_EVENT_ERROR_MALLOC);   in stub_recv_cmd_submit()
    [all …]
|
D | stub_tx.c |
    45  usbip_event_add(&sdev->ud, VDEV_EVENT_ERROR_MALLOC);   in stub_enqueue_ret_unlink()
    184  usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_MALLOC);   in stub_send_ret_submit()
    236  usbip_event_add(&sdev->ud,   in stub_send_ret_submit()
    248  usbip_event_add(&sdev->ud,   in stub_send_ret_submit()
    260  ret = kernel_sendmsg(sdev->ud.tcp_socket, &msg,   in stub_send_ret_submit()
    268  usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_TCP);   in stub_send_ret_submit()
    335  ret = kernel_sendmsg(sdev->ud.tcp_socket, &msg, iov,   in stub_send_ret_unlink()
    341  usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_TCP);   in stub_send_ret_unlink()
    363  struct usbip_device *ud = data;   in stub_tx_loop() local
    364  struct stub_device *sdev = container_of(ud, struct stub_device, ud);   in stub_tx_loop()
    [all …]
|
D | vhci_tx.c |
    108  usbip_event_add(&vdev->ud,   in vhci_send_cmd_submit()
    118  ret = kernel_sendmsg(vdev->ud.tcp_socket, &msg, iov, 3, txsize);   in vhci_send_cmd_submit()
    123  usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_TCP);   in vhci_send_cmd_submit()
    187  ret = kernel_sendmsg(vdev->ud.tcp_socket, &msg, iov, 1, txsize);   in vhci_send_cmd_unlink()
    191  usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_TCP);   in vhci_send_cmd_unlink()
    205  struct usbip_device *ud = data;   in vhci_tx_loop() local
    206  struct vhci_device *vdev = container_of(ud, struct vhci_device, ud);   in vhci_tx_loop()
|
D | usbip_common.h |
    327  int usbip_recv_iso(struct usbip_device *ud, struct urb *urb);
    328  void usbip_pad_iso(struct usbip_device *ud, struct urb *urb);
    329  int usbip_recv_xbuff(struct usbip_device *ud, struct urb *urb);
    332  int usbip_start_eh(struct usbip_device *ud);
    333  void usbip_stop_eh(struct usbip_device *ud);
    334  void usbip_event_add(struct usbip_device *ud, unsigned long event);
    335  int usbip_event_happened(struct usbip_device *ud);
|
D | usbip_common.c |
    661  int usbip_recv_iso(struct usbip_device *ud, struct urb *urb)   in usbip_recv_iso() argument
    682  ret = usbip_recv(ud->tcp_socket, buff, size);   in usbip_recv_iso()
    688  if (ud->side == USBIP_STUB)   in usbip_recv_iso()
    689  usbip_event_add(ud, SDEV_EVENT_ERROR_TCP);   in usbip_recv_iso()
    691  usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);   in usbip_recv_iso()
    711  if (ud->side == USBIP_STUB)   in usbip_recv_iso()
    712  usbip_event_add(ud, SDEV_EVENT_ERROR_TCP);   in usbip_recv_iso()
    714  usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);   in usbip_recv_iso()
    730  void usbip_pad_iso(struct usbip_device *ud, struct urb *urb)   in usbip_pad_iso() argument
    764  int usbip_recv_xbuff(struct usbip_device *ud, struct urb *urb)   in usbip_recv_xbuff() argument
    [all …]
|
D | stub.h | 39 struct usbip_device ud; member
|
D | vhci.h | 38 struct usbip_device ud; member
|
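Across the usbip hits above, `ud` is always the `struct usbip_device` member embedded in `struct stub_device` (stub.h line 39) and `struct vhci_device` (vhci.h line 38): the rx/tx kthreads are started with a pointer to that member and recover the enclosing device with container_of(), as in stub_tx_loop() and vhci_rx_pdu(). A minimal sketch of that pattern, with both structures cut down to the fields visible in the hits:

    #include <linux/kernel.h>
    #include <linux/spinlock.h>

    struct usbip_device {
        spinlock_t lock;
        unsigned long event;
        struct socket *tcp_socket;
        /* remaining fields omitted in this sketch */
    };

    struct stub_device {
        struct usbip_device ud;    /* the embedded member named "ud" */
        /* remaining fields omitted in this sketch */
    };

    /* Thread entry point: "data" is &sdev->ud, not the stub_device itself. */
    static int stub_tx_loop_sketch(void *data)
    {
        struct usbip_device *ud = data;
        /* Recover the containing stub_device from its embedded member. */
        struct stub_device *sdev = container_of(ud, struct stub_device, ud);

        (void)sdev;    /* the real loop body is omitted here */
        return 0;
    }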
/drivers/infiniband/hw/qib/ |
D | qib_ud.c |
    61  qp = qib_lookup_qpn(ibp, swqe->wr.wr.ud.remote_qpn);   in qib_ud_loopback()
    72  ah_attr = &to_iah(swqe->wr.wr.ud.ah)->attr;   in qib_ud_loopback()
    102  qkey = (int)swqe->wr.wr.ud.remote_qkey < 0 ?   in qib_ud_loopback()
    103  sqp->qkey : swqe->wr.wr.ud.remote_qkey;   in qib_ud_loopback()
    206  swqe->wr.wr.ud.pkey_index : 0;   in qib_ud_loopback()
    273  ah_attr = &to_iah(wqe->wr.wr.ud.ah)->attr;   in qib_make_ud_req()
    336  ohdr->u.ud.imm_data = wqe->wr.ex.imm_data;   in qib_make_ud_req()
    359  wqe->wr.wr.ud.pkey_index : qp->s_pkey_index);   in qib_make_ud_req()
    367  cpu_to_be32(wqe->wr.wr.ud.remote_qpn);   in qib_make_ud_req()
    373  ohdr->u.ud.deth[0] = cpu_to_be32((int)wqe->wr.wr.ud.remote_qkey < 0 ?   in qib_make_ud_req()
    [all …]
|
/drivers/infiniband/hw/ipath/ |
D | ipath_ud.c |
    68  qp = ipath_lookup_qpn(&dev->qp_table, swqe->wr.wr.ud.remote_qpn);   in ipath_ud_loopback()
    80  ((int) swqe->wr.wr.ud.remote_qkey < 0 ?   in ipath_ud_loopback()
    81  sqp->qkey : swqe->wr.wr.ud.remote_qkey) != qp->qkey)) {   in ipath_ud_loopback()
    178  ah_attr = &to_iah(swqe->wr.wr.ud.ah)->attr;   in ipath_ud_loopback()
    283  ah_attr = &to_iah(wqe->wr.wr.ud.ah)->attr;   in ipath_make_ud_req()
    345  ohdr->u.ud.imm_data = wqe->wr.ex.imm_data;   in ipath_make_ud_req()
    375  cpu_to_be32(wqe->wr.wr.ud.remote_qpn);   in ipath_make_ud_req()
    381  ohdr->u.ud.deth[0] = cpu_to_be32((int)wqe->wr.wr.ud.remote_qkey < 0 ?   in ipath_make_ud_req()
    382  qp->qkey : wqe->wr.wr.ud.remote_qkey);   in ipath_make_ud_req()
    383  ohdr->u.ud.deth[1] = cpu_to_be32(qp->ibqp.qp_num);   in ipath_make_ud_req()
    [all …]
|
/drivers/net/wireless/ath/ath5k/ |
D | desc.c |
    91  tx_ctl = &desc->ud.ds_tx5210.tx_ctl;   in ath5k_hw_setup_2word_tx_desc()
    111  memset(&desc->ud.ds_tx5210, 0, sizeof(struct ath5k_hw_5210_tx_desc));   in ath5k_hw_setup_2word_tx_desc()
    262  tx_ctl = &desc->ud.ds_tx5212.tx_ctl;   in ath5k_hw_setup_4word_tx_desc()
    286  memset(&desc->ud.ds_tx5212.tx_stat, 0,   in ath5k_hw_setup_4word_tx_desc()
    287  sizeof(desc->ud.ds_tx5212.tx_stat));   in ath5k_hw_setup_4word_tx_desc()
    406  tx_ctl = &desc->ud.ds_tx5212.tx_ctl;   in ath5k_hw_setup_mrr_tx_desc()
    448  tx_status = &desc->ud.ds_tx5210.tx_stat;   in ath5k_hw_proc_2word_tx_status()
    501  tx_status = &desc->ud.ds_tx5212.tx_stat;   in ath5k_hw_proc_4word_tx_status()
    565  rx_ctl = &desc->ud.ds_rx.rx_ctl;   in ath5k_hw_setup_rx_desc()
    574  memset(&desc->ud.ds_rx, 0, sizeof(struct ath5k_hw_all_rx_desc));   in ath5k_hw_setup_rx_desc()
    [all …]
|
D | desc.h | 357 } ud; member
|
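In ath5k, `ud` is the union declared at desc.h line 357: the tail of one hardware DMA descriptor, holding either the 5210-family two-word TX layout, the 5212-family four-word TX layout, or the RX layout, selected in desc.c through desc->ud.ds_tx5210 / ds_tx5212 / ds_rx. A rough sketch of that shape; only the member names come from the hits above, while the inner field types and the ds_link/ds_data words are placeholders:

    #include <linux/types.h>

    /* Placeholder inner layouts; the real ones live in ath5k/desc.h. */
    struct ath5k_hw_5210_tx_desc { u32 tx_ctl[2]; u32 tx_stat[2]; };
    struct ath5k_hw_5212_tx_desc { u32 tx_ctl[4]; u32 tx_stat[2]; };
    struct ath5k_hw_all_rx_desc  { u32 rx_ctl[2]; u32 rx_stat[2]; };

    struct ath5k_desc_sketch {
        u32 ds_link;    /* placeholder: link to the next descriptor */
        u32 ds_data;    /* placeholder: DMA buffer address */
        union {
            struct ath5k_hw_5210_tx_desc ds_tx5210;
            struct ath5k_hw_5212_tx_desc ds_tx5212;
            struct ath5k_hw_all_rx_desc  ds_rx;
        } ud;           /* the member the hits above dereference */
    };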
/drivers/infiniband/hw/ehca/ |
D | ehca_reqs.c |
    118  struct ib_mad_hdr *mad_hdr = send_wr->wr.ud.mad_hdr;   in trace_send_wr_ud()
    163  u32 remote_qkey = send_wr->wr.ud.remote_qkey;   in ehca_write_swqe()
    226  if (send_wr->wr.ud.remote_qkey & 0x80000000)   in ehca_write_swqe()
    229  wqe_p->destination_qp_number = send_wr->wr.ud.remote_qpn << 8;   in ehca_write_swqe()
    231  if (unlikely(!send_wr->wr.ud.ah)) {   in ehca_write_swqe()
    235  if (unlikely(send_wr->wr.ud.remote_qpn == 0)) {   in ehca_write_swqe()
    239  my_av = container_of(send_wr->wr.ud.ah, struct ehca_av, ib_ah);   in ehca_write_swqe()
    258  wqe_p->pkeyi = send_wr->wr.ud.pkey_index;   in ehca_write_swqe()
|
/drivers/infiniband/hw/mlx4/ |
D | qp.c |
    1642  struct mlx4_ib_ah *ah = to_mah(wr->wr.ud.ah);   in build_sriov_qp0_header()
    1685  sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->wr.ud.remote_qpn);   in build_sriov_qp0_header()
    1748  struct mlx4_ib_ah *ah = to_mah(wr->wr.ud.ah);   in build_mlx_header()
    1880  ib_get_cached_pkey(ib_dev, sqp->qp.port, wr->wr.ud.pkey_index, &pkey);   in build_mlx_header()
    1882  sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->wr.ud.remote_qpn);   in build_mlx_header()
    1884  sqp->ud_header.deth.qkey = cpu_to_be32(wr->wr.ud.remote_qkey & 0x80000000 ?   in build_mlx_header()
    1885  sqp->qkey : wr->wr.ud.remote_qkey);   in build_mlx_header()
    2053  memcpy(dseg->av, &to_mah(wr->wr.ud.ah)->av, sizeof (struct mlx4_av));   in set_datagram_seg()
    2054  dseg->dqpn = cpu_to_be32(wr->wr.ud.remote_qpn);   in set_datagram_seg()
    2055  dseg->qkey = cpu_to_be32(wr->wr.ud.remote_qkey);   in set_datagram_seg()
    [all …]
|
D | mad.c |
    557  wr.wr.ud.ah = ah;   in mlx4_ib_send_to_slave()
    558  wr.wr.ud.port_num = port;   in mlx4_ib_send_to_slave()
    559  wr.wr.ud.remote_qkey = IB_QP_SET_QKEY;   in mlx4_ib_send_to_slave()
    560  wr.wr.ud.remote_qpn = dqpn;   in mlx4_ib_send_to_slave()
    1158  wr.wr.ud.ah = ah;   in mlx4_ib_send_to_wire()
    1159  wr.wr.ud.port_num = port;   in mlx4_ib_send_to_wire()
    1160  wr.wr.ud.pkey_index = wire_pkey_ix;   in mlx4_ib_send_to_wire()
    1161  wr.wr.ud.remote_qkey = qkey;   in mlx4_ib_send_to_wire()
    1162  wr.wr.ud.remote_qpn = remote_qpn;   in mlx4_ib_send_to_wire()
|
/drivers/staging/crystalhd/ |
D | crystalhd_lnx.c |
    112  static inline int crystalhd_user_data(unsigned long ud, void *dr, int size, int set)   in crystalhd_user_data() argument
    116  if (!ud || !dr) {   in crystalhd_user_data()
    122  rc = copy_to_user((void *)ud, dr, size);   in crystalhd_user_data()
    124  rc = copy_from_user(dr, (void *)ud, size);   in crystalhd_user_data()
|
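The crystalhd hit is unrelated to the others: there `ud` is simply a user-space buffer address carried through the ioctl path, and the helper copies a driver structure to or from it depending on `set`. A compressed sketch of the logic visible in the hit; the error codes and return-value handling are assumptions:

    #include <linux/errno.h>
    #include <linux/uaccess.h>

    /* ud:  user-space buffer address
     * dr:  kernel-side data
     * set: nonzero copies kernel -> user, zero copies user -> kernel */
    static inline int crystalhd_user_data_sketch(unsigned long ud, void *dr,
                                                 int size, int set)
    {
        unsigned long rc;

        if (!ud || !dr)
            return -EINVAL;    /* assumed error code */

        if (set)
            rc = copy_to_user((void __user *)ud, dr, size);
        else
            rc = copy_from_user(dr, (void __user *)ud, size);

        /* copy_*_user() return the number of bytes left uncopied. */
        return rc ? -EFAULT : 0;    /* assumed mapping to an errno */
    }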
/drivers/infiniband/hw/mthca/ |
D | mthca_qp.c |
    1487  mthca_ah_grh_present(to_mah(wr->wr.ud.ah)), 0,   in build_mlx_header()
    1490  err = mthca_read_ah(dev, to_mah(wr->wr.ud.ah), &sqp->ud_header);   in build_mlx_header()
    1524  wr->wr.ud.pkey_index, &pkey);   in build_mlx_header()
    1526  sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->wr.ud.remote_qpn);   in build_mlx_header()
    1528  sqp->ud_header.deth.qkey = cpu_to_be32(wr->wr.ud.remote_qkey & 0x80000000 ?   in build_mlx_header()
    1529  sqp->qkey : wr->wr.ud.remote_qkey);   in build_mlx_header()
    1586  useg->lkey = cpu_to_be32(to_mah(wr->wr.ud.ah)->key);   in set_tavor_ud_seg()
    1587  useg->av_addr = cpu_to_be64(to_mah(wr->wr.ud.ah)->avdma);   in set_tavor_ud_seg()
    1588  useg->dqpn = cpu_to_be32(wr->wr.ud.remote_qpn);   in set_tavor_ud_seg()
    1589  useg->qkey = cpu_to_be32(wr->wr.ud.remote_qkey);   in set_tavor_ud_seg()
    [all …]
|
/drivers/infiniband/ulp/ipoib/ |
D | ipoib_ib.c |
    528  priv->tx_wr.wr.ud.remote_qpn = qpn;   in post_send()
    529  priv->tx_wr.wr.ud.ah = address;   in post_send()
    532  priv->tx_wr.wr.ud.mss = skb_shinfo(skb)->gso_size;   in post_send()
    533  priv->tx_wr.wr.ud.header = head;   in post_send()
    534  priv->tx_wr.wr.ud.hlen = hlen;   in post_send()
|
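In the InfiniBand hits, `ud` is the unreliable-datagram branch of the send work request (wr.wr.ud in this kernel generation's struct ib_send_wr). ipoib's post_send() fills it, uverbs_cmd.c (further down) copies it in from user space, and the HCA drivers (qib, ipath, ehca, mlx4, mthca) consume the address handle, remote QPN and Q_Key, several of them applying the convention that a Q_Key with the high bit set means "use the sending QP's own Q_Key". A hedged sketch of the producer side, posting one UD send; the QP, address handle and SGE are assumed to be set up elsewhere:

    #include <linux/string.h>
    #include <rdma/ib_verbs.h>

    /* Post a single UD send on "qp" toward (ah, remote_qpn, remote_qkey). */
    static int post_ud_send_sketch(struct ib_qp *qp, struct ib_ah *ah,
                                   u32 remote_qpn, u32 remote_qkey,
                                   struct ib_sge *sge)
    {
        struct ib_send_wr wr, *bad_wr;

        memset(&wr, 0, sizeof(wr));
        wr.opcode     = IB_WR_SEND;
        wr.send_flags = IB_SEND_SIGNALED;
        wr.sg_list    = sge;
        wr.num_sge    = 1;
        /* The fields the drivers above read back as wr->wr.ud.* */
        wr.wr.ud.ah          = ah;
        wr.wr.ud.remote_qpn  = remote_qpn;
        wr.wr.ud.remote_qkey = remote_qkey;

        return ib_post_send(qp, &wr, &bad_wr);
    }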
/drivers/infiniband/core/ |
D | agent.c | 124 mad_send_wr->send_wr.wr.ud.port_num = port_num; in agent_send_response()
|
D | cma.c |
    1217  event.param.ud.private_data = ib_event->private_data + offset;   in cma_req_handler()
    1218  event.param.ud.private_data_len =   in cma_req_handler()
    2495  event.param.ud.private_data = ib_event->private_data;   in cma_sidr_rep_handler()
    2496  event.param.ud.private_data_len = IB_CM_SIDR_REP_PRIVATE_DATA_SIZE;   in cma_sidr_rep_handler()
    2515  &event.param.ud.ah_attr);   in cma_sidr_rep_handler()
    2516  event.param.ud.qp_num = rep->qpn;   in cma_sidr_rep_handler()
    2517  event.param.ud.qkey = rep->qkey;   in cma_sidr_rep_handler()
    2969  event.param.ud.private_data = mc->context;   in cma_ib_mc_handler()
    2974  &event.param.ud.ah_attr);   in cma_ib_mc_handler()
    2975  event.param.ud.qp_num = 0xFFFFFF;   in cma_ib_mc_handler()
    [all …]
|
D | uverbs_cmd.c |
    2109  next->wr.ud.ah = idr_read_ah(user_wr->wr.ud.ah,   in ib_uverbs_post_send()
    2111  if (!next->wr.ud.ah) {   in ib_uverbs_post_send()
    2115  next->wr.ud.remote_qpn = user_wr->wr.ud.remote_qpn;   in ib_uverbs_post_send()
    2116  next->wr.ud.remote_qkey = user_wr->wr.ud.remote_qkey;   in ib_uverbs_post_send()
    2184  if (is_ud && wr->wr.ud.ah)   in ib_uverbs_post_send()
    2185  put_ah_read(wr->wr.ud.ah);   in ib_uverbs_post_send()
|