/drivers/usb/usbip/

usbip_event.c
     16  struct usbip_device *ud;    member
     22  static void set_event(struct usbip_device *ud, unsigned long event)    in set_event() argument
     26  spin_lock_irqsave(&ud->lock, flags);    in set_event()
     27  ud->event |= event;    in set_event()
     28  spin_unlock_irqrestore(&ud->lock, flags);    in set_event()
     31  static void unset_event(struct usbip_device *ud, unsigned long event)    in unset_event() argument
     35  spin_lock_irqsave(&ud->lock, flags);    in unset_event()
     36  ud->event &= ~event;    in unset_event()
     37  spin_unlock_irqrestore(&ud->lock, flags);    in unset_event()
     43  struct usbip_device *ud = NULL;    in get_event() local
    [all …]
stub_dev.c
     29  spin_lock_irq(&sdev->ud.lock);    in usbip_status_show()
     30  status = sdev->ud.status;    in usbip_status_show()
     31  spin_unlock_irq(&sdev->ud.lock);    in usbip_status_show()
     66  mutex_lock(&sdev->ud.sysfs_lock);    in usbip_sockfd_store()
     67  spin_lock_irq(&sdev->ud.lock);    in usbip_sockfd_store()
     69  if (sdev->ud.status != SDEV_ST_AVAILABLE) {    in usbip_sockfd_store()
     87  spin_unlock_irq(&sdev->ud.lock);    in usbip_sockfd_store()
     88  tcp_rx = kthread_create(stub_rx_loop, &sdev->ud, "stub_rx");    in usbip_sockfd_store()
     93  tcp_tx = kthread_create(stub_tx_loop, &sdev->ud, "stub_tx");    in usbip_sockfd_store()
    105  spin_lock_irq(&sdev->ud.lock);    in usbip_sockfd_store()
    [all …]
vudc_sysfs.c
    115  mutex_lock(&udc->ud.sysfs_lock);    in usbip_sockfd_store()
    131  spin_lock(&udc->ud.lock);    in usbip_sockfd_store()
    133  if (udc->ud.status != SDEV_ST_AVAILABLE) {    in usbip_sockfd_store()
    153  spin_unlock(&udc->ud.lock);    in usbip_sockfd_store()
    156  tcp_rx = kthread_create(&v_rx_loop, &udc->ud, "vudc_rx");    in usbip_sockfd_store()
    159  mutex_unlock(&udc->ud.sysfs_lock);    in usbip_sockfd_store()
    162  tcp_tx = kthread_create(&v_tx_loop, &udc->ud, "vudc_tx");    in usbip_sockfd_store()
    166  mutex_unlock(&udc->ud.sysfs_lock);    in usbip_sockfd_store()
    176  spin_lock(&udc->ud.lock);    in usbip_sockfd_store()
    178  udc->ud.tcp_socket = socket;    in usbip_sockfd_store()
    [all …]
vhci_sysfs.c
     42  port, vdev->ud.status);    in port_show_vhci()
     45  port, vdev->ud.status);    in port_show_vhci()
     47  if (vdev->ud.status == VDEV_ST_USED) {    in port_show_vhci()
     51  vdev->ud.sockfd,    in port_show_vhci()
     87  spin_lock(&vdev->ud.lock);    in status_show_vhci()
     90  spin_unlock(&vdev->ud.lock);    in status_show_vhci()
     96  spin_lock(&vdev->ud.lock);    in status_show_vhci()
     99  spin_unlock(&vdev->ud.lock);    in status_show_vhci()
    188  mutex_lock(&vdev->ud.sysfs_lock);    in vhci_port_disconnect()
    192  spin_lock(&vdev->ud.lock);    in vhci_port_disconnect()
    [all …]
vudc_dev.c
    176  usbip_start_eh(&udc->ud);    in vgadget_pullup()
    182  usbip_event_add(&udc->ud, VUDC_EVENT_REMOVED);    in vgadget_pullup()
    183  usbip_stop_eh(&udc->ud); /* Wait for eh completion */    in vgadget_pullup()
    429  static void vudc_shutdown(struct usbip_device *ud)    in vudc_shutdown() argument
    431  struct vudc *udc = container_of(ud, struct vudc, ud);    in vudc_shutdown()
    436  if (ud->tcp_socket)    in vudc_shutdown()
    437  kernel_sock_shutdown(ud->tcp_socket, SHUT_RDWR);    in vudc_shutdown()
    439  if (ud->tcp_rx) {    in vudc_shutdown()
    440  kthread_stop_put(ud->tcp_rx);    in vudc_shutdown()
    441  ud->tcp_rx = NULL;    in vudc_shutdown()
    [all …]
vhci_rx.c
     60  struct usbip_device *ud = &vdev->ud;    in vhci_recv_ret_submit() local
     72  usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);    in vhci_recv_ret_submit()
     80  if (usbip_recv_xbuff(ud, urb) < 0) {    in vhci_recv_ret_submit()
     86  if (usbip_recv_iso(ud, urb) < 0) {    in vhci_recv_ret_submit()
     92  usbip_pad_iso(ud, urb);    in vhci_recv_ret_submit()
    197  static void vhci_rx_pdu(struct usbip_device *ud)    in vhci_rx_pdu() argument
    201  struct vhci_device *vdev = container_of(ud, struct vhci_device, ud);    in vhci_rx_pdu()
    208  ret = usbip_recv(ud->tcp_socket, &pdu, sizeof(pdu));    in vhci_rx_pdu()
    220  usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);    in vhci_rx_pdu()
    225  usbip_event_add(ud, VDEV_EVENT_DOWN);    in vhci_rx_pdu()
    [all …]
vudc_rx.c
     98  usbip_event_add(&udc->ud, VUDC_EVENT_ERROR_MALLOC);    in v_recv_cmd_submit()
    113  usbip_event_add(&udc->ud, VUDC_EVENT_ERROR_TCP);    in v_recv_cmd_submit()
    144  usbip_event_add(&udc->ud, VUDC_EVENT_ERROR_MALLOC);    in v_recv_cmd_submit()
    167  ret = usbip_recv_xbuff(&udc->ud, urb_p->urb);    in v_recv_cmd_submit()
    171  ret = usbip_recv_iso(&udc->ud, urb_p->urb);    in v_recv_cmd_submit()
    187  static int v_rx_pdu(struct usbip_device *ud)    in v_rx_pdu() argument
    191  struct vudc *udc = container_of(ud, struct vudc, ud);    in v_rx_pdu()
    194  ret = usbip_recv(ud->tcp_socket, &pdu, sizeof(pdu));    in v_rx_pdu()
    196  usbip_event_add(ud, VUDC_EVENT_ERROR_TCP);    in v_rx_pdu()
    203  spin_lock_irq(&ud->lock);    in v_rx_pdu()
    [all …]
vhci_hcd.c
    462  if (vhci_hcd->vdev[rhport].ud.status ==    in vhci_hub_control()
    464  vhci_hcd->vdev[rhport].ud.status ==    in vhci_hub_control()
    469  vhci_hcd->vdev[rhport].ud.status);    in vhci_hub_control()
    672  usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_MALLOC);    in vhci_tx_urb()
    724  spin_lock(&vdev->ud.lock);    in vhci_urb_enqueue()
    725  if (vdev->ud.status == VDEV_ST_NULL ||    in vhci_urb_enqueue()
    726  vdev->ud.status == VDEV_ST_ERROR) {    in vhci_urb_enqueue()
    728  spin_unlock(&vdev->ud.lock);    in vhci_urb_enqueue()
    732  spin_unlock(&vdev->ud.lock);    in vhci_urb_enqueue()
    767  spin_lock(&vdev->ud.lock);    in vhci_urb_enqueue()
    [all …]
stub_rx.c
    281  struct usbip_device *ud = &sdev->ud;    in valid_request() local
    285  spin_lock_irq(&ud->lock);    in valid_request()
    286  if (ud->status == SDEV_ST_USED) {    in valid_request()
    290  spin_unlock_irq(&ud->lock);    in valid_request()
    300  struct usbip_device *ud = &sdev->ud;    in stub_priv_alloc() local
    309  usbip_event_add(ud, SDEV_EVENT_ERROR_MALLOC);    in stub_priv_alloc()
    441  static int stub_recv_xbuff(struct usbip_device *ud, struct stub_priv *priv)    in stub_recv_xbuff() argument
    447  ret = usbip_recv_xbuff(ud, priv->urbs[i]);    in stub_recv_xbuff()
    459  struct usbip_device *ud = &sdev->ud;    in stub_recv_cmd_submit() local
    555  usbip_event_add(ud, SDEV_EVENT_ERROR_MALLOC);    in stub_recv_cmd_submit()
    [all …]
usbip_common.h
    321  int usbip_recv_iso(struct usbip_device *ud, struct urb *urb);
    322  void usbip_pad_iso(struct usbip_device *ud, struct urb *urb);
    323  int usbip_recv_xbuff(struct usbip_device *ud, struct urb *urb);
    328  int usbip_start_eh(struct usbip_device *ud);
    329  void usbip_stop_eh(struct usbip_device *ud);
    330  void usbip_event_add(struct usbip_device *ud, unsigned long event);
    331  int usbip_event_happened(struct usbip_device *ud);
    350  static inline void usbip_kcov_handle_init(struct usbip_device *ud)    in usbip_kcov_handle_init() argument
    352  ud->kcov_handle = kcov_common_handle();    in usbip_kcov_handle_init()
    355  static inline void usbip_kcov_remote_start(struct usbip_device *ud)    in usbip_kcov_remote_start() argument
    [all …]
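The usbip_common.h hits above declare the shared event-handler API that the stub, vhci and vudc sides all drive through their embedded struct usbip_device. The snippet below is only an illustrative sketch of that lifecycle, pieced together from these declarations and from the vgadget_pullup()/vudc_shutdown() and *_tx_loop() hits elsewhere in this listing; my_dev and the my_dev_* helpers are made-up names, not kernel symbols, and only event constants that appear in these results are used.

/*
 * Sketch only, not kernel code: the typical usbip event-handler
 * lifecycle.  The struct embeds a usbip_device the same way stub.h,
 * vudc and vhci do (sdev->ud, udc->ud, vdev->ud above).
 */
struct my_dev {
        struct usbip_device ud;         /* embedded usbip state */
};

static int my_dev_attach(struct my_dev *dev)
{
        /* Spawn the per-device event-handler thread. */
        return usbip_start_eh(&dev->ud);
}

static void my_dev_on_tcp_error(struct my_dev *dev)
{
        /*
         * Post an event: set_event() in usbip_event.c ORs it into
         * ud->event under ud->lock and wakes the handler thread.
         */
        usbip_event_add(&dev->ud, VUDC_EVENT_ERROR_TCP);
}

static void my_dev_detach(struct my_dev *dev)
{
        usbip_event_add(&dev->ud, VUDC_EVENT_REMOVED);
        usbip_stop_eh(&dev->ud);        /* wait for eh completion */
}

Transmit/receive loops such as v_tx_loop() poll usbip_event_happened(&dev->ud) and bail out once any of these events is pending.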
vudc_tx.c
     60  ret = kernel_sendmsg(udc->ud.tcp_socket, &msg, iov,    in v_send_ret_unlink()
     63  usbip_event_add(&udc->ud, VUDC_EVENT_ERROR_TCP);    in v_send_ret_unlink()
    102  usbip_event_add(&udc->ud, VUDC_EVENT_ERROR_MALLOC);    in v_send_ret_submit()
    141  usbip_event_add(&udc->ud, VUDC_EVENT_ERROR_TCP);    in v_send_ret_submit()
    154  usbip_event_add(&udc->ud,    in v_send_ret_submit()
    166  ret = kernel_sendmsg(udc->ud.tcp_socket, &msg,    in v_send_ret_submit()
    169  usbip_event_add(&udc->ud, VUDC_EVENT_ERROR_TCP);    in v_send_ret_submit()
    223  struct usbip_device *ud = (struct usbip_device *) data;    in v_tx_loop() local
    224  struct vudc *udc = container_of(ud, struct vudc, ud);    in v_tx_loop()
    228  if (usbip_event_happened(&udc->ud))    in v_tx_loop()
    [all …]
usbip_common.c
    659  int usbip_recv_iso(struct usbip_device *ud, struct urb *urb)    in usbip_recv_iso() argument
    680  ret = usbip_recv(ud->tcp_socket, buff, size);    in usbip_recv_iso()
    686  if (ud->side == USBIP_STUB || ud->side == USBIP_VUDC)    in usbip_recv_iso()
    687  usbip_event_add(ud, SDEV_EVENT_ERROR_TCP);    in usbip_recv_iso()
    689  usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);    in usbip_recv_iso()
    708  if (ud->side == USBIP_STUB || ud->side == USBIP_VUDC)    in usbip_recv_iso()
    709  usbip_event_add(ud, SDEV_EVENT_ERROR_TCP);    in usbip_recv_iso()
    711  usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);    in usbip_recv_iso()
    727  void usbip_pad_iso(struct usbip_device *ud, struct urb *urb)    in usbip_pad_iso() argument
    761  int usbip_recv_xbuff(struct usbip_device *ud, struct urb *urb)    in usbip_recv_xbuff() argument
    [all …]
stub_tx.c
     21  usbip_event_add(&sdev->ud, VDEV_EVENT_ERROR_MALLOC);    in stub_enqueue_ret_unlink()
     92  if (sdev->ud.tcp_socket == NULL) {    in stub_complete()
    196  usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_MALLOC);    in stub_send_ret_submit()
    292  usbip_event_add(&sdev->ud,    in stub_send_ret_submit()
    304  usbip_event_add(&sdev->ud,    in stub_send_ret_submit()
    316  ret = kernel_sendmsg(sdev->ud.tcp_socket, &msg,    in stub_send_ret_submit()
    324  usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_TCP);    in stub_send_ret_submit()
    391  ret = kernel_sendmsg(sdev->ud.tcp_socket, &msg, iov,    in stub_send_ret_unlink()
    397  usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_TCP);    in stub_send_ret_unlink()
    419  struct usbip_device *ud = data;    in stub_tx_loop() local
    [all …]
vhci_tx.c
     87  usbip_event_add(&vdev->ud, SDEV_EVENT_ERROR_MALLOC);    in vhci_send_cmd_submit()
    128  usbip_event_add(&vdev->ud,    in vhci_send_cmd_submit()
    139  ret = kernel_sendmsg(vdev->ud.tcp_socket, &msg, iov, iovnum,    in vhci_send_cmd_submit()
    144  usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_TCP);    in vhci_send_cmd_submit()
    219  ret = kernel_sendmsg(vdev->ud.tcp_socket, &msg, &iov, 1, txsize);    in vhci_send_cmd_unlink()
    223  usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_TCP);    in vhci_send_cmd_unlink()
    237  struct usbip_device *ud = data;    in vhci_tx_loop() local
    238  struct vhci_device *vdev = container_of(ud, struct vhci_device, ud);    in vhci_tx_loop()
stub.h
     24  struct usbip_device ud;    member
/drivers/dma/ti/

k3-udma-private.c
      7  int xudma_navss_psil_pair(struct udma_dev *ud, u32 src_thread, u32 dst_thread)    in xudma_navss_psil_pair() argument
      9  return navss_psil_pair(ud, src_thread, dst_thread);    in xudma_navss_psil_pair()
     13  int xudma_navss_psil_unpair(struct udma_dev *ud, u32 src_thread, u32 dst_thread)    in xudma_navss_psil_unpair() argument
     15  return navss_psil_unpair(ud, src_thread, dst_thread);    in xudma_navss_psil_unpair()
     23  struct udma_dev *ud;    in of_xudma_dev_get() local
     42  ud = platform_get_drvdata(pdev);    in of_xudma_dev_get()
     43  if (!ud) {    in of_xudma_dev_get()
     49  return ud;    in of_xudma_dev_get()
     53  struct device *xudma_get_device(struct udma_dev *ud)    in xudma_get_device() argument
     55  return ud->dev;    in xudma_get_device()
    [all …]
k3-udma.c
    279  struct udma_dev *ud;    member
    399  static int navss_psil_pair(struct udma_dev *ud, u32 src_thread, u32 dst_thread)    in navss_psil_pair() argument
    401  struct udma_tisci_rm *tisci_rm = &ud->tisci_rm;    in navss_psil_pair()
    409  static int navss_psil_unpair(struct udma_dev *ud, u32 src_thread,    in navss_psil_unpair() argument
    412  struct udma_tisci_rm *tisci_rm = &ud->tisci_rm;    in navss_psil_unpair()
    467  struct device *dev = uc->ud->dev;    in udma_dump_chan_stdata()
    554  struct udma_dev *ud = container_of(work, typeof(*ud), purge_work);    in udma_purge_desc_work() local
    559  spin_lock_irqsave(&ud->lock, flags);    in udma_purge_desc_work()
    560  list_splice_tail_init(&ud->desc_to_purge, &head);    in udma_purge_desc_work()
    561  spin_unlock_irqrestore(&ud->lock, flags);    in udma_purge_desc_work()
    [all …]
k3-udma.h
    127  int xudma_navss_psil_pair(struct udma_dev *ud, u32 src_thread, u32 dst_thread);
    128  int xudma_navss_psil_unpair(struct udma_dev *ud, u32 src_thread,
    132  struct device *xudma_get_device(struct udma_dev *ud);
    133  struct k3_ringacc *xudma_get_ringacc(struct udma_dev *ud);
    134  void xudma_dev_put(struct udma_dev *ud);
    135  u32 xudma_dev_get_psil_base(struct udma_dev *ud);
    136  struct udma_tisci_rm *xudma_dev_get_tisci_rm(struct udma_dev *ud);
    138  int xudma_alloc_gp_rflow_range(struct udma_dev *ud, int from, int cnt);
    139  int xudma_free_gp_rflow_range(struct udma_dev *ud, int from, int cnt);
    141  struct udma_tchan *xudma_tchan_get(struct udma_dev *ud, int id);
    [all …]
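The k3-udma.h hits above declare the xudma glue layer that lets other TI drivers reach the UDMA core without touching k3-udma.c internals. Below is a minimal sketch of a caller pairing and unpairing PSI-L threads through that glue; my_psil_link()/my_psil_unlink() and the thread IDs are hypothetical, and the struct udma_dev handle is assumed to have been obtained beforehand (of_xudma_dev_get() in k3-udma-private.c returns one, but its full parameter list is not shown in these results).

/*
 * Sketch only, not kernel code: PSI-L thread pairing through the xudma
 * glue declared in k3-udma.h.  Only functions whose prototypes appear
 * in the listing above are used.
 */
static int my_psil_link(struct udma_dev *ud, u32 src_thread, u32 dst_thread)
{
        int ret;

        /* navss_psil_pair() in k3-udma.c issues the actual TISCI request. */
        ret = xudma_navss_psil_pair(ud, src_thread, dst_thread);
        if (ret)
                return ret;

        pr_debug("paired PSI-L threads, psil base 0x%x\n",
                 xudma_dev_get_psil_base(ud));
        return 0;
}

static void my_psil_unlink(struct udma_dev *ud, u32 src_thread, u32 dst_thread)
{
        xudma_navss_psil_unpair(ud, src_thread, dst_thread);
        /* Drop the reference taken when the udma_dev handle was obtained. */
        xudma_dev_put(ud);
}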
/drivers/net/wireless/ath/ath5k/

desc.c
     91  tx_ctl = &desc->ud.ds_tx5210.tx_ctl;    in ath5k_hw_setup_2word_tx_desc()
    111  memset(&desc->ud.ds_tx5210, 0, sizeof(struct ath5k_hw_5210_tx_desc));    in ath5k_hw_setup_2word_tx_desc()
    262  tx_ctl = &desc->ud.ds_tx5212.tx_ctl;    in ath5k_hw_setup_4word_tx_desc()
    286  memset(&desc->ud.ds_tx5212.tx_stat, 0,    in ath5k_hw_setup_4word_tx_desc()
    287  sizeof(desc->ud.ds_tx5212.tx_stat));    in ath5k_hw_setup_4word_tx_desc()
    406  tx_ctl = &desc->ud.ds_tx5212.tx_ctl;    in ath5k_hw_setup_mrr_tx_desc()
    448  tx_status = &desc->ud.ds_tx5210.tx_stat;    in ath5k_hw_proc_2word_tx_status()
    501  tx_status = &desc->ud.ds_tx5212.tx_stat;    in ath5k_hw_proc_4word_tx_status()
    565  rx_ctl = &desc->ud.ds_rx.rx_ctl;    in ath5k_hw_setup_rx_desc()
    574  memset(&desc->ud.ds_rx, 0, sizeof(struct ath5k_hw_all_rx_desc));    in ath5k_hw_setup_rx_desc()
    [all …]
/drivers/infiniband/hw/qib/

qib_ud.c
    341  ohdr->u.ud.imm_data = wqe->wr.ex.imm_data;    in qib_make_ud_req()
    382  ohdr->u.ud.deth[0] =    in qib_make_ud_req()
    385  ohdr->u.ud.deth[1] = cpu_to_be32(qp->ibqp.qp_num);    in qib_make_ud_req()
    447  qkey = be32_to_cpu(ohdr->u.ud.deth[0]);    in qib_ud_rcv()
    448  src_qp = be32_to_cpu(ohdr->u.ud.deth[1]) & RVT_QPN_MASK;    in qib_ud_rcv()
    511  wc.ex.imm_data = ohdr->u.ud.imm_data;    in qib_ud_rcv()
/drivers/infiniband/sw/rxe/

rxe_av.c
    119  ah_num = pkt->wqe->wr.wr.ud.ah_num;    in rxe_get_av()
    143  return &pkt->wqe->wr.wr.ud.av;    in rxe_get_av()
/drivers/clk/baikal-t1/

ccu-pll.c
     91  unsigned long ud, ut;    in ccu_pll_reset() local
     94  ud = ccu_pll_lock_delay_us(ref_clk, nr);    in ccu_pll_reset()
     95  ut = ud * CCU_PLL_LOCK_CHECK_RETRIES;    in ccu_pll_reset()
    101  val & CCU_PLL_CTL_LOCK, ud, ut);    in ccu_pll_reset()
/drivers/infiniband/hw/hfi1/

trace.c
    413  be32_to_cpu(eh->ud.deth[0]),    in parse_everbs_hdrs()
    414  be32_to_cpu(eh->ud.deth[1]) & RVT_QPN_MASK,    in parse_everbs_hdrs()
    415  be32_to_cpu(eh->ud.deth[1]) >>    in parse_everbs_hdrs()
    420  be32_to_cpu(eh->ud.deth[0]),    in parse_everbs_hdrs()
    421  be32_to_cpu(eh->ud.deth[1]) & RVT_QPN_MASK);    in parse_everbs_hdrs()
Makefile
     46  ud.o \
/drivers/tty/serial/

sprd_serial.c
    270  struct sprd_uart_dma *ud, u32 trans_len,    in sprd_uart_dma_submit() argument
    282  dma_des = dmaengine_prep_slave_single(ud->chn, ud->phys_addr, trans_len,    in sprd_uart_dma_submit()
    290  ud->cookie = dmaengine_submit(dma_des);    in sprd_uart_dma_submit()
    291  if (dma_submit_error(ud->cookie))    in sprd_uart_dma_submit()
    292  return dma_submit_error(ud->cookie);    in sprd_uart_dma_submit()
    294  dma_async_issue_pending(ud->chn);    in sprd_uart_dma_submit()
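The sprd_serial.c hits follow the standard dmaengine slave-submit sequence: prepare a descriptor, submit it to obtain a cookie, check the cookie for errors, then issue the pending queue. The snippet below is a generic sketch of that sequence using only the common dmaengine API; my_dma_submit_one() and its parameters are hypothetical, and the sprd-specific flags and completion callback are omitted.

#include <linux/dmaengine.h>

/*
 * Generic sketch of the dmaengine slave-submit flow (not the
 * sprd_serial.c implementation).  chan, buf and len are assumed to be
 * set up by the caller: channel requested, buffer already DMA-mapped.
 */
static int my_dma_submit_one(struct dma_chan *chan, dma_addr_t buf,
                             size_t len, enum dma_transfer_direction dir)
{
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;

        /* Build a single-buffer slave transfer descriptor. */
        desc = dmaengine_prep_slave_single(chan, buf, len, dir,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -ENOMEM;

        /* Queue it; the returned cookie encodes submission errors. */
        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie))
                return dma_submit_error(cookie);

        /* Nothing is transferred until the pending queue is issued. */
        dma_async_issue_pending(chan);
        return 0;
}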