Searched refs: work_tx (Results 1 – 4 of 4) sorted by relevance
drivers/staging/gdm724x/gdm_usb.c:
  351  INIT_DELAYED_WORK(&udev->work_tx, do_tx);  in init_usb()
  562  schedule_work(&udev->work_tx.work);  in gdm_usb_send_complete()
  641  container_of(work, struct lte_udev, work_tx.work);  in do_tx()
  753  schedule_work(&udev->work_tx.work);  in gdm_usb_sdu_send()
  790  schedule_work(&udev->work_tx.work);  in gdm_usb_hci_send()
  933  cancel_work_sync(&udev->work_tx.work);  in gdm_usb_suspend()
  973  schedule_work(&udev->work_tx.work);  in gdm_usb_resume()
drivers/staging/gdm724x/gdm_usb.h:
   88  struct delayed_work work_tx;  member (struct lte_udev)
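
The gdm724x hits above show the embedded-delayed_work idiom: the handler is registered with INIT_DELAYED_WORK(), but the driver bypasses the timer by scheduling and cancelling the inner .work member directly. Below is a minimal sketch of that pattern, not the driver's code; struct my_dev, my_tx_fn() and the my_*() helpers are hypothetical names.

    #include <linux/kernel.h>
    #include <linux/workqueue.h>

    struct my_dev {
            struct delayed_work work_tx;    /* like lte_udev.work_tx */
    };

    static void my_tx_fn(struct work_struct *work)
    {
            /* work points at work_tx.work, so recover the container
             * through the nested member, as do_tx() does. */
            struct my_dev *dev = container_of(work, struct my_dev,
                                              work_tx.work);
            /* ... drain dev's TX queue ... */
    }

    static void my_init(struct my_dev *dev)
    {
            INIT_DELAYED_WORK(&dev->work_tx, my_tx_fn); /* cf. init_usb() */
    }

    static void my_kick_tx(struct my_dev *dev)
    {
            /* Scheduling the inner work_struct runs the handler without
             * any delay, as the send and resume paths above do. */
            schedule_work(&dev->work_tx.work);
    }

    static void my_suspend(struct my_dev *dev)
    {
            /* Wait for a running handler, cf. gdm_usb_suspend(). */
            cancel_work_sync(&dev->work_tx.work);
    }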
drivers/tty/serial/sh-sci.c:
  148  struct work_struct work_tx;  member (struct sci_port)
  590  schedule_work(&s->work_tx);  in sci_start_tx()
 1208  schedule_work(&s->work_tx);  in sci_dma_tx_complete()
 1351  cancel_work_sync(&s->work_tx);  in sci_dma_tx_release()
 1405  struct sci_port *s = container_of(work, struct sci_port, work_tx);  in sci_dma_tx_work_fn()
 1614  INIT_WORK(&s->work_tx, sci_dma_tx_work_fn);  in sci_request_dma()
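
The sh-sci hits use the plain work_struct variant of the same idiom: INIT_WORK() at DMA setup, schedule_work() from the TX-start and DMA-completion paths, cancel_work_sync() on release. A minimal sketch under those assumptions, with struct my_port standing in for sci_port:

    #include <linux/kernel.h>
    #include <linux/workqueue.h>

    struct my_port {
            struct work_struct work_tx;     /* like sci_port.work_tx */
    };

    static void my_dma_tx_work_fn(struct work_struct *work)
    {
            /* Plain work_struct: container_of() goes straight to the
             * member, cf. sci_dma_tx_work_fn(). */
            struct my_port *s = container_of(work, struct my_port, work_tx);
            /* ... submit the next TX DMA descriptor ... */
    }

    static void my_request_dma(struct my_port *s)
    {
            INIT_WORK(&s->work_tx, my_dma_tx_work_fn); /* cf. sci_request_dma() */
    }

    static void my_start_tx(struct my_port *s)
    {
            schedule_work(&s->work_tx); /* also done on DMA completion */
    }

    static void my_release_dma(struct my_port *s)
    {
            cancel_work_sync(&s->work_tx); /* cf. sci_dma_tx_release() */
    }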
drivers/net/ethernet/marvell/mv643xx_eth.c:
  383  u8 work_tx;  member (struct mv643xx_eth_private)
 1119  mp->work_tx &= ~(1 << txq->index);  in txq_reclaim()
 2163  mp->work_tx |= int_cause_ext & INT_EXT_TX;  in mv643xx_eth_collect_events()
 2259  queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx;  in mv643xx_eth_poll()
 2278  } else if (mp->work_tx & queue_mask) {  in mv643xx_eth_poll()
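
In mv643xx_eth, work_tx is not a work item at all but a u8 bitmask of TX queues with pending completions: the interrupt path sets bits, the NAPI poll loop consumes them, and reclaim clears them. A minimal sketch of that bookkeeping; struct my_priv and the my_*() helpers are hypothetical, and the INT_EXT_TX value shown is an assumption, not the driver's definition.

    #include <linux/types.h>

    #define INT_EXT_TX  0x000000ff  /* assumed: one cause bit per TX queue */

    struct my_priv {
            u8 work_tx;      /* bit n set: TX queue n needs reclaim */
            u8 work_tx_end;
            u8 work_rx;
    };

    /* Interrupt path, cf. mv643xx_eth_collect_events(): latch which TX
     * queues reported completed descriptors. */
    static void my_collect_events(struct my_priv *mp, u32 int_cause_ext)
    {
            mp->work_tx |= int_cause_ext & INT_EXT_TX;
    }

    /* Reclaim path, cf. txq_reclaim(): once a queue is drained, drop
     * its bit so poll stops selecting it. */
    static void my_txq_reclaim(struct my_priv *mp, int index)
    {
            /* ... free completed descriptors for queue 'index' ... */
            mp->work_tx &= ~(1 << index);
    }

    /* NAPI poll, cf. mv643xx_eth_poll(): merge the pending-work masks
     * and dispatch on whichever class of work is outstanding. */
    static int my_poll(struct my_priv *mp)
    {
            u8 queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx;

            if (mp->work_tx & queue_mask) {
                    /* ... service one pending TX queue ... */
            }
            return !!queue_mask;
    }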