/drivers/isdn/mISDN/ |
D | hwchannel.c |
      25  struct dchannel *dch = container_of(ws, struct dchannel, workq);  in dchannel_bh()
      48  struct bchannel *bch = container_of(ws, struct bchannel, workq);  in bchannel_bh()
      78  INIT_WORK(&ch->workq, dchannel_bh);  in mISDN_initdchannel()
     101  INIT_WORK(&ch->workq, bchannel_bh);  in mISDN_initbchannel()
     119  flush_work(&ch->workq);  in mISDN_freedchannel()
     159  cancel_work_sync(&ch->workq);  in mISDN_freebchannel()
|
D | l1oip.h | 66 struct work_struct workq; member
|
D | dsp_core.c |
     683  schedule_work(&dsp->workq);  in dsp_function()
     890  schedule_work(&dsp->workq);  in dsp_function()
     977  cancel_work_sync(&dsp->workq);  in dsp_ctrl()
    1009  struct dsp *dsp = container_of(work, struct dsp, workq);  in dsp_send_bh()
    1067  INIT_WORK(&ndsp->workq, (void *)dsp_send_bh);  in dspcreate()
|
D | stack.c |
      37  wake_up_interruptible(&st->workq);  in _queue_message()
     288  wait_event_interruptible(st->workq, (st->status &  in mISDNStackd()
     386  init_waitqueue_head(&newst->workq);  in create_stack()
     645  wake_up_interruptible(&st->workq);  in delete_stack()
|
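Despite the shared name, the stack.c hits above are not workqueue API calls: there st->workq is a wait_queue_head_t that the mISDNStackd() thread sleeps on until a status bit is set, while _queue_message() and delete_stack() wake it. A minimal sketch of that producer/worker pattern, with hypothetical names standing in for the mISDN stack structure:

#include <linux/wait.h>
#include <linux/bitops.h>

struct my_stack {                       /* hypothetical stand-in for struct mISDNstack */
        wait_queue_head_t workq;        /* the worker thread sleeps here */
        unsigned long     status;       /* bit flags tested by the worker */
};

static void my_stack_init(struct my_stack *st)
{
        init_waitqueue_head(&st->workq);        /* cf. create_stack() */
}

/* producer side: mark work pending and wake the worker thread */
static void my_queue_event(struct my_stack *st)
{
        set_bit(0, &st->status);
        wake_up_interruptible(&st->workq);      /* cf. _queue_message() */
}

/* worker side: sleep until the flag is set (or a signal arrives) */
static int my_stack_wait(struct my_stack *st)
{
        return wait_event_interruptible(st->workq, test_bit(0, &st->status));
}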
D | dsp.h | 185 struct work_struct workq; member
|
D | l1oip_core.c |
     828  struct l1oip *hc = container_of(work, struct l1oip, workq);  in l1oip_send_bh()
     847  schedule_work(&hc->workq);  in l1oip_keepalive()
    1255  cancel_work_sync(&hc->workq);  in release_card()
    1506  INIT_WORK(&hc->workq, (void *)l1oip_send_bh);  in l1oip_init()
|
D | dsp_cmx.c |
    1585  schedule_work(&dsp->workq);
    1619  schedule_work(&dsp->workq);
    1942  schedule_work(&dsp->workq);
    1958  schedule_work(&member->dsp->workq);
|
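The remaining mISDN hits follow the usual embedded work_struct pattern: the channel (or dsp/l1oip) structure carries a struct work_struct, the bottom-half handler recovers its container with container_of(), schedule_work() queues it on the system workqueue from the data path, and flush_work()/cancel_work_sync() quiesce it on teardown. The (void *) casts in dsp_core.c and l1oip_core.c sidestep the work_func_t prototype check. A minimal sketch of the pattern, with hypothetical names:

#include <linux/kernel.h>
#include <linux/workqueue.h>

struct my_channel {                     /* hypothetical stand-in for struct dchannel/bchannel */
        struct work_struct workq;       /* deferred RX/TX processing */
        /* ... channel state ... */
};

static void my_channel_bh(struct work_struct *ws)
{
        /* recover the containing channel, as dchannel_bh()/bchannel_bh() do */
        struct my_channel *ch = container_of(ws, struct my_channel, workq);

        /* process queued frames in process context */
        (void)ch;
}

static void my_channel_init(struct my_channel *ch)
{
        INIT_WORK(&ch->workq, my_channel_bh);   /* cf. mISDN_initdchannel() */
}

static void my_channel_rx(struct my_channel *ch)
{
        schedule_work(&ch->workq);              /* defer to the system workqueue */
}

static void my_channel_free(struct my_channel *ch)
{
        /* drop a pending instance and wait for a running one to finish */
        cancel_work_sync(&ch->workq);           /* cf. mISDN_freebchannel() */
}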
/drivers/hwmon/ |
D | xgene-hwmon.c |
     112  struct work_struct workq;  member
     449  ctx = container_of(work, struct xgene_hwmon_dev, workq);  in xgene_hwmon_evt_work()
     529  schedule_work(&ctx->workq);  in xgene_hwmon_rx_cb()
     598  schedule_work(&ctx->workq);  in xgene_hwmon_pcc_rx_cb()
     635  INIT_WORK(&ctx->workq, xgene_hwmon_evt_work);  in xgene_hwmon_probe()
     730  schedule_work(&ctx->workq);  in xgene_hwmon_probe()
|
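xgene-hwmon uses the same embedded work_struct shape as the mISDN sketch above; the detail worth noting is that schedule_work() is called from the mailbox receive callbacks, which may run in atomic context. It only marks the item pending and wakes a kworker, so the heavy parsing happens later in xgene_hwmon_evt_work() in process context. A minimal illustration, names hypothetical:

#include <linux/workqueue.h>

struct my_hwmon_dev {                   /* hypothetical stand-in for struct xgene_hwmon_dev */
        struct work_struct workq;       /* set up in probe with INIT_WORK() */
};

/* mailbox RX callback: possibly atomic context, so only mark the work pending */
static void my_rx_cb(struct my_hwmon_dev *ctx)
{
        schedule_work(&ctx->workq);     /* cf. xgene_hwmon_rx_cb() */
}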
/drivers/gpu/drm/msm/hdmi/ |
D | hdmi.c |
      75  if (hdmi->workq) {  in msm_hdmi_destroy()
      76  flush_workqueue(hdmi->workq);  in msm_hdmi_destroy()
      77  destroy_workqueue(hdmi->workq);  in msm_hdmi_destroy()
     242  hdmi->workq = alloc_ordered_workqueue("msm_hdmi", 0);  in msm_hdmi_init()
|
D | hdmi_hdcp.c |
     241  queue_work(hdmi->workq, &hdcp_ctrl->hdcp_reauth_work);  in msm_hdmi_hdcp_irq()
     461  queue_work(hdmi->workq, &hdcp_ctrl->hdcp_auth_work);  in msm_hdmi_hdcp_reauth_work()
     561  queue_work(hdmi->workq, &hdcp_ctrl->hdcp_reauth_work);  in msm_hdmi_hdcp_auth_fail()
    1335  queue_work(hdmi->workq, &hdcp_ctrl->hdcp_auth_work);  in msm_hdmi_hdcp_on()
|
D | hdmi.h | 86 struct workqueue_struct *workq; member
|
D | hdmi_connector.c | 282 queue_work(hdmi->workq, &hdmi_connector->hpd_work); in msm_hdmi_connector_irq()
|
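The MSM HDMI driver keeps a private ordered workqueue instead of using the system one: msm_hdmi_init() allocates it, the HPD and HDCP interrupt paths queue work onto it, and msm_hdmi_destroy() flushes and destroys it. An ordered workqueue executes at most one item at a time in queueing order, so the HDCP auth and reauth work items never run concurrently. A sketch of that lifecycle, names hypothetical:

#include <linux/errno.h>
#include <linux/workqueue.h>

struct my_hdmi {                                /* hypothetical stand-in for struct hdmi */
        struct workqueue_struct *workq;         /* at most one item runs at a time */
        struct work_struct hpd_work;
};

static int my_hdmi_init(struct my_hdmi *hdmi)
{
        hdmi->workq = alloc_ordered_workqueue("my_hdmi", 0);
        if (!hdmi->workq)
                return -ENOMEM;
        return 0;
}

/* hot-plug interrupt: push the slow connector update onto the private queue */
static void my_hdmi_irq(struct my_hdmi *hdmi)
{
        queue_work(hdmi->workq, &hdmi->hpd_work);       /* cf. msm_hdmi_connector_irq() */
}

static void my_hdmi_destroy(struct my_hdmi *hdmi)
{
        if (hdmi->workq) {
                flush_workqueue(hdmi->workq);   /* let queued items finish */
                destroy_workqueue(hdmi->workq);
        }
}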
/drivers/infiniband/hw/cxgb3/ |
D | cxio_hal.c |
     532  rdev_p->ctrl_qp.workq = dma_alloc_coherent(  in cxio_hal_init_ctrl_qp()
     538  if (!rdev_p->ctrl_qp.workq) {  in cxio_hal_init_ctrl_qp()
     546  memset(rdev_p->ctrl_qp.workq, 0,  in cxio_hal_init_ctrl_qp()
     576  rdev_p->ctrl_qp.workq, 1 << T3_CTRL_QP_SIZE_LOG2);  in cxio_hal_init_ctrl_qp()
     588  * sizeof(union t3_wr), rdev_p->ctrl_qp.workq,  in cxio_hal_destroy_ctrl_qp()
     629  wqe = (__be64 *)(rdev_p->ctrl_qp.workq + (rdev_p->ctrl_qp.wptr %  in cxio_hal_ctrl_qp_write_mem()
     671  wqe = (__be64 *)(rdev_p->ctrl_qp.workq + (rdev_p->ctrl_qp.wptr %  in cxio_hal_ctrl_qp_write_mem()
|
D | cxio_hal.h | 72 union t3_wr *workq; /* the work request queue */ member
|
D | iwch_cm.c |
     105  static struct workqueue_struct *workq;  variable
    2219  queue_work(workq, &skb_work);  in sched()
    2261  workq = alloc_ordered_workqueue("iw_cxgb3", WQ_MEM_RECLAIM);  in iwch_cm_init()
    2262  if (!workq)  in iwch_cm_init()
    2270  flush_workqueue(workq);  in iwch_cm_term()
    2271  destroy_workqueue(workq);  in iwch_cm_term()
|
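Two unrelated things share the name here: in iwch_cm.c, workq is a module-wide workqueue_struct (the same pattern as the cxgb4 connection manager below), while in cxio_hal.c, ctrl_qp.workq is a DMA-coherent array of union t3_wr hardware work requests that the adapter consumes directly. A hedged sketch of the latter, with hypothetical sizes and names:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

struct my_ctrl_qp {                     /* hypothetical stand-in for the ctrl_qp above */
        void *workq;                    /* CPU address of the work-request ring */
        dma_addr_t dma_addr;            /* bus address programmed into the HW */
        u32 wptr;                       /* software write pointer, wraps modulo ring size */
};

#define MY_QP_SIZE_LOG2 6               /* 64 entries, mirroring the T3_CTRL_QP_SIZE_LOG2 style */
#define MY_WR_SIZE      64              /* bytes per work request, hypothetical */

static int my_ctrl_qp_alloc(struct device *dev, struct my_ctrl_qp *qp)
{
        size_t len = (1UL << MY_QP_SIZE_LOG2) * MY_WR_SIZE;

        /* coherent allocation: CPU and device see a consistent view of the ring */
        qp->workq = dma_alloc_coherent(dev, len, &qp->dma_addr, GFP_KERNEL);
        if (!qp->workq)
                return -ENOMEM;
        return 0;
}

static void my_ctrl_qp_free(struct device *dev, struct my_ctrl_qp *qp)
{
        size_t len = (1UL << MY_QP_SIZE_LOG2) * MY_WR_SIZE;

        dma_free_coherent(dev, len, qp->workq, qp->dma_addr);
}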
/drivers/scsi/ |
D | libiscsi.c |
      93  if (ihost->workq)  in iscsi_conn_queue_work()
      94  queue_work(ihost->workq, &conn->xmitwork);  in iscsi_conn_queue_work()
     783  if (!ihost->workq) {  in __iscsi_conn_send_pdu()
    1754  if (!ihost->workq) {  in iscsi_queuecommand()
    1938  if (ihost->workq)  in iscsi_suspend_tx()
    1939  flush_workqueue(ihost->workq);  in iscsi_suspend_tx()
    2662  ihost->workq = create_singlethread_workqueue(ihost->workq_name);  in iscsi_host_alloc()
    2663  if (!ihost->workq)  in iscsi_host_alloc()
    2707  if (ihost->workq)  in iscsi_host_remove()
    2708  destroy_workqueue(ihost->workq);  in iscsi_host_remove()
|
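libiscsi creates one single-threaded workqueue per host for transmit work, and every user checks ihost->workq for NULL first, which suggests the queue is optional (transports that transmit on their own never get one). A sketch of that optional-queue pattern, names hypothetical:

#include <linux/errno.h>
#include <linux/types.h>
#include <linux/workqueue.h>

struct my_host {                                /* hypothetical stand-in for the per-host data */
        struct workqueue_struct *workq;         /* may stay NULL for some transports */
};

static int my_host_alloc(struct my_host *ihost, bool need_xmit_wq)
{
        if (!need_xmit_wq)
                return 0;                       /* transport transmits directly, no queue */

        ihost->workq = create_singlethread_workqueue("my_iscsi_q");
        if (!ihost->workq)
                return -ENOMEM;
        return 0;
}

static void my_conn_queue_work(struct my_host *ihost, struct work_struct *xmitwork)
{
        if (ihost->workq)                       /* cf. iscsi_conn_queue_work() */
                queue_work(ihost->workq, xmitwork);
}

static void my_host_remove(struct my_host *ihost)
{
        if (ihost->workq)
                destroy_workqueue(ihost->workq);        /* cf. iscsi_host_remove() */
}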
/drivers/net/ethernet/chelsio/cxgb4/ |
D | cxgb4_main.c |
    1253  queue_work(adap->workq, &adap->tid_release_task);  in cxgb4_queue_tid_release()
    2061  queue_work(adap->workq, &adap->db_full_task);  in t4_db_full()
    2071  queue_work(adap->workq, &adap->db_drop_task);  in t4_db_dropped()
    4710  adapter->workq = create_singlethread_workqueue("cxgb4");  in init_one()
    4711  if (!adapter->workq) {  in init_one()
    5047  if (adapter->workq)  in init_one()
    5048  destroy_workqueue(adapter->workq);  in init_one()
    5077  destroy_workqueue(adapter->workq);  in remove_one()
|
D | cxgb4.h | 834 struct workqueue_struct *workq; member
|
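cxgb4 gives each adapter its own single-threaded queue for doorbell and TID-release work: init_one() creates it early, destroys it behind a NULL check on the probe error path, and remove_one() destroys it unconditionally. A sketch of that probe/remove lifecycle, names hypothetical:

#include <linux/errno.h>
#include <linux/workqueue.h>

struct my_adapter {                             /* hypothetical stand-in for struct adapter */
        struct workqueue_struct *workq;
        struct work_struct db_full_task;
};

static int my_setup_rest(struct my_adapter *adap)
{
        return 0;                               /* placeholder for the rest of probe */
}

static int my_probe(struct my_adapter *adap)
{
        int err;

        adap->workq = create_singlethread_workqueue("my_cxgb");
        if (!adap->workq)
                return -ENOMEM;

        err = my_setup_rest(adap);
        if (err)
                goto out_free_wq;
        return 0;

out_free_wq:
        destroy_workqueue(adap->workq);         /* probe error path, cf. init_one() */
        return err;
}

static void my_remove(struct my_adapter *adap)
{
        destroy_workqueue(adap->workq);         /* cf. remove_one() */
}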
/drivers/infiniband/hw/cxgb4/ |
D | cm.c |
     142  static struct workqueue_struct *workq;  variable
    4121  queue_work(workq, &skb_work);  in ep_timeout()
    4139  queue_work(workq, &skb_work);  in sched()
    4243  workq = alloc_ordered_workqueue("iw_cxgb4", WQ_MEM_RECLAIM);  in c4iw_cm_init()
    4244  if (!workq)  in c4iw_cm_init()
    4253  flush_workqueue(workq);  in c4iw_cm_term()
    4254  destroy_workqueue(workq);  in c4iw_cm_term()
|
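Both iwch_cm.c and this cm.c keep a single module-scoped ordered workqueue, created with WQ_MEM_RECLAIM so the connection-management work has a rescuer thread and can make forward progress under memory pressure, then flushed and destroyed at module exit. A minimal module skeleton following that pattern, names hypothetical:

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/workqueue.h>

static struct workqueue_struct *workq;          /* one queue for all CM events */

static int __init my_cm_init(void)
{
        /* ordered: events are handled one at a time, in queueing order;
         * WQ_MEM_RECLAIM: keep a rescuer so work runs even under memory pressure */
        workq = alloc_ordered_workqueue("my_iw_cm", WQ_MEM_RECLAIM);
        if (!workq)
                return -ENOMEM;
        return 0;
}

static void __exit my_cm_exit(void)
{
        flush_workqueue(workq);                 /* drain anything still queued */
        destroy_workqueue(workq);
}

module_init(my_cm_init);
module_exit(my_cm_exit);
MODULE_LICENSE("GPL");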
/drivers/isdn/hardware/mISDN/ |
D | avmfritz.c | 860 cancel_work_sync(&bch->workq); in avm_bctrl()
|
D | netjet.c | 815 cancel_work_sync(&bch->workq); in nj_bctrl()
|
D | w6692.c | 1057 cancel_work_sync(&bch->workq); in w6692_bctrl()
|
D | mISDNisar.c | 1591 cancel_work_sync(&bch->workq); in isar_bctrl()
|
D | mISDNipac.c | 1416 cancel_work_sync(&bch->workq); in hscx_bctrl()
|
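All of these hardware drivers call cancel_work_sync() on the B-channel work from their bctrl (channel control/close) handler: it removes a pending instance and waits for a running one to finish, so the bottom half cannot touch channel state that the close path is about to release. A short sketch, names hypothetical:

#include <linux/slab.h>
#include <linux/workqueue.h>

struct my_bchannel {                    /* hypothetical stand-in for struct bchannel */
        struct work_struct workq;       /* deferred frame processing */
        void *rx_buf;                   /* buffers freed only after the work is gone */
};

static void my_bctrl_close(struct my_bchannel *bch)
{
        /* remove a pending instance and wait for a running one,
         * so the handler cannot race with the teardown below */
        cancel_work_sync(&bch->workq);  /* cf. avm_bctrl()/nj_bctrl() */

        kfree(bch->rx_buf);
        bch->rx_buf = NULL;
}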