/drivers/usb/usbip/
  usbip_event.c
      20  static LIST_HEAD(event_list);
      47  if (!list_empty(&event_list)) {  in get_event()
      48  ue = list_first_entry(&event_list, struct usbip_event, node);  in get_event()
     157  list_for_each_entry_reverse(ue, &event_list, node) {  in usbip_event_add()
     168  list_add_tail(&ue->node, &event_list);  in usbip_event_add()
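The usbip hits above are the simplest form of the kernel's intrusive event-list pattern: a file-scope LIST_HEAD() guarded by a lock, a producer that appends with list_add_tail(), and a consumer that pops the oldest entry with list_first_entry(). A minimal sketch of that pattern follows; it is not usbip's actual code, and my_event, my_event_add(), and my_event_get() are made-up names.

```c
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct my_event {
	int type;
	struct list_head node;	/* links the event into event_list */
};

static LIST_HEAD(event_list);
static DEFINE_SPINLOCK(event_lock);

/* Producer: append one event to the tail of the queue. */
static int my_event_add(int type)
{
	struct my_event *ev = kzalloc(sizeof(*ev), GFP_ATOMIC);

	if (!ev)
		return -ENOMEM;
	ev->type = type;

	spin_lock(&event_lock);
	list_add_tail(&ev->node, &event_list);
	spin_unlock(&event_lock);
	return 0;
}

/* Consumer: detach and return the oldest event, or NULL if the list is empty. */
static struct my_event *my_event_get(void)
{
	struct my_event *ev = NULL;

	spin_lock(&event_lock);
	if (!list_empty(&event_list)) {
		ev = list_first_entry(&event_list, struct my_event, node);
		list_del(&ev->node);
	}
	spin_unlock(&event_lock);
	return ev;
}
```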
/drivers/acpi/
  evged.c
      45  struct list_head event_list;  member
     137  list_add_tail(&event->node, &geddev->event_list);  in acpi_ged_request_interrupt()
     151  INIT_LIST_HEAD(&geddev->event_list);  in ged_probe()
     168  list_for_each_entry_safe(event, next, &geddev->event_list, node) {  in ged_shutdown()
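The ged_shutdown() hit shows the teardown side of the same pattern: the list is walked with list_for_each_entry_safe() so entries can be unlinked and freed mid-walk. A short sketch under the same assumed my_event type (illustrative names, not the evged.c code):

```c
#include <linux/list.h>
#include <linux/slab.h>

struct my_event {
	int type;
	struct list_head node;
};

static void my_event_list_purge(struct list_head *events)
{
	struct my_event *ev, *next;

	/* The _safe variant caches the next pointer, so list_del() + kfree()
	 * on the current entry does not break the iteration. */
	list_for_each_entry_safe(ev, next, events, node) {
		list_del(&ev->node);
		kfree(ev);
	}
}
```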
/drivers/gpu/drm/
  drm_file.c
     175  INIT_LIST_HEAD(&file->event_list);  in drm_file_alloc()
     226  list_for_each_entry_safe(e, et, &file_priv->event_list, link) {  in drm_events_release()
     293  WARN_ON(!list_empty(&file->event_list));  in drm_file_free()
     584  if (!list_empty(&file_priv->event_list)) {  in drm_read()
     585  e = list_first_entry(&file_priv->event_list,  in drm_read()
     603  !list_empty(&file_priv->event_list));  in drm_read()
     615  list_add(&e->link, &file_priv->event_list);  in drm_read()
     661  if (!list_empty(&file_priv->event_list))  in drm_poll()
     804  &e->file_priv->event_list);  in drm_send_event_helper()
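The drm_read() hits (lines 584, 585, 615) show a refinement of the consumer: pop the oldest event, and if it cannot be copied to userspace, push it back onto the head with list_add() so it is not lost. A simplified sketch, with illustrative names and without drm_read()'s wait/locking details:

```c
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/uaccess.h>

struct my_pending_event {
	struct list_head link;
	size_t length;
	void *payload;
};

static ssize_t my_read_one_event(struct list_head *event_list,
				 spinlock_t *lock,
				 char __user *buf, size_t count)
{
	struct my_pending_event *e = NULL;
	ssize_t ret;

	spin_lock_irq(lock);
	if (!list_empty(event_list)) {
		e = list_first_entry(event_list, struct my_pending_event, link);
		list_del(&e->link);
	}
	spin_unlock_irq(lock);

	if (!e)
		return 0;		/* nothing queued */

	if (e->length > count || copy_to_user(buf, e->payload, e->length)) {
		/* Could not deliver: put the event back at the head. */
		spin_lock_irq(lock);
		list_add(&e->link, event_list);
		spin_unlock_irq(lock);
		return -EFAULT;
	}

	ret = e->length;
	kfree(e->payload);
	kfree(e);
	return ret;
}
```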
/drivers/dma/idxd/
  perfmon.c
      99  idxd_pmu->event_list[n] = leader;  in perfmon_collect_events()
     100  idxd_pmu->event_list[n]->hw.idx = n;  in perfmon_collect_events()
     115  idxd_pmu->event_list[n] = event;  in perfmon_collect_events()
     116  idxd_pmu->event_list[n]->hw.idx = n;  in perfmon_collect_events()
     184  event = fake_pmu->event_list[i];  in perfmon_validate_group()
     286  event = idxd->idxd_pmu->event_list[i];  in perfmon_counter_overflow()
     384  if (event != idxd->idxd_pmu->event_list[i])  in perfmon_pmu_event_stop()
     388  idxd->idxd_pmu->event_list[i - 1] = idxd->idxd_pmu->event_list[i];  in perfmon_pmu_event_stop()

  idxd.h
      97  struct perf_event *event_list[IDXD_PMU_EVENT_MAX];  member
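Unlike the entries above, the idxd PMU's event_list is not a linked list at all but a fixed array of perf_event pointers plus a count; events are appended at the end, and removal compacts the tail down one slot (the event_list[i - 1] = event_list[i] hits in perfmon_pmu_event_stop()). A sketch of that array-backed variant, with MY_EVENT_MAX and the my_pmu type being illustrative stand-ins:

```c
#include <linux/errno.h>
#include <linux/perf_event.h>

#define MY_EVENT_MAX	8

struct my_pmu {
	int n_events;
	struct perf_event *event_list[MY_EVENT_MAX];
};

static int my_pmu_add_event(struct my_pmu *pmu, struct perf_event *event)
{
	int n = pmu->n_events;

	if (n >= MY_EVENT_MAX)
		return -ENOSPC;

	pmu->event_list[n] = event;
	event->hw.idx = n;
	pmu->n_events = n + 1;
	return 0;
}

static void my_pmu_del_event(struct my_pmu *pmu, struct perf_event *event)
{
	int i;

	for (i = 0; i < pmu->n_events; i++) {
		if (pmu->event_list[i] != event)
			continue;
		/* Shift the remaining entries down to keep the array dense. */
		for (i++; i < pmu->n_events; i++)
			pmu->event_list[i - 1] = pmu->event_list[i];
		pmu->n_events--;
		break;
	}
}
```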
/drivers/infiniband/hw/mlx5/
  devx.c
      67  struct list_head event_list; /* headed in ev_file->event_list or in  member
      86  struct list_head event_list;  member
    1671  struct list_head event_list;  member
    1685  INIT_LIST_HEAD(&ev_queue->event_list);  in devx_init_event_queue()
    1729  INIT_LIST_HEAD(&ev_file->event_list);  in UVERBS_HANDLER()
    1753  list_add_tail(&async_data->list, &ev_queue->event_list);  in devx_query_callback()
    2100  list_add_tail(&event_sub->event_list, &sub_list);  in UVERBS_HANDLER()
    2125  list_for_each_entry_safe(event_sub, tmp_sub, &sub_list, event_list) {  in UVERBS_HANDLER()
    2129  list_del_init(&event_sub->event_list);  in UVERBS_HANDLER()
    2158  list_for_each_entry_safe(event_sub, tmp_sub, &sub_list, event_list) {  in UVERBS_HANDLER()
    [all …]
/drivers/infiniband/core/
  uverbs_main.c
     159  list_for_each_entry_safe(evt, tmp, &uobj->event_list, obj_list) {  in ib_uverbs_release_uevent()
     224  while (list_empty(&ev_queue->event_list)) {  in ib_uverbs_event_read()
     235  (!list_empty(&ev_queue->event_list) ||  in ib_uverbs_event_read()
     242  event = list_entry(ev_queue->event_list.next, struct ib_uverbs_event, list);  in ib_uverbs_event_read()
     248  list_del(ev_queue->event_list.next);  in ib_uverbs_event_read()
     298  if (!list_empty(&ev_queue->event_list))  in ib_uverbs_event_poll()
     384  list_add_tail(&entry->list, &ev_queue->event_list);  in ib_uverbs_comp_handler()
     419  list_add_tail(&entry->list, &async_file->ev_queue.event_list);  in ib_uverbs_async_handler()
     433  &eobj->event_list, &eobj->events_reported);  in uverbs_uobj_event()
     472  INIT_LIST_HEAD(&ev_queue->event_list);  in ib_uverbs_init_event_queue()

  ucma.c
      81  struct list_head event_list;  member
     320  list_add_tail(&uevent->list, &ctx->file->event_list);  in ucma_connect_event_handler()
     355  list_add_tail(&uevent->list, &ctx->file->event_list);  in ucma_event_handler()
     387  while (list_empty(&file->event_list)) {  in ucma_get_event()
     394  !list_empty(&file->event_list)))  in ucma_get_event()
     400  uevent = list_first_entry(&file->event_list, struct ucma_event, list);  in ucma_get_event()
     511  list_for_each_entry_safe(uevent, tmp, &mc->ctx->file->event_list, list) {  in ucma_cleanup_mc_events()
     530  list_for_each_entry_safe(uevent, tmp, &ctx->file->event_list, list) {  in ucma_cleanup_ctx_events()
    1618  LIST_HEAD(event_list);  in ucma_migrate_id()
    1664  list_for_each_entry_safe(uevent, tmp, &cur_file->event_list, list)  in ucma_migrate_id()
    [all …]

  uverbs.h
     122  struct list_head event_list;  member
     185  struct list_head event_list;  member

  uverbs_std_types_wq.c
      69  INIT_LIST_HEAD(&obj->uevent.event_list);  in UVERBS_HANDLER()

  uverbs_std_types_cq.c
     113  INIT_LIST_HEAD(&obj->uevent.event_list);  in UVERBS_HANDLER()

  uverbs_std_types.c
     148  list_for_each_entry_safe(entry, tmp, &event_queue->event_list, list) {  in ib_uverbs_free_event_queue()

  uverbs_std_types_srq.c
     103  INIT_LIST_HEAD(&obj->uevent.event_list);  in UVERBS_HANDLER()

  uverbs_std_types_qp.c
     230  INIT_LIST_HEAD(&obj->uevent.event_list);  in UVERBS_HANDLER()
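ib_uverbs_event_read() and ucma_get_event() above show the blocking-read side of a file-backed event list: the reader sleeps on a wait queue until the list is non-empty, re-checking the condition under the lock after every wakeup before popping the first entry. A trimmed sketch of that loop, with illustrative names and error handling reduced to the interrupted-sleep case:

```c
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/wait.h>

struct my_event_file {
	struct mutex mut;
	wait_queue_head_t poll_wait;
	struct list_head event_list;
};

struct my_uevent {
	struct list_head list;
	int type;
};

static struct my_uevent *my_get_event(struct my_event_file *file)
{
	struct my_uevent *uevent;

	mutex_lock(&file->mut);
	while (list_empty(&file->event_list)) {
		mutex_unlock(&file->mut);

		/* Sleep until a producer queues an event and wakes us up;
		 * the emptiness check is repeated under the lock above. */
		if (wait_event_interruptible(file->poll_wait,
					     !list_empty(&file->event_list)))
			return ERR_PTR(-ERESTARTSYS);

		mutex_lock(&file->mut);
	}

	uevent = list_first_entry(&file->event_list, struct my_uevent, list);
	list_del(&uevent->list);
	mutex_unlock(&file->mut);
	return uevent;
}
```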
/drivers/gpu/drm/exynos/
  exynos_drm_g2d.c
     228  struct list_head event_list;  member
     375  list_add_tail(&node->event->base.link, &file_priv->event_list);  in g2d_add_cmdlist_to_inuse()
     919  if (list_empty(&runqueue_node->event_list))  in g2d_finish_event()
     922  e = list_first_entry(&runqueue_node->event_list,  in g2d_finish_event()
    1307  struct list_head *event_list;  in exynos_g2d_exec_ioctl()  local
    1314  event_list = &runqueue_node->event_list;  in exynos_g2d_exec_ioctl()
    1316  INIT_LIST_HEAD(event_list);  in exynos_g2d_exec_ioctl()
    1321  list_splice_init(&file_priv->event_list, event_list);  in exynos_g2d_exec_ioctl()
    1353  INIT_LIST_HEAD(&file_priv->event_list);  in g2d_open()

  exynos_drm_drv.h
     188  struct list_head event_list;  member
/drivers/firewire/
  core-cdev.c
      53  struct list_head event_list;  member
     259  INIT_LIST_HEAD(&client->event_list);  in fw_device_op_open()
     285  list_add_tail(&event->link, &client->event_list);  in queue_event()
     299  !list_empty(&client->event_list) ||  in dequeue_event()
     304  if (list_empty(&client->event_list) &&  in dequeue_event()
     309  event = list_first_entry(&client->event_list, struct event, link);  in dequeue_event()
    1761  list_for_each_entry_safe(event, next_event, &client->event_list, link)  in fw_device_op_release()
    1778  if (!list_empty(&client->event_list))  in fw_device_op_poll()
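fw_device_op_poll() (line 1778) and drm_poll() earlier show how the same list feeds poll()/select(): register on the wait queue with poll_wait(), then report readability whenever the event list is non-empty. A sketch with illustrative names (my_client, my_event_poll); the real handlers also take the client lock around the check:

```c
#include <linux/fs.h>
#include <linux/list.h>
#include <linux/poll.h>

struct my_client {
	wait_queue_head_t wait;
	struct list_head event_list;
};

static __poll_t my_event_poll(struct file *file, poll_table *pt)
{
	struct my_client *client = file->private_data;
	__poll_t mask = 0;

	/* Register so a later wake_up() on client->wait re-polls us. */
	poll_wait(file, &client->wait, pt);

	if (!list_empty(&client->event_list))
		mask |= EPOLLIN | EPOLLRDNORM;

	return mask;
}
```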
/drivers/net/wireless/ath/ath10k/
  qmi.h
      97  struct list_head event_list;  member

  qmi.c
     852  list_add_tail(&event->list, &qmi->event_list);  in ath10k_qmi_driver_event_post()
     998  while (!list_empty(&qmi->event_list)) {  in ath10k_qmi_driver_event_work()
     999  event = list_first_entry(&qmi->event_list,  in ath10k_qmi_driver_event_work()
    1058  INIT_LIST_HEAD(&qmi->event_list);  in ath10k_qmi_init()
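ath10k_qmi_driver_event_work() pairs the list with a workqueue: the callback side posts events with list_add_tail() and schedules work, and the work handler drains the list one entry at a time, dropping the lock while each event is processed. A sketch of that drain loop (my_qmi and my_handle_event() are stand-in names, not the ath10k ones):

```c
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/workqueue.h>

struct my_qmi {
	spinlock_t event_lock;
	struct list_head event_list;
	struct work_struct event_work;
};

struct my_qmi_event {
	struct list_head list;
	int type;
};

static void my_handle_event(struct my_qmi *qmi, struct my_qmi_event *event)
{
	/* ... driver-specific handling ... */
}

static void my_qmi_event_work(struct work_struct *work)
{
	struct my_qmi *qmi = container_of(work, struct my_qmi, event_work);
	struct my_qmi_event *event;

	spin_lock(&qmi->event_lock);
	while (!list_empty(&qmi->event_list)) {
		event = list_first_entry(&qmi->event_list,
					 struct my_qmi_event, list);
		list_del(&event->list);
		spin_unlock(&qmi->event_lock);

		my_handle_event(qmi, event);	/* process without the lock held */
		kfree(event);

		spin_lock(&qmi->event_lock);
	}
	spin_unlock(&qmi->event_lock);
}
```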
/drivers/scsi/
  virtio_scsi.c
      75  struct virtio_scsi_event_node event_list[VIRTIO_SCSI_EVENT_LEN];  member
     255  vscsi->event_list[i].vscsi = vscsi;  in virtscsi_kick_event_all()
     256  virtscsi_kick_event(vscsi, &vscsi->event_list[i]);  in virtscsi_kick_event_all()
     272  cancel_work_sync(&vscsi->event_list[i].work);  in virtscsi_cancel_event_work()

  scsi_lib.c
    2420  LIST_HEAD(event_list);  in scsi_evt_thread()
    2434  list_splice_init(&sdev->event_list, &event_list);  in scsi_evt_thread()
    2437  if (list_empty(&event_list))  in scsi_evt_thread()
    2440  list_for_each_safe(this, tmp, &event_list) {  in scsi_evt_thread()
    2471  list_add_tail(&evt->node, &sdev->event_list);  in sdev_evt_send()
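scsi_evt_thread() (and the exynos g2d ioctl earlier) drain differently: rather than popping one entry at a time, the whole pending list is moved onto a stack-local LIST_HEAD with list_splice_init(), so the lock is held only for the splice and the private copy is walked lock-free. A sketch of that splice-and-drain pattern, with illustrative names:

```c
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct my_event {
	struct list_head node;
	int type;
};

static void my_drain_events(struct list_head *pending, spinlock_t *lock)
{
	struct my_event *evt, *tmp;
	LIST_HEAD(event_list);		/* private, stack-local list head */

	spin_lock_irq(lock);
	list_splice_init(pending, &event_list);	/* take everything, leave 'pending' empty */
	spin_unlock_irq(lock);

	if (list_empty(&event_list))
		return;

	list_for_each_entry_safe(evt, tmp, &event_list, node) {
		list_del(&evt->node);
		/* ... handle evt ... */
		kfree(evt);
	}
}
```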
/drivers/gpu/drm/msm/dp/
  dp_display.c
     115  struct dp_event event_list[DP_EVENT_Q_MAX];  member
     142  todo = &dp_priv->event_list[dp_priv->event_pndx++];  in dp_add_event()
     167  todo = &dp_priv->event_list[gndx];  in dp_del_event()
    1091  todo = &dp_priv->event_list[dp_priv->event_gndx];  in hpd_event_thread()
    1099  todo_next = &dp_priv->event_list[dp_priv->event_pndx++];  in hpd_event_thread()
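Here event_list is again an array, used as a ring: a producer index (pndx) and a consumer index (gndx) chase each other around a fixed number of slots. A sketch of that array-as-ring idea, with MY_EVENT_Q_MAX and all names being illustrative; the real dp_display code adds its own wrap-around, locking, and overflow handling on top of what the hits show:

```c
#include <linux/errno.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#define MY_EVENT_Q_MAX	8

struct my_event {
	u32 event_id;
	u32 data;
};

struct my_event_ring {
	spinlock_t lock;
	u32 pndx;	/* producer (put) index */
	u32 gndx;	/* consumer (get) index */
	struct my_event event_list[MY_EVENT_Q_MAX];
};

static int my_ring_add(struct my_event_ring *ring, u32 event_id, u32 data)
{
	struct my_event *todo;
	unsigned long flags;

	spin_lock_irqsave(&ring->lock, flags);
	if ((ring->pndx + 1) % MY_EVENT_Q_MAX == ring->gndx) {
		spin_unlock_irqrestore(&ring->lock, flags);
		return -EBUSY;		/* ring full */
	}
	todo = &ring->event_list[ring->pndx];
	ring->pndx = (ring->pndx + 1) % MY_EVENT_Q_MAX;
	todo->event_id = event_id;
	todo->data = data;
	spin_unlock_irqrestore(&ring->lock, flags);
	return 0;
}

static bool my_ring_get(struct my_event_ring *ring, struct my_event *out)
{
	unsigned long flags;

	spin_lock_irqsave(&ring->lock, flags);
	if (ring->gndx == ring->pndx) {
		spin_unlock_irqrestore(&ring->lock, flags);
		return false;		/* ring empty */
	}
	*out = ring->event_list[ring->gndx];
	ring->gndx = (ring->gndx + 1) % MY_EVENT_Q_MAX;
	spin_unlock_irqrestore(&ring->lock, flags);
	return true;
}
```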
/drivers/net/wireless/ath/ath11k/
  qmi.h
     124  struct list_head event_list;  member
/drivers/media/i2c/
  saa6588.c
     405  poll_wait(a->instance, &s->read_queue, a->event_list);  in saa6588_command()
/drivers/gpu/drm/nouveau/
  nouveau_usif.c
     115  list_add_tail(&ntfy->p->base.link, &filp->event_list);  in usif_notify()