Lines Matching refs:vgdev
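
The hits below appear to come from the virtio-gpu DRM driver's virtqueue code (apparently drivers/gpu/drm/virtio/virtgpu_vq.c). Each entry gives the source line number, the matching code fragment, the enclosing function, and whether vgdev is a local or an argument there. Short reconstructions are interleaved below to show the patterns the hits belong to; anything not visible in the listed fragments is an assumption based on the mainline driver.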

57 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_ctrl_ack() local
59 schedule_work(&vgdev->ctrlq.dequeue_work); in virtio_gpu_ctrl_ack()
65 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_cursor_ack() local
67 schedule_work(&vgdev->cursorq.dequeue_work); in virtio_gpu_cursor_ack()
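
For orientation, lines 57-67 are the bodies of the two virtqueue interrupt callbacks: they do no processing in interrupt context, only kick the matching dequeue worker. Reconstructed from the fragments above (the derivation of dev from vq is an assumption based on mainline):

    void virtio_gpu_ctrl_ack(struct virtqueue *vq)
    {
        struct drm_device *dev = vq->vdev->priv;
        struct virtio_gpu_device *vgdev = dev->dev_private;  /* line 57 */

        /* defer all response handling to the dequeue worker (line 59) */
        schedule_work(&vgdev->ctrlq.dequeue_work);
    }

    void virtio_gpu_cursor_ack(struct virtqueue *vq)
    {
        struct drm_device *dev = vq->vdev->priv;
        struct virtio_gpu_device *vgdev = dev->dev_private;  /* line 65 */

        schedule_work(&vgdev->cursorq.dequeue_work);         /* line 67 */
    }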
70 int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_alloc_vbufs() argument
72 vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs", in virtio_gpu_alloc_vbufs()
76 if (!vgdev->vbufs) in virtio_gpu_alloc_vbufs()
81 void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_free_vbufs() argument
83 kmem_cache_destroy(vgdev->vbufs); in virtio_gpu_free_vbufs()
84 vgdev->vbufs = NULL; in virtio_gpu_free_vbufs()
88 virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev, in virtio_gpu_get_vbuf() argument
94 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL); in virtio_gpu_get_vbuf()
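
Lines 72-94 manage a dedicated slab cache for command buffers. A sketch of the create/destroy pair, assuming a VBUFFER_SIZE constant sized for the vbuffer header plus inline command and response space, as in mainline:

    int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev)
    {
        vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs",   /* line 72 */
                                         VBUFFER_SIZE,
                                         __alignof__(struct virtio_gpu_vbuffer),
                                         0, NULL);
        if (!vgdev->vbufs)                                     /* line 76 */
            return -ENOMEM;
        return 0;
    }

    void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev)
    {
        kmem_cache_destroy(vgdev->vbufs);                      /* line 83 */
        vgdev->vbufs = NULL;                                   /* line 84 */
    }

Note the GFP_KERNEL | __GFP_NOFAIL at line 94: vbuffer allocation cannot fail, so none of the command emitters further down need an error path for it.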
122 virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cursor() argument
128 (vgdev, sizeof(struct virtio_gpu_update_cursor), in virtio_gpu_alloc_cursor()
138 static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_resp() argument
146 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
152 static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd() argument
156 return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size, in virtio_gpu_alloc_cmd()
161 static void *virtio_gpu_alloc_cmd_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_cb() argument
166 return virtio_gpu_alloc_cmd_resp(vgdev, cb, vbuffer_p, size, in virtio_gpu_alloc_cmd_cb()
171 static void free_vbuf(struct virtio_gpu_device *vgdev, in free_vbuf() argument
177 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
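
Lines 122-177 are thin wrappers that funnel into virtio_gpu_get_vbuf and back into the slab cache. The delegation chain, with signatures hedged for this kernel vintage:

    static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev,
                                           virtio_gpu_resp_cb cb,
                                           struct virtio_gpu_vbuffer **vbuffer_p,
                                           int cmd_size, int resp_size,
                                           void *resp_buf)
    {
        struct virtio_gpu_vbuffer *vbuf;

        vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size,            /* line 146 */
                                   resp_size, resp_buf, cb);
        *vbuffer_p = vbuf;
        return vbuf->buf;   /* caller fills the inline command here */
    }

    /* plain command: no response callback, default ctrl_hdr-sized response */
    static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev,
                                      struct virtio_gpu_vbuffer **vbuffer_p,
                                      int size)
    {
        return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size,  /* line 156 */
                                         sizeof(struct virtio_gpu_ctrl_hdr),
                                         NULL);
    }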
196 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_ctrl_func() local
205 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
207 virtqueue_disable_cb(vgdev->ctrlq.vq); in virtio_gpu_dequeue_ctrl_func()
208 reclaim_vbufs(vgdev->ctrlq.vq, &reclaim_list); in virtio_gpu_dequeue_ctrl_func()
210 } while (!virtqueue_enable_cb(vgdev->ctrlq.vq)); in virtio_gpu_dequeue_ctrl_func()
211 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
216 trace_virtio_gpu_cmd_response(vgdev->ctrlq.vq, resp); in virtio_gpu_dequeue_ctrl_func()
239 entry->resp_cb(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
241 wake_up(&vgdev->ctrlq.ack_queue); in virtio_gpu_dequeue_ctrl_func()
244 virtio_gpu_fence_event_process(vgdev, fence_id); in virtio_gpu_dequeue_ctrl_func()
248 virtio_gpu_array_put_free_delayed(vgdev, entry->objs); in virtio_gpu_dequeue_ctrl_func()
250 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
256 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_cursor_func() local
263 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
265 virtqueue_disable_cb(vgdev->cursorq.vq); in virtio_gpu_dequeue_cursor_func()
266 reclaim_vbufs(vgdev->cursorq.vq, &reclaim_list); in virtio_gpu_dequeue_cursor_func()
267 } while (!virtqueue_enable_cb(vgdev->cursorq.vq)); in virtio_gpu_dequeue_cursor_func()
268 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
272 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_cursor_func()
274 wake_up(&vgdev->cursorq.ack_queue); in virtio_gpu_dequeue_cursor_func()
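
Both dequeue workers (lines 196-274) share the same reclaim loop; the disable_cb/enable_cb dance closes the race where the host completes a buffer between the final reclaim pass and re-enabling the callback. The cursor-side worker, reconstructed around the listed lines:

    void virtio_gpu_dequeue_cursor_func(struct work_struct *work)
    {
        struct virtio_gpu_device *vgdev =
            container_of(work, struct virtio_gpu_device,       /* line 256 */
                         cursorq.dequeue_work);
        struct list_head reclaim_list;
        struct virtio_gpu_vbuffer *entry, *tmp;

        INIT_LIST_HEAD(&reclaim_list);
        spin_lock(&vgdev->cursorq.qlock);                      /* line 263 */
        do {
            virtqueue_disable_cb(vgdev->cursorq.vq);           /* line 265 */
            reclaim_vbufs(vgdev->cursorq.vq, &reclaim_list);   /* line 266 */
        } while (!virtqueue_enable_cb(vgdev->cursorq.vq));     /* line 267 */
        spin_unlock(&vgdev->cursorq.qlock);                    /* line 268 */

        list_for_each_entry_safe(entry, tmp, &reclaim_list, list) {
            list_del(&entry->list);
            free_vbuf(vgdev, entry);                           /* line 272 */
        }
        /* unblock submitters sleeping in wait_event() on a full ring */
        wake_up(&vgdev->cursorq.ack_queue);                    /* line 274 */
    }

The control-queue worker (lines 196-250) adds response-type checking, per-vbuf resp_cb dispatch, fence processing, and delayed object-array release, but uses the identical reclaim loop.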
317 static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_sgs() argument
325 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_ctrl_sgs()
328 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_ctrl_sgs()
331 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_ctrl_sgs()
335 if (vgdev->has_indirect) in virtio_gpu_queue_ctrl_sgs()
339 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
342 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
343 virtio_gpu_notify(vgdev); in virtio_gpu_queue_ctrl_sgs()
344 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= elemcnt); in virtio_gpu_queue_ctrl_sgs()
352 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_ctrl_sgs()
365 atomic_inc(&vgdev->pending_commands); in virtio_gpu_queue_ctrl_sgs()
367 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
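
Lines 317-367 are the heart of control-queue submission: if the ring lacks elemcnt free descriptors, drop the lock, flush anything pending, and sleep until the dequeue worker frees space; the fence is emitted only once space is guaranteed, so fence IDs reach the ring in order. A trimmed sketch (error-path details and object-array handling elided, and the exact error return is an assumption):

    static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev,
                                         struct virtio_gpu_vbuffer *vbuf,
                                         struct virtio_gpu_fence *fence,
                                         int elemcnt, struct scatterlist **sgs,
                                         int outcnt, int incnt)
    {
        struct virtqueue *vq = vgdev->ctrlq.vq;                /* line 325 */
        int ret, idx;

        if (!drm_dev_enter(vgdev->ddev, &idx)) {               /* line 328: device gone */
            free_vbuf(vgdev, vbuf);                            /* line 331 */
            return -ENODEV;
        }

        if (vgdev->has_indirect)                               /* line 335 */
            elemcnt = 1;   /* indirect descriptors: the chain uses one slot */

    again:
        spin_lock(&vgdev->ctrlq.qlock);                        /* line 339 */
        if (vq->num_free < elemcnt) {
            spin_unlock(&vgdev->ctrlq.qlock);                  /* line 342 */
            virtio_gpu_notify(vgdev);                          /* line 343: flush backlog */
            wait_event(vgdev->ctrlq.ack_queue,                 /* line 344 */
                       vq->num_free >= elemcnt);
            goto again;
        }

        /* emit the fence only once space is guaranteed */
        if (fence)
            virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf),  /* line 352 */
                                  fence);

        ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
        WARN_ON(ret);

        atomic_inc(&vgdev->pending_commands);                  /* line 365 */
        spin_unlock(&vgdev->ctrlq.qlock);                      /* line 367 */

        drm_dev_exit(idx);
        return 0;
    }

virtio_gpu_queue_fenced_ctrl_buffer (lines 373-417) is the caller: it builds the sgs[] array from the inline command, the optional data buffer, and the optional response buffer, then hands everything to this function.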
373 static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_fenced_ctrl_buffer() argument
417 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt, in virtio_gpu_queue_fenced_ctrl_buffer()
427 void virtio_gpu_notify(struct virtio_gpu_device *vgdev) in virtio_gpu_notify() argument
431 if (!atomic_read(&vgdev->pending_commands)) in virtio_gpu_notify()
434 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
435 atomic_set(&vgdev->pending_commands, 0); in virtio_gpu_notify()
436 notify = virtqueue_kick_prepare(vgdev->ctrlq.vq); in virtio_gpu_notify()
437 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
440 virtqueue_notify(vgdev->ctrlq.vq); in virtio_gpu_notify()
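
virtio_gpu_notify (lines 427-440) implements kick batching: submitters only bump pending_commands, and one notify call flushes the batch with a single doorbell. Reconstructed:

    void virtio_gpu_notify(struct virtio_gpu_device *vgdev)
    {
        bool notify;

        if (!atomic_read(&vgdev->pending_commands))     /* line 431: nothing queued */
            return;

        spin_lock(&vgdev->ctrlq.qlock);                 /* line 434 */
        atomic_set(&vgdev->pending_commands, 0);        /* line 435 */
        notify = virtqueue_kick_prepare(vgdev->ctrlq.vq); /* line 436 */
        spin_unlock(&vgdev->ctrlq.qlock);               /* line 437 */

        if (notify)
            virtqueue_notify(vgdev->ctrlq.vq);          /* line 440 */
    }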
443 static int virtio_gpu_queue_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer() argument
446 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL); in virtio_gpu_queue_ctrl_buffer()
449 static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_cursor() argument
452 struct virtqueue *vq = vgdev->cursorq.vq; in virtio_gpu_queue_cursor()
457 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_cursor()
458 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_cursor()
466 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
470 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
471 wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt); in virtio_gpu_queue_cursor()
472 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
481 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
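
The cursor queue (lines 449-481) has no batching; it retries inline when the ring is full. Reconstructed around the listed lock/wait lines:

    static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev,
                                        struct virtio_gpu_vbuffer *vbuf)
    {
        struct virtqueue *vq = vgdev->cursorq.vq;        /* line 452 */
        struct scatterlist *sgs[1], ccmd;
        int idx, ret, outcnt = 1;
        bool notify;

        if (!drm_dev_enter(vgdev->ddev, &idx)) {         /* line 457 */
            free_vbuf(vgdev, vbuf);                      /* line 458 */
            return;
        }

        sg_init_one(&ccmd, vbuf->buf, vbuf->size);
        sgs[0] = &ccmd;

        spin_lock(&vgdev->cursorq.qlock);                /* line 466 */
    retry:
        ret = virtqueue_add_sgs(vq, sgs, outcnt, 0, vbuf, GFP_ATOMIC);
        if (ret == -ENOSPC) {
            /* ring full: sleep until the dequeue worker reclaims slots */
            spin_unlock(&vgdev->cursorq.qlock);          /* line 470 */
            wait_event(vgdev->cursorq.ack_queue,         /* line 471 */
                       vq->num_free >= outcnt);
            spin_lock(&vgdev->cursorq.qlock);            /* line 472 */
            goto retry;
        }
        notify = virtqueue_kick_prepare(vq);
        spin_unlock(&vgdev->cursorq.qlock);              /* line 481 */

        if (notify)
            virtqueue_notify(vq);

        drm_dev_exit(idx);
    }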
494 void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_create_resource() argument
503 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
513 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_create_resource()
517 static void virtio_gpu_cmd_unref_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_cb() argument
528 void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_resource() argument
535 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_unref_resource()
543 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
548 void virtio_gpu_cmd_set_scanout(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout() argument
556 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
567 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
570 void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_flush() argument
578 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
588 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_flush()
591 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_2d() argument
601 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_2d()
605 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_2d()
608 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
620 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_2d()
624 virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_attach_backing() argument
633 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
643 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_attach_backing()
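
Lines 494-643 all follow one template: allocate an inline command, fill a little-endian struct from the virtio_gpu protocol headers, and queue it (with a fence where the caller needs completion ordering). virtio_gpu_cmd_resource_flush is the smallest instance; reconstructed, with the signature hedged for this pre-fence vintage:

    void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev,
                                       uint32_t resource_id,
                                       uint32_t x, uint32_t y,
                                       uint32_t width, uint32_t height)
    {
        struct virtio_gpu_resource_flush *cmd_p;
        struct virtio_gpu_vbuffer *vbuf;

        cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));  /* line 578 */
        memset(cmd_p, 0, sizeof(*cmd_p));

        cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_FLUSH);
        cmd_p->resource_id = cpu_to_le32(resource_id);
        cmd_p->r.x = cpu_to_le32(x);
        cmd_p->r.y = cpu_to_le32(y);
        cmd_p->r.width = cpu_to_le32(width);
        cmd_p->r.height = cpu_to_le32(height);

        virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);                   /* line 588 */
    }

The transfer command at lines 591-620 adds one wrinkle: when the device lacks the DMA quirk (use_dma_api, line 601), the guest pages are synced for the device with dma_sync_sgtable_for_device before the command is queued.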
646 static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_display_info_cb() argument
653 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
654 for (i = 0; i < vgdev->num_scanouts; i++) { in virtio_gpu_cmd_get_display_info_cb()
655 vgdev->outputs[i].info = resp->pmodes[i]; in virtio_gpu_cmd_get_display_info_cb()
667 vgdev->display_info_pending = false; in virtio_gpu_cmd_get_display_info_cb()
668 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
669 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_display_info_cb()
671 if (!drm_helper_hpd_irq_event(vgdev->ddev)) in virtio_gpu_cmd_get_display_info_cb()
672 drm_kms_helper_hotplug_event(vgdev->ddev); in virtio_gpu_cmd_get_display_info_cb()
675 static void virtio_gpu_cmd_get_capset_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset_info_cb() argument
684 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
685 if (vgdev->capsets) { in virtio_gpu_cmd_get_capset_info_cb()
686 vgdev->capsets[i].id = le32_to_cpu(resp->capset_id); in virtio_gpu_cmd_get_capset_info_cb()
687 vgdev->capsets[i].max_version = le32_to_cpu(resp->capset_max_version); in virtio_gpu_cmd_get_capset_info_cb()
688 vgdev->capsets[i].max_size = le32_to_cpu(resp->capset_max_size); in virtio_gpu_cmd_get_capset_info_cb()
692 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
693 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_capset_info_cb()
696 static void virtio_gpu_cmd_capset_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_capset_cb() argument
705 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
706 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_capset_cb()
717 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
718 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_capset_cb()
733 static void virtio_gpu_cmd_get_edid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_edid_cb() argument
744 if (scanout >= vgdev->num_scanouts) in virtio_gpu_cmd_get_edid_cb()
746 output = vgdev->outputs + scanout; in virtio_gpu_cmd_get_edid_cb()
751 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
754 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
757 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_edid_cb()
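
The *_cb handlers (lines 646-757) run from the control dequeue worker: they parse vbuf->resp_buf under display_info_lock, publish the result into vgdev, and wake waiters on resp_wq. The display-info case, reconstructed around the listed lines (debug prints elided):

    static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev,
                                                   struct virtio_gpu_vbuffer *vbuf)
    {
        struct virtio_gpu_resp_display_info *resp =
            (struct virtio_gpu_resp_display_info *)vbuf->resp_buf;
        int i;

        spin_lock(&vgdev->display_info_lock);              /* line 653 */
        for (i = 0; i < vgdev->num_scanouts; i++)          /* line 654 */
            vgdev->outputs[i].info = resp->pmodes[i];      /* line 655 */

        vgdev->display_info_pending = false;               /* line 667 */
        spin_unlock(&vgdev->display_info_lock);            /* line 668 */
        wake_up(&vgdev->resp_wq);                          /* line 669 */

        /* propagate the new mode list as a hotplug event */
        if (!drm_helper_hpd_irq_event(vgdev->ddev))        /* line 671 */
            drm_kms_helper_hotplug_event(vgdev->ddev);     /* line 672 */
    }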
760 int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_display_info() argument
772 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
777 vgdev->display_info_pending = true; in virtio_gpu_cmd_get_display_info()
779 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
783 int virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx) in virtio_gpu_cmd_get_capset_info() argument
795 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
802 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
806 int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset() argument
819 if (idx >= vgdev->num_capsets) in virtio_gpu_cmd_get_capset()
822 if (version > vgdev->capsets[idx].max_version) in virtio_gpu_cmd_get_capset()
829 max_size = vgdev->capsets[idx].max_size; in virtio_gpu_cmd_get_capset()
845 cache_ent->id = vgdev->capsets[idx].id; in virtio_gpu_cmd_get_capset()
848 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
850 list_for_each_entry(search_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_get_capset()
851 if (search_ent->id == vgdev->capsets[idx].id && in virtio_gpu_cmd_get_capset()
858 list_add_tail(&cache_ent->head, &vgdev->cap_cache); in virtio_gpu_cmd_get_capset()
859 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
870 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
874 cmd_p->capset_id = cpu_to_le32(vgdev->capsets[idx].id); in virtio_gpu_cmd_get_capset()
877 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
882 int virtio_gpu_cmd_get_edids(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_edids() argument
889 if (WARN_ON(!vgdev->has_edid)) in virtio_gpu_cmd_get_edids()
892 for (scanout = 0; scanout < vgdev->num_scanouts; scanout++) { in virtio_gpu_cmd_get_edids()
899 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
904 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
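
The request side (lines 760-904) pairs each callback with a command that allocates its own response buffer and passes both to virtio_gpu_alloc_cmd_resp. get_display_info, reconstructed:

    int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev)
    {
        struct virtio_gpu_ctrl_hdr *cmd_p;
        struct virtio_gpu_vbuffer *vbuf;
        void *resp_buf;

        resp_buf = kzalloc(sizeof(struct virtio_gpu_resp_display_info),
                           GFP_KERNEL);
        if (!resp_buf)
            return -ENOMEM;

        cmd_p = virtio_gpu_alloc_cmd_resp
            (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf,     /* line 772 */
             sizeof(*cmd_p), sizeof(struct virtio_gpu_resp_display_info),
             resp_buf);
        memset(cmd_p, 0, sizeof(*cmd_p));

        vgdev->display_info_pending = true;                         /* line 777 */
        cmd_p->type = cpu_to_le32(VIRTIO_GPU_CMD_GET_DISPLAY_INFO);
        virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);                  /* line 779 */
        return 0;
    }

get_capset_info, get_capset, and get_edids (lines 783-904) follow the same shape, with get_capset additionally caching responses on vgdev->cap_cache under display_info_lock (lines 848-859).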
910 void virtio_gpu_cmd_context_create(struct virtio_gpu_device *vgdev, uint32_t id, in virtio_gpu_cmd_context_create() argument
916 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
924 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
927 void virtio_gpu_cmd_context_destroy(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_destroy() argument
933 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
938 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
941 void virtio_gpu_cmd_context_attach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_attach_resource() argument
949 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
956 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
959 void virtio_gpu_cmd_context_detach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_detach_resource() argument
967 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
974 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
978 virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_3d() argument
987 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
1005 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_create_3d()
1010 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_3d() argument
1020 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_3d()
1024 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_3d()
1027 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
1039 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_3d()
1042 void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_from_host_3d() argument
1053 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
1065 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_from_host_3d()
1068 void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_submit() argument
1077 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
1088 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_submit()
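
virtio_gpu_cmd_submit (lines 1068-1088) is the 3D execbuffer path; unlike the fixed-size commands above, it attaches a caller-provided data buffer to the vbuf so the command stream rides along as an extra out-sg. A sketch (the exact parameter order is an assumption for this vintage):

    void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev,
                               void *data, uint32_t data_size,
                               uint32_t ctx_id,
                               struct virtio_gpu_fence *fence,
                               struct virtio_gpu_object_array *objs)
    {
        struct virtio_gpu_cmd_submit *cmd_p;
        struct virtio_gpu_vbuffer *vbuf;

        cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));  /* line 1077 */
        memset(cmd_p, 0, sizeof(*cmd_p));

        vbuf->data_buf = data;     /* queued as a second out-sg */
        vbuf->data_size = data_size;
        vbuf->objs = objs;         /* fenced and released on completion */

        cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_SUBMIT_3D);
        cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id);
        cmd_p->size = cpu_to_le32(data_size);

        virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);     /* line 1088 */
    }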
1091 void virtio_gpu_object_attach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_attach() argument
1096 virtio_gpu_cmd_resource_attach_backing(vgdev, obj->hw_res_handle, in virtio_gpu_object_attach()
1100 void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev, in virtio_gpu_cursor_ping() argument
1107 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1109 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
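
virtio_gpu_cursor_ping (lines 1100-1109) copies the output's cached cursor state into a cursor-queue buffer; the scanout_id assignment is an assumption based on mainline:

    void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev,
                                struct virtio_gpu_output *output)
    {
        struct virtio_gpu_vbuffer *vbuf;
        struct virtio_gpu_update_cursor *cur_p;

        output->cursor.pos.scanout_id = cpu_to_le32(output->index);
        cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf);   /* line 1107 */
        memcpy(cur_p, &output->cursor, sizeof(output->cursor));
        virtio_gpu_queue_cursor(vgdev, vbuf);            /* line 1109 */
    }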
1112 static void virtio_gpu_cmd_resource_uuid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_uuid_cb() argument
1121 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1131 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1133 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_resource_uuid_cb()
1137 virtio_gpu_cmd_resource_assign_uuid(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_assign_uuid() argument
1147 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1149 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1155 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_resource_assign_uuid()
1163 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_assign_uuid()
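
The UUID pair (lines 1112-1163) follows the same request/callback shape, guarded by resource_export_lock so exporters see either a valid UUID or a terminal error state. A hedged sketch of the callback; field and state names outside the listed lines are assumptions based on mainline:

    static void virtio_gpu_cmd_resource_uuid_cb(struct virtio_gpu_device *vgdev,
                                                struct virtio_gpu_vbuffer *vbuf)
    {
        struct virtio_gpu_object *obj =
            gem_to_virtio_gpu_obj(vbuf->objs->objs[0]);
        struct virtio_gpu_resp_resource_uuid *resp =
            (struct virtio_gpu_resp_resource_uuid *)vbuf->resp_buf;
        uint32_t resp_type = le32_to_cpu(resp->hdr.type);

        spin_lock(&vgdev->resource_export_lock);           /* line 1121 */
        if (resp_type == VIRTIO_GPU_RESP_OK_RESOURCE_UUID) {
            memcpy(&obj->uuid, resp->uuid, sizeof(obj->uuid));
            obj->uuid_state = STATE_OK;    /* state names assumed */
        } else {
            obj->uuid_state = STATE_ERR;
        }
        spin_unlock(&vgdev->resource_export_lock);         /* line 1131 */

        wake_up_all(&vgdev->resp_wq);                      /* line 1133 */
    }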