Lines Matching refs:vgdev
39 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_create_context() local
48 virtio_gpu_cmd_context_create(vgdev, vfpriv->ctx_id, in virtio_gpu_create_context()
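Every handler listed here opens the same way: it recovers the per-device state from dev->dev_private and then issues host commands tagged with the caller's context. A minimal sketch of the create_context shape at lines 39 and 48 follows; the dbgname handling and the trailing arguments of virtio_gpu_cmd_context_create are assumptions, since the listing truncates the call and the signature varies across kernel versions.

#include <linux/sched.h>	/* get_task_comm(), TASK_COMM_LEN */
#include "virtgpu_drv.h"	/* driver-internal types; assumed header */

/* sketch only, not the exact in-tree handler */
static int example_create_context(struct drm_device *dev, void *data,
				  struct drm_file *file)
{
	struct virtio_gpu_device *vgdev = dev->dev_private;	/* line 39 */
	struct virtio_gpu_fpriv *vfpriv = file->driver_priv;
	char dbgname[TASK_COMM_LEN];

	get_task_comm(dbgname, current);
	/* ask the host to create a rendering context for this ctx_id */
	virtio_gpu_cmd_context_create(vgdev, vfpriv->ctx_id,	/* line 48 */
				      strlen(dbgname), dbgname);
	return 0;
}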
59 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_map_ioctl() local
62 return virtio_gpu_mode_dumb_mmap(file, vgdev->ddev, in virtio_gpu_map_ioctl()
77 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_execbuffer_ioctl() local
89 if (vgdev->has_virgl_3d == false) in virtio_gpu_execbuffer_ioctl()
111 if (!dma_fence_match_context(in_fence, vgdev->fence_drv.context)) in virtio_gpu_execbuffer_ioctl()
162 out_fence = virtio_gpu_fence_alloc(vgdev); in virtio_gpu_execbuffer_ioctl()
180 virtio_gpu_cmd_submit(vgdev, buf, exbuf->size, in virtio_gpu_execbuffer_ioctl()
183 virtio_gpu_notify(vgdev); in virtio_gpu_execbuffer_ioctl()
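The execbuffer hits show the submit pattern around vgdev: reject the ioctl when the host lacks virgl 3D (line 89), skip waiting on fences from the driver's own fence context (line 111), allocate a driver fence (line 162), queue the command buffer for the caller's context (line 180), then kick the host (line 183). A sketch of that sequence follows; the trailing arguments of virtio_gpu_cmd_submit (object list, fence) are assumptions because the listing truncates them.

/* sketch only, same headers as above; buf/buflist stand in for the
 * copied-in userspace request */
static int example_execbuffer(struct virtio_gpu_device *vgdev,
			      struct virtio_gpu_fpriv *vfpriv,
			      struct drm_virtgpu_execbuffer *exbuf,
			      void *buf,
			      struct virtio_gpu_object_array *buflist)
{
	struct virtio_gpu_fence *out_fence;

	if (vgdev->has_virgl_3d == false)		/* line 89 */
		return -ENOSYS;

	out_fence = virtio_gpu_fence_alloc(vgdev);	/* line 162 */
	if (!out_fence)
		return -ENOMEM;

	virtio_gpu_cmd_submit(vgdev, buf, exbuf->size,	/* line 180 */
			      vfpriv->ctx_id, buflist, out_fence);
	virtio_gpu_notify(vgdev);			/* line 183: kick the host */
	return 0;
}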
205 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_getparam_ioctl() local
211 value = vgdev->has_virgl_3d == true ? 1 : 0; in virtio_gpu_getparam_ioctl()
228 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_resource_create_ioctl() local
237 if (vgdev->has_virgl_3d) { in virtio_gpu_resource_create_ioctl()
268 fence = virtio_gpu_fence_alloc(vgdev); in virtio_gpu_resource_create_ioctl()
271 ret = virtio_gpu_object_create(vgdev, &params, &qobj, fence); in virtio_gpu_resource_create_ioctl()
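For resource creation, vgdev selects the path (line 237): with virgl 3D the object is created with a fence attached so the host has acknowledged the backing resource before any command references it. A hedged fragment of that fenced path follows; the argument order matches line 271, but the surrounding types and error handling are assumptions.

/* sketch only, same headers as above; the real handler also drops the
 * fence reference on failure */
static int example_resource_create_3d(struct virtio_gpu_device *vgdev,
				      struct virtio_gpu_object_params *params)
{
	struct virtio_gpu_object *qobj;
	struct virtio_gpu_fence *fence;
	int ret;

	fence = virtio_gpu_fence_alloc(vgdev);			/* line 268 */
	if (!fence)
		return -ENOMEM;

	ret = virtio_gpu_object_create(vgdev, params, &qobj, fence); /* line 271 */
	return ret;
}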
320 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_transfer_from_host_ioctl() local
328 if (vgdev->has_virgl_3d == false) in virtio_gpu_transfer_from_host_ioctl()
340 fence = virtio_gpu_fence_alloc(vgdev); in virtio_gpu_transfer_from_host_ioctl()
346 (vgdev, vfpriv->ctx_id, offset, args->level, in virtio_gpu_transfer_from_host_ioctl()
349 virtio_gpu_notify(vgdev); in virtio_gpu_transfer_from_host_ioctl()
362 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_transfer_to_host_ioctl() local
374 if (!vgdev->has_virgl_3d) { in virtio_gpu_transfer_to_host_ioctl()
376 (vgdev, offset, in virtio_gpu_transfer_to_host_ioctl()
386 fence = virtio_gpu_fence_alloc(vgdev); in virtio_gpu_transfer_to_host_ioctl()
391 (vgdev, in virtio_gpu_transfer_to_host_ioctl()
396 virtio_gpu_notify(vgdev); in virtio_gpu_transfer_to_host_ioctl()
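Both transfer ioctls branch on the same flag: without virgl 3D only the plain 2D to-host transfer is issued (lines 374-376), while the 3D paths allocate a fence and send the transfer for the caller's context (lines 386-395); either way the host is notified afterwards (line 396). The listing truncates the command calls, so the sketch below hides them behind hypothetical issue_transfer_2d()/issue_transfer_3d() placeholders and only shows the vgdev flow around them.

/* sketch only; issue_transfer_2d/_3d are hypothetical stand-ins for the
 * driver's virtio_gpu_cmd_transfer_* calls, whose full argument lists the
 * listing truncates */
static void issue_transfer_2d(struct virtio_gpu_device *vgdev);
static void issue_transfer_3d(struct virtio_gpu_device *vgdev,
			      uint32_t ctx_id, struct virtio_gpu_fence *fence);

static int example_transfer_to_host(struct virtio_gpu_device *vgdev,
				    struct virtio_gpu_fpriv *vfpriv)
{
	struct virtio_gpu_fence *fence;

	if (!vgdev->has_virgl_3d) {			/* line 374: 2D-only host */
		issue_transfer_2d(vgdev);		/* line 376 */
	} else {
		fence = virtio_gpu_fence_alloc(vgdev);	/* line 386 */
		if (!fence)
			return -ENOMEM;
		issue_transfer_3d(vgdev, vfpriv->ctx_id, fence); /* lines 391-395 */
	}
	virtio_gpu_notify(vgdev);			/* line 396 */
	return 0;
}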
436 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_get_caps_ioctl() local
445 if (vgdev->num_capsets == 0) in virtio_gpu_get_caps_ioctl()
452 spin_lock(&vgdev->display_info_lock); in virtio_gpu_get_caps_ioctl()
453 for (i = 0; i < vgdev->num_capsets; i++) { in virtio_gpu_get_caps_ioctl()
454 if (vgdev->capsets[i].id == args->cap_set_id) { in virtio_gpu_get_caps_ioctl()
455 if (vgdev->capsets[i].max_version >= args->cap_set_ver) { in virtio_gpu_get_caps_ioctl()
463 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_get_caps_ioctl()
467 host_caps_size = vgdev->capsets[found_valid].max_size; in virtio_gpu_get_caps_ioctl()
471 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_get_caps_ioctl()
474 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_get_caps_ioctl()
478 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_get_caps_ioctl()
481 ret = virtio_gpu_cmd_get_capset(vgdev, found_valid, args->cap_set_ver, in virtio_gpu_get_caps_ioctl()
485 virtio_gpu_notify(vgdev); in virtio_gpu_get_caps_ioctl()
488 ret = wait_event_timeout(vgdev->resp_wq, in virtio_gpu_get_caps_ioctl()
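The get_caps hits show the capset lookup: validate the requested id and version against vgdev->capsets under display_info_lock, reuse a cached response from vgdev->cap_cache when one exists (line 471), otherwise ask the host for the capset (bounded by capsets[found_valid].max_size, line 467) and sleep on vgdev->resp_wq until the reply lands. A sketch of that flow follows; the cache-entry type, the wait condition and the 5-second timeout are assumptions beyond what the listing shows.

/* sketch only, same headers as above; cache matching and the copy back to
 * userspace are omitted */
static int example_get_caps(struct virtio_gpu_device *vgdev,
			    struct drm_virtgpu_get_caps *args)
{
	struct virtio_gpu_drv_cap_cache *cache_ent;
	int found_valid = -1;
	int i, ret;

	if (vgdev->num_capsets == 0)				/* line 445 */
		return -ENOSYS;

	spin_lock(&vgdev->display_info_lock);			/* line 452 */
	for (i = 0; i < vgdev->num_capsets; i++) {		/* line 453 */
		if (vgdev->capsets[i].id == args->cap_set_id &&	/* line 454 */
		    vgdev->capsets[i].max_version >= args->cap_set_ver) { /* line 455 */
			found_valid = i;
			break;
		}
	}
	if (found_valid == -1) {
		spin_unlock(&vgdev->display_info_lock);		/* line 463 */
		return -EINVAL;
	}
	/* a cached entry for this capset/version would be reused here
	 * (list_for_each_entry over vgdev->cap_cache, line 471) */
	spin_unlock(&vgdev->display_info_lock);			/* line 478 */

	ret = virtio_gpu_cmd_get_capset(vgdev, found_valid,	/* line 481 */
					args->cap_set_ver, &cache_ent);
	if (ret)
		return ret;
	virtio_gpu_notify(vgdev);				/* line 485 */

	/* wait for the host's response to mark the cache entry valid */
	ret = wait_event_timeout(vgdev->resp_wq,		/* line 488 */
				 atomic_read(&cache_ent->is_valid),
				 5 * HZ);
	if (!ret)
		return -EBUSY;
	return 0;
}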