
Lines Matching full:gpu in vn_renderer_virtgpu.c (Mesa's venus driver)

25 #include "virtio-gpu/virglrenderer_hw.h"
165 sim_syncobj_create(struct virtgpu *gpu, bool signaled) in sim_syncobj_create() argument
189 int ret = drmIoctl(gpu->fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &args); in sim_syncobj_create()
210 sim_syncobj_destroy(struct virtgpu *gpu, uint32_t syncobj_handle) in sim_syncobj_destroy() argument
296 sim_syncobj_lookup(struct virtgpu *gpu, uint32_t syncobj_handle) in sim_syncobj_lookup() argument
311 sim_syncobj_reset(struct virtgpu *gpu, uint32_t syncobj_handle) in sim_syncobj_reset() argument
313 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_reset()
325 sim_syncobj_query(struct virtgpu *gpu, in sim_syncobj_query() argument
329 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_query()
342 sim_syncobj_signal(struct virtgpu *gpu, in sim_syncobj_signal() argument
346 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_signal()
358 sim_syncobj_submit(struct virtgpu *gpu, in sim_syncobj_submit() argument
364 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_submit()
370 vn_log(gpu->instance, "failed to dup sync fd"); in sim_syncobj_submit()
380 vn_log(gpu->instance, "sorry, no simulated timeline semaphore"); in sim_syncobj_submit()
385 vn_log(gpu->instance, "non-monotonic signaling"); in sim_syncobj_submit()
407 sim_syncobj_wait(struct virtgpu *gpu, in sim_syncobj_wait() argument
422 sim_syncobj_lookup(gpu, sync->syncobj_handle); in sim_syncobj_wait()
454 sim_syncobj_export(struct virtgpu *gpu, uint32_t syncobj_handle) in sim_syncobj_export() argument
456 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_export()
472 sim_syncobj_import(struct virtgpu *gpu, uint32_t syncobj_handle, int fd) in sim_syncobj_import() argument
474 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_import()
478 if (sim_syncobj_submit(gpu, syncobj_handle, fd, 1, false)) in sim_syncobj_import()
489 sim_submit_signal_syncs(struct virtgpu *gpu, in sim_submit_signal_syncs() argument
501 int ret = sim_syncobj_submit(gpu, sync->syncobj_handle, sync_fd, in sim_submit_signal_syncs()
533 sim_submit(struct virtgpu *gpu, const struct vn_renderer_submit *submit) in sim_submit() argument
556 ret = drmIoctl(gpu->fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &args); in sim_submit()
558 vn_log(gpu->instance, "failed to execbuffer: %s", strerror(errno)); in sim_submit()
563 ret = sim_submit_signal_syncs(gpu, args.fence_fd, batch->syncs, in sim_submit()
578 ret = drmIoctl(gpu->fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &args); in sim_submit()
580 vn_log(gpu->instance, "failed to execbuffer: %s", strerror(errno)); in sim_submit()
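
The sim_submit() matches above drive DRM_IOCTL_VIRTGPU_EXECBUFFER directly and collect an out-fence fd. A minimal sketch of that ioctl, assuming libdrm's <xf86drm.h> plus the virtgpu and syncobj uapi headers (the later sketches below reuse these includes); `cmd` and `cmd_size` are stand-ins for a real venus command stream, not the driver's actual types:

#include <errno.h>
#include <stdint.h>
#include <xf86drm.h>
#include <virtgpu_drm.h> /* "drm-uapi/virtgpu_drm.h" inside the Mesa tree */

static int
submit_with_out_fence(int fd, const void *cmd, uint32_t cmd_size,
                      int *out_fence_fd)
{
   struct drm_virtgpu_execbuffer args = {
      .flags = VIRTGPU_EXECBUF_FENCE_FD_OUT, /* kernel returns a sync_file fd */
      .size = cmd_size,
      .command = (uintptr_t)cmd,
   };

   if (drmIoctl(fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &args))
      return -errno; /* drmIoctl retries EINTR/EAGAIN, then fails with errno set */

   *out_fence_fd = args.fence_fd;
   return 0;
}
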
591 virtgpu_ioctl(struct virtgpu *gpu, unsigned long request, void *args) in virtgpu_ioctl() argument
593 return drmIoctl(gpu->fd, request, args); in virtgpu_ioctl()
597 virtgpu_ioctl_getparam(struct virtgpu *gpu, uint64_t param) in virtgpu_ioctl_getparam() argument
615 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_GETPARAM, &args); in virtgpu_ioctl_getparam()
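
virtgpu_ioctl_getparam() above wraps DRM_IOCTL_VIRTGPU_GETPARAM; the kernel writes the value through a user pointer passed in the args. A sketch of that pattern:

/* Returns the parameter value, or 0 if the kernel does not know the
 * parameter (the ioctl then fails, typically with EINVAL). */
static uint64_t
getparam(int fd, uint64_t param)
{
   uint64_t value = 0;
   struct drm_virtgpu_getparam args = {
      .param = param,                /* e.g. VIRTGPU_PARAM_RESOURCE_BLOB */
      .value = (uintptr_t)&value,    /* kernel writes the result here */
   };
   return drmIoctl(fd, DRM_IOCTL_VIRTGPU_GETPARAM, &args) ? 0 : value;
}
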
620 virtgpu_ioctl_get_caps(struct virtgpu *gpu, in virtgpu_ioctl_get_caps() argument
638 return virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_GET_CAPS, &args); in virtgpu_ioctl_get_caps()
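
virtgpu_ioctl_get_caps() fetches a capability set (here the venus capset) into a caller buffer. A sketch of the underlying ioctl:

static int
get_caps(int fd, uint32_t capset_id, uint32_t version, void *data,
         uint32_t size)
{
   struct drm_virtgpu_get_caps args = {
      .cap_set_id = capset_id,   /* VIRGL_RENDERER_CAPSET_VENUS in this file */
      .cap_set_ver = version,
      .addr = (uintptr_t)data,   /* kernel copies the capset into this buffer */
      .size = size,
   };
   return drmIoctl(fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &args);
}
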
642 virtgpu_ioctl_context_init(struct virtgpu *gpu, in virtgpu_ioctl_context_init() argument
659 return virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_CONTEXT_INIT, &args); in virtgpu_ioctl_context_init()
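
virtgpu_ioctl_context_init() binds the DRM context to a capset id before any submission. A sketch of that handshake (it requires a kernel with VIRTGPU_PARAM_CONTEXT_INIT support):

static int
context_init(int fd, uint32_t capset_id)
{
   struct drm_virtgpu_context_set_param ctx_param = {
      .param = VIRTGPU_CONTEXT_PARAM_CAPSET_ID,
      .value = capset_id,
   };
   struct drm_virtgpu_context_init args = {
      .num_params = 1,
      .ctx_set_params = (uintptr_t)&ctx_param,
   };
   return drmIoctl(fd, DRM_IOCTL_VIRTGPU_CONTEXT_INIT, &args);
}
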
663 virtgpu_ioctl_resource_create_blob(struct virtgpu *gpu, in virtgpu_ioctl_resource_create_blob() argument
681 if (virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &args)) in virtgpu_ioctl_resource_create_blob()
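
virtgpu_ioctl_resource_create_blob() creates a blob resource and reads back both a GEM handle and a resource id. A sketch, with the struct fields this file's callers use:

/* Returns the GEM handle (0 on failure) and stores the host-visible
 * resource id through *res_id. */
static uint32_t
create_blob(int fd, uint32_t blob_mem, uint32_t blob_flags, uint64_t size,
            uint64_t blob_id, uint32_t *res_id)
{
   struct drm_virtgpu_resource_create_blob args = {
      .blob_mem = blob_mem,       /* e.g. VIRTGPU_BLOB_MEM_HOST3D */
      .blob_flags = blob_flags,   /* e.g. VIRTGPU_BLOB_FLAG_USE_MAPPABLE */
      .size = size,
      .blob_id = blob_id,
   };
   if (drmIoctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &args))
      return 0;
   *res_id = args.res_handle;
   return args.bo_handle;
}
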
689 virtgpu_ioctl_resource_info(struct virtgpu *gpu, in virtgpu_ioctl_resource_info() argument
697 return virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_RESOURCE_INFO, info); in virtgpu_ioctl_resource_info()
701 virtgpu_ioctl_gem_close(struct virtgpu *gpu, uint32_t gem_handle) in virtgpu_ioctl_gem_close() argument
707 ASSERTED const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_GEM_CLOSE, &args); in virtgpu_ioctl_gem_close()
712 virtgpu_ioctl_prime_handle_to_fd(struct virtgpu *gpu, in virtgpu_ioctl_prime_handle_to_fd() argument
721 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_PRIME_HANDLE_TO_FD, &args); in virtgpu_ioctl_prime_handle_to_fd()
726 virtgpu_ioctl_prime_fd_to_handle(struct virtgpu *gpu, int fd) in virtgpu_ioctl_prime_fd_to_handle() argument
732 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_PRIME_FD_TO_HANDLE, &args); in virtgpu_ioctl_prime_fd_to_handle()
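
The two PRIME wrappers above convert between GEM handles and dma-buf fds. A sketch of both directions; note that importing an already-imported dma-buf returns the same GEM handle, which is what the dedup in virtgpu_bo_create_from_dma_buf() further down relies on:

#include <stdbool.h>

static int
prime_export(int fd, uint32_t gem_handle, bool mappable)
{
   struct drm_prime_handle args = {
      .handle = gem_handle,
      .flags = DRM_CLOEXEC | (mappable ? DRM_RDWR : 0),
      .fd = -1,
   };
   return drmIoctl(fd, DRM_IOCTL_PRIME_HANDLE_TO_FD, &args) ? -1 : args.fd;
}

static uint32_t
prime_import(int fd, int dmabuf_fd)
{
   struct drm_prime_handle args = { .fd = dmabuf_fd };
   return drmIoctl(fd, DRM_IOCTL_PRIME_FD_TO_HANDLE, &args) ? 0 : args.handle;
}
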
737 virtgpu_ioctl_map(struct virtgpu *gpu, uint32_t gem_handle, size_t size) in virtgpu_ioctl_map() argument
743 if (virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_MAP, &args)) in virtgpu_ioctl_map()
746 void *ptr = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, gpu->fd, in virtgpu_ioctl_map()
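
virtgpu_ioctl_map() is a two-step map: ask the kernel for a fake mmap offset for the GEM object, then mmap the DRM fd at that offset. A sketch:

#include <sys/mman.h>

static void *
map_gem(int fd, uint32_t gem_handle, size_t size)
{
   struct drm_virtgpu_map args = { .handle = gem_handle };
   if (drmIoctl(fd, DRM_IOCTL_VIRTGPU_MAP, &args))
      return NULL;

   void *ptr = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd,
                    args.offset);
   return ptr == MAP_FAILED ? NULL : ptr;
}
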
755 virtgpu_ioctl_syncobj_create(struct virtgpu *gpu, bool signaled) in virtgpu_ioctl_syncobj_create() argument
758 return sim_syncobj_create(gpu, signaled); in virtgpu_ioctl_syncobj_create()
765 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_CREATE, &args); in virtgpu_ioctl_syncobj_create()
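
The non-simulated branch of virtgpu_ioctl_syncobj_create() above is a plain DRM syncobj creation, optionally pre-signaled. A sketch:

static uint32_t
syncobj_create(int fd, bool signaled)
{
   struct drm_syncobj_create args = {
      .flags = signaled ? DRM_SYNCOBJ_CREATE_SIGNALED : 0,
   };
   return drmIoctl(fd, DRM_IOCTL_SYNCOBJ_CREATE, &args) ? 0 : args.handle;
}
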
770 virtgpu_ioctl_syncobj_destroy(struct virtgpu *gpu, uint32_t syncobj_handle) in virtgpu_ioctl_syncobj_destroy() argument
773 sim_syncobj_destroy(gpu, syncobj_handle); in virtgpu_ioctl_syncobj_destroy()
782 virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_DESTROY, &args); in virtgpu_ioctl_syncobj_destroy()
787 virtgpu_ioctl_syncobj_handle_to_fd(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_handle_to_fd() argument
792 return sync_file ? sim_syncobj_export(gpu, syncobj_handle) : -1; in virtgpu_ioctl_syncobj_handle_to_fd()
801 int ret = virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD, &args); in virtgpu_ioctl_syncobj_handle_to_fd()
809 virtgpu_ioctl_syncobj_fd_to_handle(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_fd_to_handle() argument
814 return syncobj_handle ? sim_syncobj_import(gpu, syncobj_handle, fd) : 0; in virtgpu_ioctl_syncobj_fd_to_handle()
824 int ret = virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE, &args); in virtgpu_ioctl_syncobj_fd_to_handle()
832 virtgpu_ioctl_syncobj_reset(struct virtgpu *gpu, uint32_t syncobj_handle) in virtgpu_ioctl_syncobj_reset() argument
835 return sim_syncobj_reset(gpu, syncobj_handle); in virtgpu_ioctl_syncobj_reset()
843 return virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_RESET, &args); in virtgpu_ioctl_syncobj_reset()
847 virtgpu_ioctl_syncobj_query(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_query() argument
852 return sim_syncobj_query(gpu, syncobj_handle, point); in virtgpu_ioctl_syncobj_query()
861 return virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_QUERY, &args); in virtgpu_ioctl_syncobj_query()
865 virtgpu_ioctl_syncobj_timeline_signal(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_timeline_signal() argument
870 return sim_syncobj_signal(gpu, syncobj_handle, point); in virtgpu_ioctl_syncobj_timeline_signal()
879 return virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_TIMELINE_SIGNAL, &args); in virtgpu_ioctl_syncobj_timeline_signal()
883 virtgpu_ioctl_syncobj_timeline_wait(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_timeline_wait() argument
888 return sim_syncobj_wait(gpu, wait, wait_avail); in virtgpu_ioctl_syncobj_timeline_wait()
917 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_TIMELINE_WAIT, &args); in virtgpu_ioctl_syncobj_timeline_wait()
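
virtgpu_ioctl_syncobj_timeline_wait() waits on timeline points across several syncobjs. A sketch of the ioctl: `handles` and `points` are parallel arrays, the timeout is an absolute CLOCK_MONOTONIC deadline in nanoseconds, and WAIT_AVAILABLE waits only for the points to have a fence rather than to signal:

static int
timeline_wait(int fd, const uint32_t *handles, const uint64_t *points,
              uint32_t count, int64_t abs_timeout_nsec, bool wait_avail)
{
   struct drm_syncobj_timeline_wait args = {
      .handles = (uintptr_t)handles,
      .points = (uintptr_t)points,
      .timeout_nsec = abs_timeout_nsec,
      .count_handles = count,
      .flags = DRM_SYNCOBJ_WAIT_FLAGS_WAIT_ALL |
               (wait_avail ? DRM_SYNCOBJ_WAIT_FLAGS_WAIT_AVAILABLE : 0),
   };
   return drmIoctl(fd, DRM_IOCTL_SYNCOBJ_TIMELINE_WAIT, &args);
}
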
925 virtgpu_ioctl_submit(struct virtgpu *gpu, in virtgpu_ioctl_submit() argument
929 return sim_submit(gpu, submit); in virtgpu_ioctl_submit()
939 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_write() local
943 virtgpu_ioctl_syncobj_timeline_signal(gpu, sync->syncobj_handle, val); in virtgpu_sync_write()
953 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_read() local
957 virtgpu_ioctl_syncobj_query(gpu, sync->syncobj_handle, val); in virtgpu_sync_read()
967 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_reset() local
970 int ret = virtgpu_ioctl_syncobj_reset(gpu, sync->syncobj_handle); in virtgpu_sync_reset()
972 ret = virtgpu_ioctl_syncobj_timeline_signal(gpu, sync->syncobj_handle, in virtgpu_sync_reset()
984 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_export_syncobj() local
987 return virtgpu_ioctl_syncobj_handle_to_fd(gpu, sync->syncobj_handle, in virtgpu_sync_export_syncobj()
995 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_destroy() local
998 virtgpu_ioctl_syncobj_destroy(gpu, sync->syncobj_handle); in virtgpu_sync_destroy()
1009 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_create_from_syncobj() local
1013 syncobj_handle = virtgpu_ioctl_syncobj_create(gpu, false); in virtgpu_sync_create_from_syncobj()
1016 if (!virtgpu_ioctl_syncobj_fd_to_handle(gpu, fd, syncobj_handle)) { in virtgpu_sync_create_from_syncobj()
1017 virtgpu_ioctl_syncobj_destroy(gpu, syncobj_handle); in virtgpu_sync_create_from_syncobj()
1021 syncobj_handle = virtgpu_ioctl_syncobj_fd_to_handle(gpu, fd, 0); in virtgpu_sync_create_from_syncobj()
1028 virtgpu_ioctl_syncobj_destroy(gpu, syncobj_handle); in virtgpu_sync_create_from_syncobj()
1046 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_create() local
1055 virtgpu_ioctl_syncobj_create(gpu, signaled); in virtgpu_sync_create()
1061 virtgpu_ioctl_syncobj_timeline_signal(gpu, syncobj_handle, initial_val); in virtgpu_sync_create()
1063 virtgpu_ioctl_syncobj_destroy(gpu, syncobj_handle); in virtgpu_sync_create()
1069 virtgpu_ioctl_syncobj_destroy(gpu, syncobj_handle); in virtgpu_sync_create()
1074 /* we will have a sync_id when shareable is true and virtio-gpu associates in virtgpu_sync_create()
1105 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_map() local
1112 virtgpu_ioctl_map(gpu, bo->gem_handle, bo->base.mmap_size); in virtgpu_bo_map()
1122 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_export_dma_buf() local
1128 ? virtgpu_ioctl_prime_handle_to_fd(gpu, bo->gem_handle, mappable) in virtgpu_bo_export_dma_buf()
1135 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_destroy() local
1138 mtx_lock(&gpu->dma_buf_import_mutex); in virtgpu_bo_destroy()
1144 mtx_unlock(&gpu->dma_buf_import_mutex); in virtgpu_bo_destroy()
1150 virtgpu_ioctl_gem_close(gpu, bo->gem_handle); in virtgpu_bo_destroy()
1155 mtx_unlock(&gpu->dma_buf_import_mutex); in virtgpu_bo_destroy()
1182 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_create_from_dma_buf() local
1187 mtx_lock(&gpu->dma_buf_import_mutex); in virtgpu_bo_create_from_dma_buf()
1189 gem_handle = virtgpu_ioctl_prime_fd_to_handle(gpu, fd); in virtgpu_bo_create_from_dma_buf()
1192 bo = util_sparse_array_get(&gpu->bo_array, gem_handle); in virtgpu_bo_create_from_dma_buf()
1194 if (virtgpu_ioctl_resource_info(gpu, gem_handle, &info)) in virtgpu_bo_create_from_dma_buf()
1201 if (info.blob_mem != gpu->bo_blob_mem) in virtgpu_bo_create_from_dma_buf()
1252 mtx_unlock(&gpu->dma_buf_import_mutex); in virtgpu_bo_create_from_dma_buf()
1260 virtgpu_ioctl_gem_close(gpu, gem_handle); in virtgpu_bo_create_from_dma_buf()
1261 mtx_unlock(&gpu->dma_buf_import_mutex); in virtgpu_bo_create_from_dma_buf()
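
The mtx_lock()/mtx_unlock() pairs in virtgpu_bo_create_from_dma_buf() and virtgpu_bo_destroy() above exist because PRIME import of an already-imported dma-buf returns the same GEM handle: the import and the refcount update must be atomic with respect to a concurrent destroy closing that handle. A sketch of the import side, using prime_import() from above; `bo_lookup` is a hypothetical stand-in for the util_sparse_array_get() lookup:

#include <threads.h> /* C11 mtx_t, as used by this file */

static struct virtgpu_bo *
import_dma_buf_locked(struct virtgpu *gpu, int dmabuf_fd)
{
   mtx_lock(&gpu->dma_buf_import_mutex);

   uint32_t handle = prime_import(gpu->fd, dmabuf_fd);
   struct virtgpu_bo *bo = handle ? bo_lookup(gpu, handle) : NULL;
   /* ... validate blob_mem against gpu->bo_blob_mem, bump the refcount,
    * and initialize the bo on first import, all while still locked ... */

   mtx_unlock(&gpu->dma_buf_import_mutex);
   return bo;
}
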
1274 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_create_from_device_memory() local
1279 gpu, gpu->bo_blob_mem, blob_flags, size, mem_id, &res_id); in virtgpu_bo_create_from_device_memory()
1283 struct virtgpu_bo *bo = util_sparse_array_get(&gpu->bo_array, gem_handle); in virtgpu_bo_create_from_device_memory()
1303 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_shmem_destroy_now() local
1307 virtgpu_ioctl_gem_close(gpu, shmem->gem_handle); in virtgpu_shmem_destroy_now()
1314 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_shmem_destroy() local
1316 if (vn_renderer_shmem_cache_add(&gpu->shmem_cache, shmem)) in virtgpu_shmem_destroy()
1319 virtgpu_shmem_destroy_now(&gpu->base, shmem); in virtgpu_shmem_destroy()
1325 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_shmem_create() local
1328 vn_renderer_shmem_cache_get(&gpu->shmem_cache, size); in virtgpu_shmem_create()
1336 gpu, gpu->shmem_blob_mem, VIRTGPU_BLOB_FLAG_USE_MAPPABLE, size, 0, in virtgpu_shmem_create()
1341 void *ptr = virtgpu_ioctl_map(gpu, gem_handle, size); in virtgpu_shmem_create()
1343 virtgpu_ioctl_gem_close(gpu, gem_handle); in virtgpu_shmem_create()
1348 util_sparse_array_get(&gpu->shmem_array, gem_handle); in virtgpu_shmem_create()
1366 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_wait() local
1368 const int ret = virtgpu_ioctl_syncobj_timeline_wait(gpu, wait, false); in virtgpu_wait()
1379 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_submit() local
1381 const int ret = virtgpu_ioctl_submit(gpu, submit); in virtgpu_submit()
1386 virtgpu_init_renderer_info(struct virtgpu *gpu) in virtgpu_init_renderer_info() argument
1388 struct vn_renderer_info *info = &gpu->base.info; in virtgpu_init_renderer_info()
1390 info->drm.has_primary = gpu->has_primary; in virtgpu_init_renderer_info()
1391 info->drm.primary_major = gpu->primary_major; in virtgpu_init_renderer_info()
1392 info->drm.primary_minor = gpu->primary_minor; in virtgpu_init_renderer_info()
1394 info->drm.render_major = gpu->render_major; in virtgpu_init_renderer_info()
1395 info->drm.render_minor = gpu->render_minor; in virtgpu_init_renderer_info()
1400 if (gpu->bustype == DRM_BUS_PCI) { in virtgpu_init_renderer_info()
1402 info->pci.domain = gpu->pci_bus_info.domain; in virtgpu_init_renderer_info()
1403 info->pci.bus = gpu->pci_bus_info.bus; in virtgpu_init_renderer_info()
1404 info->pci.device = gpu->pci_bus_info.dev; in virtgpu_init_renderer_info()
1405 info->pci.function = gpu->pci_bus_info.func; in virtgpu_init_renderer_info()
1420 info->max_sync_queue_count = gpu->max_sync_queue_count; in virtgpu_init_renderer_info()
1422 const struct virgl_renderer_capset_venus *capset = &gpu->capset.data; in virtgpu_init_renderer_info()
1439 if (gpu->bo_blob_mem == VIRTGPU_BLOB_MEM_GUEST_VRAM) in virtgpu_init_renderer_info()
1447 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_destroy() local
1449 vn_renderer_shmem_cache_fini(&gpu->shmem_cache); in virtgpu_destroy()
1451 if (gpu->fd >= 0) in virtgpu_destroy()
1452 close(gpu->fd); in virtgpu_destroy()
1454 mtx_destroy(&gpu->dma_buf_import_mutex); in virtgpu_destroy()
1456 util_sparse_array_finish(&gpu->shmem_array); in virtgpu_destroy()
1457 util_sparse_array_finish(&gpu->bo_array); in virtgpu_destroy()
1459 vk_free(alloc, gpu); in virtgpu_destroy()
1463 virtgpu_init_shmem_blob_mem(struct virtgpu *gpu) in virtgpu_init_shmem_blob_mem() argument
1484 gpu->shmem_blob_mem = gpu->capset.data.supports_blob_id_0 in virtgpu_init_shmem_blob_mem()
1490 virtgpu_init_context(struct virtgpu *gpu) in virtgpu_init_context() argument
1492 assert(!gpu->capset.version); in virtgpu_init_context()
1493 const int ret = virtgpu_ioctl_context_init(gpu, gpu->capset.id); in virtgpu_init_context()
1496 vn_log(gpu->instance, "failed to initialize context: %s", in virtgpu_init_context()
1506 virtgpu_init_capset(struct virtgpu *gpu) in virtgpu_init_capset() argument
1508 gpu->capset.id = VIRGL_RENDERER_CAPSET_VENUS; in virtgpu_init_capset()
1509 gpu->capset.version = 0; in virtgpu_init_capset()
1512 virtgpu_ioctl_get_caps(gpu, gpu->capset.id, gpu->capset.version, in virtgpu_init_capset()
1513 &gpu->capset.data, sizeof(gpu->capset.data)); in virtgpu_init_capset()
1516 vn_log(gpu->instance, "failed to get venus v%d capset: %s", in virtgpu_init_capset()
1517 gpu->capset.version, strerror(errno)); in virtgpu_init_capset()
1526 virtgpu_init_params(struct virtgpu *gpu) in virtgpu_init_params() argument
1535 val = virtgpu_ioctl_getparam(gpu, required_params[i]); in virtgpu_init_params()
1538 vn_log(gpu->instance, "required kernel param %d is missing", in virtgpu_init_params()
1545 val = virtgpu_ioctl_getparam(gpu, VIRTGPU_PARAM_HOST_VISIBLE); in virtgpu_init_params()
1547 gpu->bo_blob_mem = VIRTGPU_BLOB_MEM_HOST3D; in virtgpu_init_params()
1549 val = virtgpu_ioctl_getparam(gpu, VIRTGPU_PARAM_GUEST_VRAM); in virtgpu_init_params()
1551 gpu->bo_blob_mem = VIRTGPU_BLOB_MEM_GUEST_VRAM; in virtgpu_init_params()
1556 vn_log(gpu->instance, in virtgpu_init_params()
1562 val = virtgpu_ioctl_getparam(gpu, VIRTGPU_PARAM_MAX_SYNC_QUEUE_COUNT); in virtgpu_init_params()
1565 vn_log(gpu->instance, "no sync queue support"); in virtgpu_init_params()
1568 gpu->max_sync_queue_count = val; in virtgpu_init_params()
1574 virtgpu_open_device(struct virtgpu *gpu, const drmDevicePtr dev) in virtgpu_open_device() argument
1600 vn_log(gpu->instance, "skipping DRM device %s", name); in virtgpu_open_device()
1611 vn_log(gpu->instance, "failed to open %s", node_path); in virtgpu_open_device()
1620 vn_log(gpu->instance, "unknown DRM driver %s version %d", in virtgpu_open_device()
1623 vn_log(gpu->instance, "failed to get DRM driver version"); in virtgpu_open_device()
1632 gpu->fd = fd; in virtgpu_open_device()
1636 gpu->has_primary = true; in virtgpu_open_device()
1637 gpu->primary_major = major(st.st_rdev); in virtgpu_open_device()
1638 gpu->primary_minor = minor(st.st_rdev); in virtgpu_open_device()
1640 gpu->has_primary = false; in virtgpu_open_device()
1641 gpu->primary_major = 0; in virtgpu_open_device()
1642 gpu->primary_minor = 0; in virtgpu_open_device()
1645 gpu->render_major = major(st.st_rdev); in virtgpu_open_device()
1646 gpu->render_minor = minor(st.st_rdev); in virtgpu_open_device()
1648 gpu->bustype = dev->bustype; in virtgpu_open_device()
1650 gpu->pci_bus_info = *dev->businfo.pci; in virtgpu_open_device()
1655 vn_log(gpu->instance, "using DRM device %s", node_path); in virtgpu_open_device()
1661 virtgpu_open(struct virtgpu *gpu) in virtgpu_open() argument
1667 vn_log(gpu->instance, "failed to enumerate DRM devices"); in virtgpu_open()
1673 result = virtgpu_open_device(gpu, devs[i]); in virtgpu_open()
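
virtgpu_open() enumerates DRM devices and hands each to virtgpu_open_device(), which skips non-virtio devices and opens the render node. A sketch of that enumeration with libdrm:

#include <fcntl.h>

static int
open_virtgpu_render_node(void)
{
   drmDevicePtr devs[8];
   int count = drmGetDevices2(0, devs, 8);
   int fd = -1;

   for (int i = 0; i < count && fd < 0; i++) {
      if (!(devs[i]->available_nodes & (1 << DRM_NODE_RENDER)))
         continue;
      fd = open(devs[i]->nodes[DRM_NODE_RENDER], O_RDWR | O_CLOEXEC);
      /* a full implementation also checks drmGetVersion() for the
       * "virtio_gpu" driver name, as virtgpu_open_device() above does */
   }

   drmFreeDevices(devs, count);
   return fd;
}
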
1684 virtgpu_init(struct virtgpu *gpu) in virtgpu_init() argument
1686 util_sparse_array_init(&gpu->shmem_array, sizeof(struct virtgpu_shmem), in virtgpu_init()
1688 util_sparse_array_init(&gpu->bo_array, sizeof(struct virtgpu_bo), 1024); in virtgpu_init()
1690 mtx_init(&gpu->dma_buf_import_mutex, mtx_plain); in virtgpu_init()
1692 VkResult result = virtgpu_open(gpu); in virtgpu_init()
1694 result = virtgpu_init_params(gpu); in virtgpu_init()
1696 result = virtgpu_init_capset(gpu); in virtgpu_init()
1698 result = virtgpu_init_context(gpu); in virtgpu_init()
1702 virtgpu_init_shmem_blob_mem(gpu); in virtgpu_init()
1704 vn_renderer_shmem_cache_init(&gpu->shmem_cache, &gpu->base, in virtgpu_init()
1707 virtgpu_init_renderer_info(gpu); in virtgpu_init()
1709 gpu->base.ops.destroy = virtgpu_destroy; in virtgpu_init()
1710 gpu->base.ops.submit = virtgpu_submit; in virtgpu_init()
1711 gpu->base.ops.wait = virtgpu_wait; in virtgpu_init()
1713 gpu->base.shmem_ops.create = virtgpu_shmem_create; in virtgpu_init()
1714 gpu->base.shmem_ops.destroy = virtgpu_shmem_destroy; in virtgpu_init()
1716 gpu->base.bo_ops.create_from_device_memory = in virtgpu_init()
1718 gpu->base.bo_ops.create_from_dma_buf = virtgpu_bo_create_from_dma_buf; in virtgpu_init()
1719 gpu->base.bo_ops.destroy = virtgpu_bo_destroy; in virtgpu_init()
1720 gpu->base.bo_ops.export_dma_buf = virtgpu_bo_export_dma_buf; in virtgpu_init()
1721 gpu->base.bo_ops.map = virtgpu_bo_map; in virtgpu_init()
1722 gpu->base.bo_ops.flush = virtgpu_bo_flush; in virtgpu_init()
1723 gpu->base.bo_ops.invalidate = virtgpu_bo_invalidate; in virtgpu_init()
1725 gpu->base.sync_ops.create = virtgpu_sync_create; in virtgpu_init()
1726 gpu->base.sync_ops.create_from_syncobj = virtgpu_sync_create_from_syncobj; in virtgpu_init()
1727 gpu->base.sync_ops.destroy = virtgpu_sync_destroy; in virtgpu_init()
1728 gpu->base.sync_ops.export_syncobj = virtgpu_sync_export_syncobj; in virtgpu_init()
1729 gpu->base.sync_ops.reset = virtgpu_sync_reset; in virtgpu_init()
1730 gpu->base.sync_ops.read = virtgpu_sync_read; in virtgpu_init()
1731 gpu->base.sync_ops.write = virtgpu_sync_write; in virtgpu_init()
1741 struct virtgpu *gpu = vk_zalloc(alloc, sizeof(*gpu), VN_DEFAULT_ALIGN, in vn_renderer_create_virtgpu() local
1743 if (!gpu) in vn_renderer_create_virtgpu()
1746 gpu->instance = instance; in vn_renderer_create_virtgpu()
1747 gpu->fd = -1; in vn_renderer_create_virtgpu()
1749 VkResult result = virtgpu_init(gpu); in vn_renderer_create_virtgpu()
1751 virtgpu_destroy(&gpu->base, alloc); in vn_renderer_create_virtgpu()
1755 *renderer = &gpu->base; in vn_renderer_create_virtgpu()