Lines matching full:gpu
25 #include "virtio-gpu/virglrenderer_hw.h"
144 sim_syncobj_create(struct virtgpu *gpu, bool signaled) in sim_syncobj_create() argument
169 int ret = drmIoctl(gpu->fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &args); in sim_syncobj_create()
190 sim_syncobj_destroy(struct virtgpu *gpu, uint32_t syncobj_handle) in sim_syncobj_destroy() argument
276 sim_syncobj_lookup(struct virtgpu *gpu, uint32_t syncobj_handle) in sim_syncobj_lookup() argument
291 sim_syncobj_reset(struct virtgpu *gpu, uint32_t syncobj_handle) in sim_syncobj_reset() argument
293 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_reset()
305 sim_syncobj_query(struct virtgpu *gpu, in sim_syncobj_query() argument
309 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_query()
322 sim_syncobj_signal(struct virtgpu *gpu, in sim_syncobj_signal() argument
326 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_signal()
338 sim_syncobj_submit(struct virtgpu *gpu, in sim_syncobj_submit() argument
344 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_submit()
350 vn_log(gpu->instance, "failed to dup sync fd"); in sim_syncobj_submit()
360 vn_log(gpu->instance, "sorry, no simulated timeline semaphore"); in sim_syncobj_submit()
365 vn_log(gpu->instance, "non-monotonic signaling"); in sim_syncobj_submit()
387 sim_syncobj_wait(struct virtgpu *gpu, in sim_syncobj_wait() argument
402 sim_syncobj_lookup(gpu, sync->syncobj_handle); in sim_syncobj_wait()
434 sim_syncobj_export(struct virtgpu *gpu, uint32_t syncobj_handle) in sim_syncobj_export() argument
436 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_export()
452 sim_syncobj_import(struct virtgpu *gpu, uint32_t syncobj_handle, int fd) in sim_syncobj_import() argument
454 struct sim_syncobj *syncobj = sim_syncobj_lookup(gpu, syncobj_handle); in sim_syncobj_import()
458 if (sim_syncobj_submit(gpu, syncobj_handle, fd, 1, false)) in sim_syncobj_import()
469 sim_submit_signal_syncs(struct virtgpu *gpu, in sim_submit_signal_syncs() argument
481 int ret = sim_syncobj_submit(gpu, sync->syncobj_handle, sync_fd, in sim_submit_signal_syncs()
513 sim_submit(struct virtgpu *gpu, const struct vn_renderer_submit *submit) in sim_submit() argument
540 ret = drmIoctl(gpu->fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &args); in sim_submit()
542 vn_log(gpu->instance, "failed to execbuffer: %s", strerror(errno)); in sim_submit()
547 ret = sim_submit_signal_syncs(gpu, args.fence_fd, batch->syncs, in sim_submit()
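The sim_syncobj_* and sim_submit hits above appear to come from Mesa's Venus renderer (vn_renderer_virtgpu.c), where DRM syncobjs are emulated in userspace for setups without native syncobj support: each simulated object pairs a timeline point with a sync-file fd handed back by DRM_IOCTL_VIRTGPU_EXECBUFFER. A minimal sketch of the submit step follows; the struct layout and _sketch names are mine, and only the dup() and the log messages are actually visible in the hits.

#include <stdint.h>
#include <threads.h>
#include <unistd.h>

struct sim_syncobj_sketch {
   mtx_t mutex;
   uint64_t point;      /* last signaled timeline value */
   int pending_fd;      /* sync file for the in-flight signal, or -1 */
   uint64_t pending_point;
};

/* Record that sync_fd will signal `point` once the GPU work completes.
 * The caller keeps ownership of sync_fd, hence the dup() (the "failed to
 * dup sync fd" path above). */
static int
sim_syncobj_submit_sketch(struct sim_syncobj_sketch *syncobj,
                          int sync_fd, uint64_t point)
{
   const int fd = dup(sync_fd);
   if (fd < 0)
      return -1;

   mtx_lock(&syncobj->mutex);
   /* Only one outstanding signal is emulated; a second pending fd is where
    * the real code logs "sorry, no simulated timeline semaphore", and a
    * point <= syncobj->point triggers the "non-monotonic signaling" log. */
   if (syncobj->pending_fd >= 0)
      close(syncobj->pending_fd);
   syncobj->pending_fd = fd;
   syncobj->pending_point = point;
   mtx_unlock(&syncobj->mutex);
   return 0;
}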
563 virtgpu_ioctl(struct virtgpu *gpu, unsigned long request, void *args) in virtgpu_ioctl() argument
565 return drmIoctl(gpu->fd, request, args); in virtgpu_ioctl()
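virtgpu_ioctl() at 563-565 is deliberately a one-liner: libdrm's drmIoctl() already restarts interrupted ioctls, so none of the wrappers below need their own EINTR handling. Its behavior is essentially the following sketch.

#include <errno.h>
#include <sys/ioctl.h>

/* What drmIoctl() does internally: retry while the call is interrupted,
 * so callers never observe EINTR/EAGAIN. */
static int
drm_ioctl_restart_sketch(int fd, unsigned long request, void *arg)
{
   int ret;
   do {
      ret = ioctl(fd, request, arg);
   } while (ret == -1 && (errno == EINTR || errno == EAGAIN));
   return ret;
}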
569 virtgpu_ioctl_getparam(struct virtgpu *gpu, uint64_t param) in virtgpu_ioctl_getparam() argument
578 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_GETPARAM, &args); in virtgpu_ioctl_getparam()
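virtgpu_ioctl_getparam() at 569-578 queries a single kernel capability. The virtgpu UAPI passes a user pointer in args.value; a sketch of the usual pattern, assuming the common convention that a zero result doubles as "unsupported":

#include <stdint.h>
#include <xf86drm.h>
#include <drm/virtgpu_drm.h>

static int64_t
virtgpu_getparam_sketch(int fd, uint64_t param)
{
   /* start zeroed: for most params the kernel writes only the low 32 bits */
   uint64_t val = 0;
   struct drm_virtgpu_getparam args = {
      .param = param,
      .value = (uintptr_t)&val,
   };
   return drmIoctl(fd, DRM_IOCTL_VIRTGPU_GETPARAM, &args) ? 0 : (int64_t)val;
}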
583 virtgpu_ioctl_get_caps(struct virtgpu *gpu, in virtgpu_ioctl_get_caps() argument
596 return virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_GET_CAPS, &args); in virtgpu_ioctl_get_caps()
600 virtgpu_ioctl_context_init(struct virtgpu *gpu, in virtgpu_ioctl_context_init() argument
623 return virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_CONTEXT_INIT, &args); in virtgpu_ioctl_context_init()
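virtgpu_ioctl_context_init() at 600-623 turns the DRM fd into a renderer context by binding it to a capset. A sketch with typical context params; NUM_RINGS = 64 is an assumption consistent with the max_timeline_count hit at 1541.

#include <stdint.h>
#include <xf86drm.h>
#include <drm/virtgpu_drm.h>

static int
virtgpu_context_init_sketch(int fd, uint64_t capset_id)
{
   struct drm_virtgpu_context_set_param params[] = {
      { .param = VIRTGPU_CONTEXT_PARAM_CAPSET_ID, .value = capset_id },
      { .param = VIRTGPU_CONTEXT_PARAM_NUM_RINGS, .value = 64 },
      /* 0: no drm_events generated on fence signaling */
      { .param = VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK, .value = 0 },
   };
   struct drm_virtgpu_context_init args = {
      .num_params = sizeof(params) / sizeof(params[0]),
      .ctx_set_params = (uintptr_t)params,
   };
   return drmIoctl(fd, DRM_IOCTL_VIRTGPU_CONTEXT_INIT, &args);
}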
627 virtgpu_ioctl_resource_create_blob(struct virtgpu *gpu, in virtgpu_ioctl_resource_create_blob() argument
645 if (virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &args)) in virtgpu_ioctl_resource_create_blob()
653 virtgpu_ioctl_resource_info(struct virtgpu *gpu, in virtgpu_ioctl_resource_info() argument
661 return virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_RESOURCE_INFO, info); in virtgpu_ioctl_resource_info()
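virtgpu_ioctl_resource_create_blob() at 627-645 is the allocation primitive the bo and shmem paths below build on. A sketch: on success the kernel fills in both a process-local GEM handle and a global resource id, and returning 0 for failure works because GEM handles start at 1.

#include <stdint.h>
#include <xf86drm.h>
#include <drm/virtgpu_drm.h>

static uint32_t
virtgpu_create_blob_sketch(int fd, uint32_t blob_mem, uint32_t blob_flags,
                           uint64_t size, uint64_t blob_id, uint32_t *res_id)
{
   struct drm_virtgpu_resource_create_blob args = {
      .blob_mem = blob_mem,     /* e.g. VIRTGPU_BLOB_MEM_HOST3D */
      .blob_flags = blob_flags, /* e.g. VIRTGPU_BLOB_FLAG_USE_MAPPABLE */
      .size = size,
      .blob_id = blob_id,       /* host-side object to attach, or 0 */
   };
   if (drmIoctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &args))
      return 0;
   *res_id = args.res_handle;
   return args.bo_handle;
}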
665 virtgpu_ioctl_gem_close(struct virtgpu *gpu, uint32_t gem_handle) in virtgpu_ioctl_gem_close() argument
671 ASSERTED const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_GEM_CLOSE, &args); in virtgpu_ioctl_gem_close()
676 virtgpu_ioctl_prime_handle_to_fd(struct virtgpu *gpu, in virtgpu_ioctl_prime_handle_to_fd() argument
685 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_PRIME_HANDLE_TO_FD, &args); in virtgpu_ioctl_prime_handle_to_fd()
690 virtgpu_ioctl_prime_fd_to_handle(struct virtgpu *gpu, int fd) in virtgpu_ioctl_prime_fd_to_handle() argument
696 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_PRIME_FD_TO_HANDLE, &args); in virtgpu_ioctl_prime_fd_to_handle()
701 virtgpu_ioctl_map(struct virtgpu *gpu, uint32_t gem_handle, size_t size) in virtgpu_ioctl_map() argument
707 if (virtgpu_ioctl(gpu, DRM_IOCTL_VIRTGPU_MAP, &args)) in virtgpu_ioctl_map()
710 void *ptr = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, gpu->fd, in virtgpu_ioctl_map()
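virtgpu_ioctl_map() at 701-710 shows the classic two-step GEM mapping: DRM_IOCTL_VIRTGPU_MAP only produces a fake mmap offset for the handle, and the actual mapping is an mmap() of the DRM fd at that offset. A sketch:

#include <stddef.h>
#include <stdint.h>
#include <sys/mman.h>
#include <xf86drm.h>
#include <drm/virtgpu_drm.h>

static void *
virtgpu_map_sketch(int fd, uint32_t gem_handle, size_t size)
{
   struct drm_virtgpu_map args = { .handle = gem_handle };
   if (drmIoctl(fd, DRM_IOCTL_VIRTGPU_MAP, &args))
      return NULL;

   void *ptr = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED,
                    fd, args.offset);
   return ptr == MAP_FAILED ? NULL : ptr;
}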
719 virtgpu_ioctl_syncobj_create(struct virtgpu *gpu, bool signaled) in virtgpu_ioctl_syncobj_create() argument
722 return sim_syncobj_create(gpu, signaled); in virtgpu_ioctl_syncobj_create()
729 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_CREATE, &args); in virtgpu_ioctl_syncobj_create()
734 virtgpu_ioctl_syncobj_destroy(struct virtgpu *gpu, uint32_t syncobj_handle) in virtgpu_ioctl_syncobj_destroy() argument
737 sim_syncobj_destroy(gpu, syncobj_handle); in virtgpu_ioctl_syncobj_destroy()
746 virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_DESTROY, &args); in virtgpu_ioctl_syncobj_destroy()
751 virtgpu_ioctl_syncobj_handle_to_fd(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_handle_to_fd() argument
756 return sync_file ? sim_syncobj_export(gpu, syncobj_handle) : -1; in virtgpu_ioctl_syncobj_handle_to_fd()
765 int ret = virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD, &args); in virtgpu_ioctl_syncobj_handle_to_fd()
773 virtgpu_ioctl_syncobj_fd_to_handle(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_fd_to_handle() argument
778 return syncobj_handle ? sim_syncobj_import(gpu, syncobj_handle, fd) : 0; in virtgpu_ioctl_syncobj_fd_to_handle()
788 int ret = virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE, &args); in virtgpu_ioctl_syncobj_fd_to_handle()
796 virtgpu_ioctl_syncobj_reset(struct virtgpu *gpu, uint32_t syncobj_handle) in virtgpu_ioctl_syncobj_reset() argument
799 return sim_syncobj_reset(gpu, syncobj_handle); in virtgpu_ioctl_syncobj_reset()
807 return virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_RESET, &args); in virtgpu_ioctl_syncobj_reset()
811 virtgpu_ioctl_syncobj_query(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_query() argument
816 return sim_syncobj_query(gpu, syncobj_handle, point); in virtgpu_ioctl_syncobj_query()
825 return virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_QUERY, &args); in virtgpu_ioctl_syncobj_query()
829 virtgpu_ioctl_syncobj_timeline_signal(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_timeline_signal() argument
834 return sim_syncobj_signal(gpu, syncobj_handle, point); in virtgpu_ioctl_syncobj_timeline_signal()
843 return virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_TIMELINE_SIGNAL, &args); in virtgpu_ioctl_syncobj_timeline_signal()
847 virtgpu_ioctl_syncobj_timeline_wait(struct virtgpu *gpu, in virtgpu_ioctl_syncobj_timeline_wait() argument
852 return sim_syncobj_wait(gpu, wait, wait_avail); in virtgpu_ioctl_syncobj_timeline_wait()
881 const int ret = virtgpu_ioctl(gpu, DRM_IOCTL_SYNCOBJ_TIMELINE_WAIT, &args); in virtgpu_ioctl_syncobj_timeline_wait()
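The syncobj wrappers at 719-881 dispatch to the sim_* emulation first (the early returns at 722, 737, 756, 778, 799, 816, 834, 852) and otherwise use the kernel's syncobj ioctls directly. For the timeline wait at 847-881 the core call is presumably along these lines, with the wait_avail argument selecting DRM_SYNCOBJ_WAIT_FLAGS_WAIT_AVAILABLE instead:

#include <stdint.h>
#include <xf86drm.h>

static int
syncobj_timeline_wait_sketch(int fd, const uint32_t *handles,
                             const uint64_t *points, uint32_t count,
                             int64_t abs_timeout_nsec)
{
   struct drm_syncobj_timeline_wait args = {
      .handles = (uintptr_t)handles,
      .points = (uintptr_t)points,
      .timeout_nsec = abs_timeout_nsec, /* absolute, not relative */
      .count_handles = count,
      /* wait for every (handle, point) pair to signal */
      .flags = DRM_SYNCOBJ_WAIT_FLAGS_WAIT_ALL,
   };
   return drmIoctl(fd, DRM_IOCTL_SYNCOBJ_TIMELINE_WAIT, &args);
}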
889 virtgpu_ioctl_submit(struct virtgpu *gpu, in virtgpu_ioctl_submit() argument
893 return sim_submit(gpu, submit); in virtgpu_ioctl_submit()
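virtgpu_ioctl_submit() at 889-893 falls back to sim_submit() in the emulated path; natively a batch is a single execbuffer. A sketch of one submission that asks the kernel for a sync-file fd back, as the sim path at 540-547 also does:

#include <stdint.h>
#include <xf86drm.h>
#include <drm/virtgpu_drm.h>

static int
virtgpu_execbuffer_sketch(int fd, const void *cs, uint32_t cs_size,
                          uint32_t ring_idx, int *out_fence_fd)
{
   struct drm_virtgpu_execbuffer args = {
      .flags = VIRTGPU_EXECBUF_RING_IDX | VIRTGPU_EXECBUF_FENCE_FD_OUT,
      .size = cs_size,
      .command = (uintptr_t)cs, /* the serialized command stream */
      .ring_idx = ring_idx,
      .fence_fd = -1,
   };
   if (drmIoctl(fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &args))
      return -1;
   *out_fence_fd = args.fence_fd; /* caller owns and must close it */
   return 0;
}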
903 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_write() local
907 virtgpu_ioctl_syncobj_timeline_signal(gpu, sync->syncobj_handle, val); in virtgpu_sync_write()
917 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_read() local
921 virtgpu_ioctl_syncobj_query(gpu, sync->syncobj_handle, val); in virtgpu_sync_read()
931 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_reset() local
934 int ret = virtgpu_ioctl_syncobj_reset(gpu, sync->syncobj_handle); in virtgpu_sync_reset()
936 ret = virtgpu_ioctl_syncobj_timeline_signal(gpu, sync->syncobj_handle, in virtgpu_sync_reset()
948 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_export_syncobj() local
951 return virtgpu_ioctl_syncobj_handle_to_fd(gpu, sync->syncobj_handle, in virtgpu_sync_export_syncobj()
959 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_destroy() local
962 virtgpu_ioctl_syncobj_destroy(gpu, sync->syncobj_handle); in virtgpu_sync_destroy()
973 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_create_from_syncobj() local
977 syncobj_handle = virtgpu_ioctl_syncobj_create(gpu, false); in virtgpu_sync_create_from_syncobj()
980 if (!virtgpu_ioctl_syncobj_fd_to_handle(gpu, fd, syncobj_handle)) { in virtgpu_sync_create_from_syncobj()
981 virtgpu_ioctl_syncobj_destroy(gpu, syncobj_handle); in virtgpu_sync_create_from_syncobj()
985 syncobj_handle = virtgpu_ioctl_syncobj_fd_to_handle(gpu, fd, 0); in virtgpu_sync_create_from_syncobj()
992 virtgpu_ioctl_syncobj_destroy(gpu, syncobj_handle); in virtgpu_sync_create_from_syncobj()
1010 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_sync_create() local
1019 virtgpu_ioctl_syncobj_create(gpu, signaled); in virtgpu_sync_create()
1025 virtgpu_ioctl_syncobj_timeline_signal(gpu, syncobj_handle, initial_val); in virtgpu_sync_create()
1027 virtgpu_ioctl_syncobj_destroy(gpu, syncobj_handle); in virtgpu_sync_create()
1033 virtgpu_ioctl_syncobj_destroy(gpu, syncobj_handle); in virtgpu_sync_create()
1038 /* we will have a sync_id when shareable is true and virtio-gpu associates in virtgpu_sync_create()
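The virtgpu_sync_* hits at 903-1038 implement the renderer's sync ops on top of the wrappers. Creation (1010-1033) makes a syncobj and pushes the initial timeline value into it, destroying it again on failure; a sketch of that flow with raw ioctls, names mine:

#include <stdint.h>
#include <xf86drm.h>

static uint32_t
sync_create_sketch(int fd, uint64_t initial_val)
{
   struct drm_syncobj_create create = { .flags = 0 };
   if (drmIoctl(fd, DRM_IOCTL_SYNCOBJ_CREATE, &create))
      return 0;

   /* seed the timeline with its starting value */
   struct drm_syncobj_timeline_array signal = {
      .handles = (uintptr_t)&create.handle,
      .points = (uintptr_t)&initial_val,
      .count_handles = 1,
   };
   if (drmIoctl(fd, DRM_IOCTL_SYNCOBJ_TIMELINE_SIGNAL, &signal)) {
      /* clean up the half-made sync, as the hits at 1027/1033 suggest */
      struct drm_syncobj_destroy destroy = { .handle = create.handle };
      drmIoctl(fd, DRM_IOCTL_SYNCOBJ_DESTROY, &destroy);
      return 0;
   }
   return create.handle;
}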
1069 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_map() local
1076 virtgpu_ioctl_map(gpu, bo->gem_handle, bo->base.mmap_size); in virtgpu_bo_map()
1086 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_export_dma_buf() local
1092 ? virtgpu_ioctl_prime_handle_to_fd(gpu, bo->gem_handle, mappable) in virtgpu_bo_export_dma_buf()
1099 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_destroy() local
1102 mtx_lock(&gpu->dma_buf_import_mutex); in virtgpu_bo_destroy()
1108 mtx_unlock(&gpu->dma_buf_import_mutex); in virtgpu_bo_destroy()
1122 virtgpu_ioctl_gem_close(gpu, gem_handle); in virtgpu_bo_destroy()
1124 mtx_unlock(&gpu->dma_buf_import_mutex); in virtgpu_bo_destroy()
1130 virtgpu_bo_blob_flags(struct virtgpu *gpu, in virtgpu_bo_blob_flags() argument
1140 if (gpu->supports_cross_device) in virtgpu_bo_blob_flags()
1154 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_create_from_dma_buf() local
1159 mtx_lock(&gpu->dma_buf_import_mutex); in virtgpu_bo_create_from_dma_buf()
1161 gem_handle = virtgpu_ioctl_prime_fd_to_handle(gpu, fd); in virtgpu_bo_create_from_dma_buf()
1164 bo = util_sparse_array_get(&gpu->bo_array, gem_handle); in virtgpu_bo_create_from_dma_buf()
1166 if (virtgpu_ioctl_resource_info(gpu, gem_handle, &info)) in virtgpu_bo_create_from_dma_buf()
1178 if (info.blob_mem != gpu->bo_blob_mem) in virtgpu_bo_create_from_dma_buf()
1181 blob_flags |= virtgpu_bo_blob_flags(gpu, flags, 0); in virtgpu_bo_create_from_dma_buf()
1218 mtx_unlock(&gpu->dma_buf_import_mutex); in virtgpu_bo_create_from_dma_buf()
1226 virtgpu_ioctl_gem_close(gpu, gem_handle); in virtgpu_bo_create_from_dma_buf()
1227 mtx_unlock(&gpu->dma_buf_import_mutex); in virtgpu_bo_create_from_dma_buf()
1240 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_bo_create_from_device_memory() local
1241 const uint32_t blob_flags = virtgpu_bo_blob_flags(gpu, flags, external_handles); in virtgpu_bo_create_from_device_memory()
1245 gpu, gpu->bo_blob_mem, blob_flags, size, mem_id, &res_id); in virtgpu_bo_create_from_device_memory()
1249 struct virtgpu_bo *bo = util_sparse_array_get(&gpu->bo_array, gem_handle); in virtgpu_bo_create_from_device_memory()
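virtgpu_bo_blob_flags() at 1130-1140 derives blob flags from the Vulkan-side request, with the cross-device flag gated on the kernel param probed at 1536-1538. A sketch consistent with those hits; the exact mapping is inferred, not shown in the listing:

#include <stdbool.h>
#include <stdint.h>
#include <vulkan/vulkan_core.h>
#include <drm/virtgpu_drm.h>

static uint32_t
bo_blob_flags_sketch(VkMemoryPropertyFlags flags,
                     VkExternalMemoryHandleTypeFlags external_handles,
                     bool supports_cross_device)
{
   uint32_t blob_flags = 0;
   if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
      blob_flags |= VIRTGPU_BLOB_FLAG_USE_MAPPABLE;
   if (external_handles)
      blob_flags |= VIRTGPU_BLOB_FLAG_USE_SHAREABLE;
   /* dma-buf sharing with other devices needs kernel support (line 1140) */
   if ((external_handles & VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) &&
       supports_cross_device)
      blob_flags |= VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE;
   return blob_flags;
}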
1269 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_shmem_destroy_now() local
1273 virtgpu_ioctl_gem_close(gpu, shmem->gem_handle); in virtgpu_shmem_destroy_now()
1280 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_shmem_destroy() local
1282 if (vn_renderer_shmem_cache_add(&gpu->shmem_cache, shmem)) in virtgpu_shmem_destroy()
1285 virtgpu_shmem_destroy_now(&gpu->base, shmem); in virtgpu_shmem_destroy()
1291 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_shmem_create() local
1294 vn_renderer_shmem_cache_get(&gpu->shmem_cache, size); in virtgpu_shmem_create()
1302 gpu, gpu->shmem_blob_mem, VIRTGPU_BLOB_FLAG_USE_MAPPABLE, size, 0, in virtgpu_shmem_create()
1307 void *ptr = virtgpu_ioctl_map(gpu, gem_handle, size); in virtgpu_shmem_create()
1309 virtgpu_ioctl_gem_close(gpu, gem_handle); in virtgpu_shmem_create()
1314 util_sparse_array_get(&gpu->shmem_array, gem_handle); in virtgpu_shmem_create()
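The shmem hits at 1269-1314 show a cached pool of mappable blobs backing the Venus command and reply buffers: check the cache first, else create a mappable blob and map it immediately, closing the handle if the map fails (1307-1309). A sketch reusing the blob-create and map sketches above; blob_id 0 here relates to the supports_blob_id_0 assertion at 1456:

#include <stdint.h>
#include <xf86drm.h>

static void *
shmem_create_sketch(int fd, uint32_t shmem_blob_mem, uint64_t size,
                    uint32_t *out_gem_handle, uint32_t *out_res_id)
{
   uint32_t gem_handle = virtgpu_create_blob_sketch(
      fd, shmem_blob_mem, VIRTGPU_BLOB_FLAG_USE_MAPPABLE, size,
      0 /* blob_id */, out_res_id);
   if (!gem_handle)
      return NULL;

   void *ptr = virtgpu_map_sketch(fd, gem_handle, size);
   if (!ptr) {
      /* mirror the gem_close-on-failure at 1309 */
      struct drm_gem_close close_args = { .handle = gem_handle };
      drmIoctl(fd, DRM_IOCTL_GEM_CLOSE, &close_args);
      return NULL;
   }
   *out_gem_handle = gem_handle;
   return ptr;
}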
1332 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_wait() local
1334 const int ret = virtgpu_ioctl_syncobj_timeline_wait(gpu, wait, false); in virtgpu_wait()
1345 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_submit() local
1347 const int ret = virtgpu_ioctl_submit(gpu, submit); in virtgpu_submit()
1352 virtgpu_init_renderer_info(struct virtgpu *gpu) in virtgpu_init_renderer_info() argument
1354 struct vn_renderer_info *info = &gpu->base.info; in virtgpu_init_renderer_info()
1358 .hasPrimary = gpu->has_primary, in virtgpu_init_renderer_info()
1360 .primaryMajor = gpu->primary_major, in virtgpu_init_renderer_info()
1361 .primaryMinor = gpu->primary_minor, in virtgpu_init_renderer_info()
1362 .renderMajor = gpu->render_major, in virtgpu_init_renderer_info()
1363 .renderMinor = gpu->render_minor, in virtgpu_init_renderer_info()
1369 if (gpu->bustype == DRM_BUS_PCI) { in virtgpu_init_renderer_info()
1374 .pciDomain = gpu->pci_bus_info.domain, in virtgpu_init_renderer_info()
1375 .pciBus = gpu->pci_bus_info.bus, in virtgpu_init_renderer_info()
1376 .pciDevice = gpu->pci_bus_info.dev, in virtgpu_init_renderer_info()
1377 .pciFunction = gpu->pci_bus_info.func, in virtgpu_init_renderer_info()
1387 const struct virgl_renderer_capset_venus *capset = &gpu->capset.data; in virtgpu_init_renderer_info()
1405 info->max_timeline_count = gpu->max_timeline_count; in virtgpu_init_renderer_info()
1407 if (gpu->bo_blob_mem == VIRTGPU_BLOB_MEM_GUEST_VRAM) in virtgpu_init_renderer_info()
1411 if (gpu->bo_blob_mem == VIRTGPU_BLOB_MEM_HOST3D && capset->use_guest_vram) in virtgpu_init_renderer_info()
1419 struct virtgpu *gpu = (struct virtgpu *)renderer; in virtgpu_destroy() local
1421 vn_renderer_shmem_cache_fini(&gpu->shmem_cache); in virtgpu_destroy()
1423 if (gpu->fd >= 0) in virtgpu_destroy()
1424 close(gpu->fd); in virtgpu_destroy()
1426 mtx_destroy(&gpu->dma_buf_import_mutex); in virtgpu_destroy()
1428 util_sparse_array_finish(&gpu->shmem_array); in virtgpu_destroy()
1429 util_sparse_array_finish(&gpu->bo_array); in virtgpu_destroy()
1431 vk_free(alloc, gpu); in virtgpu_destroy()
1435 virtgpu_init_shmem_blob_mem(ASSERTED struct virtgpu *gpu) in virtgpu_init_shmem_blob_mem() argument
1456 assert(gpu->capset.data.supports_blob_id_0); in virtgpu_init_shmem_blob_mem()
1457 gpu->shmem_blob_mem = VIRTGPU_BLOB_MEM_HOST3D; in virtgpu_init_shmem_blob_mem()
1461 virtgpu_init_context(struct virtgpu *gpu) in virtgpu_init_context() argument
1463 assert(!gpu->capset.version); in virtgpu_init_context()
1464 const int ret = virtgpu_ioctl_context_init(gpu, gpu->capset.id); in virtgpu_init_context()
1467 vn_log(gpu->instance, "failed to initialize context: %s", in virtgpu_init_context()
1477 virtgpu_init_capset(struct virtgpu *gpu) in virtgpu_init_capset() argument
1479 gpu->capset.id = VIRGL_RENDERER_CAPSET_VENUS; in virtgpu_init_capset()
1480 gpu->capset.version = 0; in virtgpu_init_capset()
1483 virtgpu_ioctl_get_caps(gpu, gpu->capset.id, gpu->capset.version, in virtgpu_init_capset()
1484 &gpu->capset.data, sizeof(gpu->capset.data)); in virtgpu_init_capset()
1487 vn_log(gpu->instance, "failed to get venus v%d capset: %s", in virtgpu_init_capset()
1488 gpu->capset.version, strerror(errno)); in virtgpu_init_capset()
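virtgpu_init_capset() at 1477-1488 fetches the Venus capset (id VIRGL_RENDERER_CAPSET_VENUS, version 0) through the get-caps wrapper at 583-596. The ioctl copies at most `size` bytes of the host's capset into the buffer, so zeroing it first keeps any fields a shorter host capset omits at 0. A sketch:

#include <stdint.h>
#include <string.h>
#include <xf86drm.h>
#include <drm/virtgpu_drm.h>

static int
virtgpu_get_caps_sketch(int fd, uint32_t capset_id, uint32_t version,
                        void *capset, uint32_t size)
{
   memset(capset, 0, size); /* unreported tail fields stay zero */
   struct drm_virtgpu_get_caps args = {
      .cap_set_id = capset_id,
      .cap_set_ver = version,
      .addr = (uintptr_t)capset,
      .size = size,
   };
   return drmIoctl(fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &args);
}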
1497 virtgpu_init_params(struct virtgpu *gpu) in virtgpu_init_params() argument
1505 val = virtgpu_ioctl_getparam(gpu, required_params[i]); in virtgpu_init_params()
1508 vn_log(gpu->instance, "required kernel param %d is missing", in virtgpu_init_params()
1515 val = virtgpu_ioctl_getparam(gpu, VIRTGPU_PARAM_HOST_VISIBLE); in virtgpu_init_params()
1517 gpu->bo_blob_mem = VIRTGPU_BLOB_MEM_HOST3D; in virtgpu_init_params()
1519 val = virtgpu_ioctl_getparam(gpu, VIRTGPU_PARAM_GUEST_VRAM); in virtgpu_init_params()
1521 gpu->bo_blob_mem = VIRTGPU_BLOB_MEM_GUEST_VRAM; in virtgpu_init_params()
1526 vn_log(gpu->instance, in virtgpu_init_params()
1536 val = virtgpu_ioctl_getparam(gpu, VIRTGPU_PARAM_CROSS_DEVICE); in virtgpu_init_params()
1538 gpu->supports_cross_device = true; in virtgpu_init_params()
1541 gpu->max_timeline_count = 64; in virtgpu_init_params()
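virtgpu_init_params() at 1497-1541 probes kernel features up front: a list of required params, then HOST_VISIBLE versus GUEST_VRAM to choose bo_blob_mem, then optional CROSS_DEVICE support. A sketch reusing the getparam sketch above; the exact required list is an assumption, since the hits only show the failure log:

#include <stdbool.h>
#include <stdint.h>
#include <drm/virtgpu_drm.h>

static bool
probe_params_sketch(int fd, uint32_t *bo_blob_mem, bool *cross_device)
{
   const uint64_t required[] = {
      VIRTGPU_PARAM_3D_FEATURES,
      VIRTGPU_PARAM_RESOURCE_BLOB,
      VIRTGPU_PARAM_CONTEXT_INIT,
   };
   for (unsigned i = 0; i < sizeof(required) / sizeof(required[0]); i++) {
      if (!virtgpu_getparam_sketch(fd, required[i]))
         return false; /* "required kernel param %d is missing" */
   }

   /* pick where bo blobs live, per the hits at 1515-1521 */
   if (virtgpu_getparam_sketch(fd, VIRTGPU_PARAM_HOST_VISIBLE))
      *bo_blob_mem = VIRTGPU_BLOB_MEM_HOST3D;
   else if (virtgpu_getparam_sketch(fd, VIRTGPU_PARAM_GUEST_VRAM))
      *bo_blob_mem = VIRTGPU_BLOB_MEM_GUEST_VRAM;
   else
      return false;

   *cross_device = virtgpu_getparam_sketch(fd, VIRTGPU_PARAM_CROSS_DEVICE);
   return true;
}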
1547 virtgpu_open_device(struct virtgpu *gpu, const drmDevicePtr dev) in virtgpu_open_device() argument
1573 vn_log(gpu->instance, "skipping DRM device %s", name); in virtgpu_open_device()
1584 vn_log(gpu->instance, "failed to open %s", node_path); in virtgpu_open_device()
1593 vn_log(gpu->instance, "unknown DRM driver %s version %d", in virtgpu_open_device()
1596 vn_log(gpu->instance, "failed to get DRM driver version"); in virtgpu_open_device()
1605 gpu->fd = fd; in virtgpu_open_device()
1609 gpu->has_primary = true; in virtgpu_open_device()
1610 gpu->primary_major = major(st.st_rdev); in virtgpu_open_device()
1611 gpu->primary_minor = minor(st.st_rdev); in virtgpu_open_device()
1613 gpu->has_primary = false; in virtgpu_open_device()
1614 gpu->primary_major = 0; in virtgpu_open_device()
1615 gpu->primary_minor = 0; in virtgpu_open_device()
1618 gpu->render_major = major(st.st_rdev); in virtgpu_open_device()
1619 gpu->render_minor = minor(st.st_rdev); in virtgpu_open_device()
1621 gpu->bustype = dev->bustype; in virtgpu_open_device()
1623 gpu->pci_bus_info = *dev->businfo.pci; in virtgpu_open_device()
1628 vn_log(gpu->instance, "using DRM device %s", node_path); in virtgpu_open_device()
1634 virtgpu_open(struct virtgpu *gpu) in virtgpu_open() argument
1640 vn_log(gpu->instance, "failed to enumerate DRM devices"); in virtgpu_open()
1646 result = virtgpu_open_device(gpu, devs[i]); in virtgpu_open()
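virtgpu_open()/virtgpu_open_device() at 1547-1646 scan the DRM device list and keep the first render node whose kernel driver identifies itself as virtio_gpu, logging and skipping everything else. A minimal sketch of that scan:

#include <fcntl.h>
#include <string.h>
#include <unistd.h>
#include <xf86drm.h>

static int
open_virtgpu_sketch(void)
{
   drmDevicePtr devs[8];
   int count = drmGetDevices2(0, devs, 8);
   int fd = -1;

   for (int i = 0; i < count && fd < 0; i++) {
      if (!(devs[i]->available_nodes & (1 << DRM_NODE_RENDER)))
         continue;

      int tmp = open(devs[i]->nodes[DRM_NODE_RENDER], O_RDWR | O_CLOEXEC);
      if (tmp < 0)
         continue;

      /* keep the fd only for the virtio_gpu kernel driver */
      drmVersionPtr ver = drmGetVersion(tmp);
      if (ver && !strcmp(ver->name, "virtio_gpu"))
         fd = tmp;
      else
         close(tmp);
      if (ver)
         drmFreeVersion(ver);
   }

   if (count > 0)
      drmFreeDevices(devs, count);
   return fd;
}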
1657 virtgpu_init(struct virtgpu *gpu) in virtgpu_init() argument
1659 util_sparse_array_init(&gpu->shmem_array, sizeof(struct virtgpu_shmem), in virtgpu_init()
1661 util_sparse_array_init(&gpu->bo_array, sizeof(struct virtgpu_bo), 1024); in virtgpu_init()
1663 mtx_init(&gpu->dma_buf_import_mutex, mtx_plain); in virtgpu_init()
1665 VkResult result = virtgpu_open(gpu); in virtgpu_init()
1667 result = virtgpu_init_params(gpu); in virtgpu_init()
1669 result = virtgpu_init_capset(gpu); in virtgpu_init()
1671 result = virtgpu_init_context(gpu); in virtgpu_init()
1675 virtgpu_init_shmem_blob_mem(gpu); in virtgpu_init()
1677 vn_renderer_shmem_cache_init(&gpu->shmem_cache, &gpu->base, in virtgpu_init()
1680 virtgpu_init_renderer_info(gpu); in virtgpu_init()
1682 gpu->base.ops.destroy = virtgpu_destroy; in virtgpu_init()
1683 gpu->base.ops.submit = virtgpu_submit; in virtgpu_init()
1684 gpu->base.ops.wait = virtgpu_wait; in virtgpu_init()
1686 gpu->base.shmem_ops.create = virtgpu_shmem_create; in virtgpu_init()
1687 gpu->base.shmem_ops.destroy = virtgpu_shmem_destroy; in virtgpu_init()
1689 gpu->base.bo_ops.create_from_device_memory = in virtgpu_init()
1691 gpu->base.bo_ops.create_from_dma_buf = virtgpu_bo_create_from_dma_buf; in virtgpu_init()
1692 gpu->base.bo_ops.destroy = virtgpu_bo_destroy; in virtgpu_init()
1693 gpu->base.bo_ops.export_dma_buf = virtgpu_bo_export_dma_buf; in virtgpu_init()
1694 gpu->base.bo_ops.map = virtgpu_bo_map; in virtgpu_init()
1695 gpu->base.bo_ops.flush = virtgpu_bo_flush; in virtgpu_init()
1696 gpu->base.bo_ops.invalidate = virtgpu_bo_invalidate; in virtgpu_init()
1698 gpu->base.sync_ops.create = virtgpu_sync_create; in virtgpu_init()
1699 gpu->base.sync_ops.create_from_syncobj = virtgpu_sync_create_from_syncobj; in virtgpu_init()
1700 gpu->base.sync_ops.destroy = virtgpu_sync_destroy; in virtgpu_init()
1701 gpu->base.sync_ops.export_syncobj = virtgpu_sync_export_syncobj; in virtgpu_init()
1702 gpu->base.sync_ops.reset = virtgpu_sync_reset; in virtgpu_init()
1703 gpu->base.sync_ops.read = virtgpu_sync_read; in virtgpu_init()
1704 gpu->base.sync_ops.write = virtgpu_sync_write; in virtgpu_init()
1714 struct virtgpu *gpu = vk_zalloc(alloc, sizeof(*gpu), VN_DEFAULT_ALIGN, in vn_renderer_create_virtgpu() local
1716 if (!gpu) in vn_renderer_create_virtgpu()
1719 gpu->instance = instance; in vn_renderer_create_virtgpu()
1720 gpu->fd = -1; in vn_renderer_create_virtgpu()
1722 VkResult result = virtgpu_init(gpu); in vn_renderer_create_virtgpu()
1724 virtgpu_destroy(&gpu->base, alloc); in vn_renderer_create_virtgpu()
1728 *renderer = &gpu->base; in vn_renderer_create_virtgpu()
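vn_renderer_create_virtgpu() at 1714-1728 shows the create/destroy contract: the renderer is zero-allocated and its fd primed to -1, so virtgpu_destroy() (1419-1431) can run safely on a half-initialized object when virtgpu_init() fails. A sketch of that shape; virtgpu_sketch and init_fn are illustrative stand-ins, while vk_zalloc/vk_free and VN_DEFAULT_ALIGN are the Mesa helpers seen in the hits.

#include <unistd.h>
#include <vulkan/vulkan_core.h>
#include "vk_alloc.h" /* Mesa's vk_zalloc/vk_free */

struct virtgpu_sketch {
   int fd;
   /* ...renderer state... */
};

static VkResult
create_renderer_sketch(const VkAllocationCallbacks *alloc,
                       VkResult (*init_fn)(struct virtgpu_sketch *),
                       struct virtgpu_sketch **out)
{
   struct virtgpu_sketch *gpu = vk_zalloc(
      alloc, sizeof(*gpu), VN_DEFAULT_ALIGN,
      VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!gpu)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   gpu->fd = -1; /* so a failed init never closes fd 0 */

   VkResult result = init_fn(gpu);
   if (result != VK_SUCCESS) {
      if (gpu->fd >= 0) /* same guard as the destroy hit at 1423 */
         close(gpu->fd);
      vk_free(alloc, gpu);
      return result;
   }

   *out = gpu;
   return VK_SUCCESS;
}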