• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright © 2021 Collabora Ltd.
3  *
4  * Derived from tu_device.c which is:
5  * Copyright © 2016 Red Hat.
6  * Copyright © 2016 Bas Nieuwenhuizen
7  * Copyright © 2015 Intel Corporation
8  *
9  * Permission is hereby granted, free of charge, to any person obtaining a
10  * copy of this software and associated documentation files (the "Software"),
11  * to deal in the Software without restriction, including without limitation
12  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
13  * and/or sell copies of the Software, and to permit persons to whom the
14  * Software is furnished to do so, subject to the following conditions:
15  *
16  * The above copyright notice and this permission notice (including the next
17  * paragraph) shall be included in all copies or substantial portions of the
18  * Software.
19  *
20  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
23  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
25  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
26  * DEALINGS IN THE SOFTWARE.
27  */
28 
29 #include "panvk_private.h"
30 
31 #include "pan_bo.h"
32 #include "pan_encoder.h"
33 #include "pan_util.h"
34 #include "vk_common_entrypoints.h"
35 #include "vk_cmd_enqueue_entrypoints.h"
36 
37 #include <fcntl.h>
38 #include <libsync.h>
39 #include <stdbool.h>
40 #include <string.h>
41 #include <sys/mman.h>
42 #include <sys/sysinfo.h>
43 #include <unistd.h>
44 #include <xf86drm.h>
45 
46 #include "drm-uapi/panfrost_drm.h"
47 
48 #include "util/debug.h"
49 #include "util/disk_cache.h"
50 #include "util/strtod.h"
51 #include "vk_format.h"
52 #include "vk_drm_syncobj.h"
53 #include "vk_util.h"
54 
55 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
56 #include <wayland-client.h>
57 #include "wayland-drm-client-protocol.h"
58 #endif
59 
60 #include "panvk_cs.h"
61 
62 VkResult
_panvk_device_set_lost(struct panvk_device * device,const char * file,int line,const char * msg,...)63 _panvk_device_set_lost(struct panvk_device *device,
64                        const char *file, int line,
65                        const char *msg, ...)
66 {
67    /* Set the flag indicating that waits should return in finite time even
68     * after device loss.
69     */
70    p_atomic_inc(&device->_lost);
71 
72    /* TODO: Report the log message through VkDebugReportCallbackEXT instead */
73    fprintf(stderr, "%s:%d: ", file, line);
74    va_list ap;
75    va_start(ap, msg);
76    vfprintf(stderr, msg, ap);
77    va_end(ap);
78 
79    if (env_var_as_boolean("PANVK_ABORT_ON_DEVICE_LOSS", false))
80       abort();
81 
82    return VK_ERROR_DEVICE_LOST;
83 }
84 
85 static int
panvk_device_get_cache_uuid(uint16_t family,void * uuid)86 panvk_device_get_cache_uuid(uint16_t family, void *uuid)
87 {
88    uint32_t mesa_timestamp;
89    uint16_t f = family;
90 
91    if (!disk_cache_get_function_timestamp(panvk_device_get_cache_uuid,
92                                           &mesa_timestamp))
93       return -1;
94 
95    memset(uuid, 0, VK_UUID_SIZE);
96    memcpy(uuid, &mesa_timestamp, 4);
97    memcpy((char *) uuid + 4, &f, 2);
98    snprintf((char *) uuid + 6, VK_UUID_SIZE - 10, "pan");
99    return 0;
100 }
101 
/* Fill 'uuid' (VK_UUID_SIZE bytes) with a constant driver identifier:
 * zeroed, then the literal "panfrost" stamped at the front. */
static void
panvk_get_driver_uuid(void *uuid)
{
   memset(uuid, 0, VK_UUID_SIZE);
   snprintf(uuid, VK_UUID_SIZE, "panfrost");
}
108 
/* No stable per-device identity is derived yet; report an all-zero
 * device UUID ('uuid' is VK_UUID_SIZE bytes). */
static void
panvk_get_device_uuid(void *uuid)
{
   memset(uuid, 0, VK_UUID_SIZE);
}
114 
/* Debug flags recognized in the PANVK_DEBUG environment variable
 * (comma-separated; parsed with parse_debug_string() at instance
 * creation). The NULL entry terminates the table. */
static const struct debug_control panvk_debug_options[] = {
   { "startup", PANVK_DEBUG_STARTUP },
   { "nir", PANVK_DEBUG_NIR },
   { "trace", PANVK_DEBUG_TRACE },
   { "sync", PANVK_DEBUG_SYNC },
   { "afbc", PANVK_DEBUG_AFBC },
   { "linear", PANVK_DEBUG_LINEAR },
   { "dump", PANVK_DEBUG_DUMP },
   { NULL, 0 }
};
125 
126 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
127 #define PANVK_USE_WSI_PLATFORM
128 #endif
129 
130 #define PANVK_API_VERSION VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION)
131 
/* Report the instance-level Vulkan API version supported by panvk
 * (1.0 plus the current header patch version). */
VkResult
panvk_EnumerateInstanceVersion(uint32_t *pApiVersion)
{
    *pApiVersion = PANVK_API_VERSION;
    return VK_SUCCESS;
}
138 
/* Instance-level extensions panvk advertises. WSI entries are compiled
 * in only when the matching platform support is enabled. */
static const struct vk_instance_extension_table panvk_instance_extensions = {
   .KHR_get_physical_device_properties2 = true,
   .EXT_debug_report = true,
   .EXT_debug_utils = true,

#ifdef PANVK_USE_WSI_PLATFORM
   .KHR_surface = true,
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
   .KHR_wayland_surface = true,
#endif
};
151 
152 static void
panvk_get_device_extensions(const struct panvk_physical_device * device,struct vk_device_extension_table * ext)153 panvk_get_device_extensions(const struct panvk_physical_device *device,
154                             struct vk_device_extension_table *ext)
155 {
156    *ext = (struct vk_device_extension_table) {
157       .KHR_copy_commands2 = true,
158       .KHR_storage_buffer_storage_class = true,
159 #ifdef PANVK_USE_WSI_PLATFORM
160       .KHR_swapchain = true,
161 #endif
162       .KHR_synchronization2 = true,
163       .KHR_variable_pointers = true,
164       .EXT_custom_border_color = true,
165       .EXT_index_type_uint8 = true,
166       .EXT_vertex_attribute_divisor = true,
167    };
168 }
169 
170 VkResult
panvk_CreateInstance(const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)171 panvk_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
172                      const VkAllocationCallbacks *pAllocator,
173                      VkInstance *pInstance)
174 {
175    struct panvk_instance *instance;
176    VkResult result;
177 
178    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
179 
180    pAllocator = pAllocator ? : vk_default_allocator();
181    instance = vk_zalloc(pAllocator, sizeof(*instance), 8,
182                         VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
183    if (!instance)
184       return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
185 
186    struct vk_instance_dispatch_table dispatch_table;
187 
188    vk_instance_dispatch_table_from_entrypoints(&dispatch_table,
189                                                &panvk_instance_entrypoints,
190                                                true);
191    vk_instance_dispatch_table_from_entrypoints(&dispatch_table,
192                                                &wsi_instance_entrypoints,
193                                                false);
194    result = vk_instance_init(&instance->vk,
195                              &panvk_instance_extensions,
196                              &dispatch_table,
197                              pCreateInfo,
198                              pAllocator);
199    if (result != VK_SUCCESS) {
200       vk_free(pAllocator, instance);
201       return vk_error(NULL, result);
202    }
203 
204    instance->physical_device_count = -1;
205    instance->debug_flags = parse_debug_string(getenv("PANVK_DEBUG"),
206                                               panvk_debug_options);
207 
208    if (instance->debug_flags & PANVK_DEBUG_STARTUP)
209       panvk_logi("Created an instance");
210 
211    VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
212 
213    *pInstance = panvk_instance_to_handle(instance);
214 
215    return VK_SUCCESS;
216 }
217 
/* Tear down a physical device, mirroring panvk_physical_device_init in
 * reverse: WSI first, then per-arch meta state, then the panfrost
 * device, and finally the common vk_physical_device. */
static void
panvk_physical_device_finish(struct panvk_physical_device *device)
{
   panvk_wsi_finish(device);

   panvk_arch_dispatch(device->pdev.arch, meta_cleanup, device);
   panfrost_close_device(&device->pdev);
   /* master_fd is only opened when KHR_display was enabled at init. */
   if (device->master_fd != -1)
      close(device->master_fd);

   vk_physical_device_finish(&device->vk);
}
230 
231 void
panvk_DestroyInstance(VkInstance _instance,const VkAllocationCallbacks * pAllocator)232 panvk_DestroyInstance(VkInstance _instance,
233                       const VkAllocationCallbacks *pAllocator)
234 {
235    VK_FROM_HANDLE(panvk_instance, instance, _instance);
236 
237    if (!instance)
238       return;
239 
240    for (int i = 0; i < instance->physical_device_count; ++i) {
241       panvk_physical_device_finish(instance->physical_devices + i);
242    }
243 
244    vk_instance_finish(&instance->vk);
245    vk_free(&instance->vk.alloc, instance);
246 }
247 
248 static VkResult
panvk_physical_device_init(struct panvk_physical_device * device,struct panvk_instance * instance,drmDevicePtr drm_device)249 panvk_physical_device_init(struct panvk_physical_device *device,
250                            struct panvk_instance *instance,
251                            drmDevicePtr drm_device)
252 {
253    const char *path = drm_device->nodes[DRM_NODE_RENDER];
254    VkResult result = VK_SUCCESS;
255    drmVersionPtr version;
256    int fd;
257    int master_fd = -1;
258 
259    if (!getenv("PAN_I_WANT_A_BROKEN_VULKAN_DRIVER")) {
260       return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
261                        "WARNING: panvk is not a conformant vulkan implementation, "
262                        "pass PAN_I_WANT_A_BROKEN_VULKAN_DRIVER=1 if you know what you're doing.");
263    }
264 
265    fd = open(path, O_RDWR | O_CLOEXEC);
266    if (fd < 0) {
267       return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
268                        "failed to open device %s", path);
269    }
270 
271    version = drmGetVersion(fd);
272    if (!version) {
273       close(fd);
274       return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
275                        "failed to query kernel driver version for device %s",
276                        path);
277    }
278 
279    if (strcmp(version->name, "panfrost")) {
280       drmFreeVersion(version);
281       close(fd);
282       return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
283                        "device %s does not use the panfrost kernel driver", path);
284    }
285 
286    drmFreeVersion(version);
287 
288    if (instance->debug_flags & PANVK_DEBUG_STARTUP)
289       panvk_logi("Found compatible device '%s'.", path);
290 
291    struct vk_device_extension_table supported_extensions;
292    panvk_get_device_extensions(device, &supported_extensions);
293 
294    struct vk_physical_device_dispatch_table dispatch_table;
295    vk_physical_device_dispatch_table_from_entrypoints(&dispatch_table,
296                                                       &panvk_physical_device_entrypoints,
297                                                       true);
298    vk_physical_device_dispatch_table_from_entrypoints(&dispatch_table,
299                                                       &wsi_physical_device_entrypoints,
300                                                       false);
301 
302    result = vk_physical_device_init(&device->vk, &instance->vk,
303                                     &supported_extensions,
304                                     &dispatch_table);
305 
306    if (result != VK_SUCCESS) {
307       vk_error(instance, result);
308       goto fail;
309    }
310 
311    device->instance = instance;
312    assert(strlen(path) < ARRAY_SIZE(device->path));
313    strncpy(device->path, path, ARRAY_SIZE(device->path));
314 
315    if (instance->vk.enabled_extensions.KHR_display) {
316       master_fd = open(drm_device->nodes[DRM_NODE_PRIMARY], O_RDWR | O_CLOEXEC);
317       if (master_fd >= 0) {
318          /* TODO: free master_fd is accel is not working? */
319       }
320    }
321 
322    device->master_fd = master_fd;
323    if (instance->debug_flags & PANVK_DEBUG_TRACE)
324       device->pdev.debug |= PAN_DBG_TRACE;
325 
326    device->pdev.debug |= PAN_DBG_NO_CACHE;
327    panfrost_open_device(NULL, fd, &device->pdev);
328    fd = -1;
329 
330    if (device->pdev.arch <= 5) {
331       result = vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
332                          "%s not supported",
333                          device->pdev.model->name);
334       goto fail;
335    }
336 
337    panvk_arch_dispatch(device->pdev.arch, meta_init, device);
338 
339    memset(device->name, 0, sizeof(device->name));
340    sprintf(device->name, "%s", device->pdev.model->name);
341 
342    if (panvk_device_get_cache_uuid(device->pdev.gpu_id, device->cache_uuid)) {
343       result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
344                          "cannot generate UUID");
345       goto fail_close_device;
346    }
347 
348    vk_warn_non_conformant_implementation("panvk");
349 
350    panvk_get_driver_uuid(&device->device_uuid);
351    panvk_get_device_uuid(&device->device_uuid);
352 
353    device->drm_syncobj_type = vk_drm_syncobj_get_type(device->pdev.fd);
354    /* We don't support timelines in the uAPI yet and we don't want it getting
355     * suddenly turned on by vk_drm_syncobj_get_type() without us adding panvk
356     * code for it first.
357     */
358    device->drm_syncobj_type.features &= ~VK_SYNC_FEATURE_TIMELINE;
359 
360    device->sync_types[0] = &device->drm_syncobj_type;
361    device->sync_types[1] = NULL;
362    device->vk.supported_sync_types = device->sync_types;
363 
364    result = panvk_wsi_init(device);
365    if (result != VK_SUCCESS) {
366       vk_error(instance, result);
367       goto fail_close_device;
368    }
369 
370    return VK_SUCCESS;
371 
372 fail_close_device:
373    panfrost_close_device(&device->pdev);
374 fail:
375    if (fd != -1)
376       close(fd);
377    if (master_fd != -1)
378       close(master_fd);
379    return result;
380 }
381 
382 static VkResult
panvk_enumerate_devices(struct panvk_instance * instance)383 panvk_enumerate_devices(struct panvk_instance *instance)
384 {
385    /* TODO: Check for more devices ? */
386    drmDevicePtr devices[8];
387    VkResult result = VK_ERROR_INCOMPATIBLE_DRIVER;
388    int max_devices;
389 
390    instance->physical_device_count = 0;
391 
392    max_devices = drmGetDevices2(0, devices, ARRAY_SIZE(devices));
393 
394    if (instance->debug_flags & PANVK_DEBUG_STARTUP)
395       panvk_logi("Found %d drm nodes", max_devices);
396 
397    if (max_devices < 1)
398       return vk_error(instance, VK_ERROR_INCOMPATIBLE_DRIVER);
399 
400    for (unsigned i = 0; i < (unsigned) max_devices; i++) {
401       if ((devices[i]->available_nodes & (1 << DRM_NODE_RENDER)) &&
402           devices[i]->bustype == DRM_BUS_PLATFORM) {
403 
404          result = panvk_physical_device_init(instance->physical_devices +
405                                            instance->physical_device_count,
406                                            instance, devices[i]);
407          if (result == VK_SUCCESS)
408             ++instance->physical_device_count;
409          else if (result != VK_ERROR_INCOMPATIBLE_DRIVER)
410             break;
411       }
412    }
413    drmFreeDevices(devices, max_devices);
414 
415    return result;
416 }
417 
418 VkResult
panvk_EnumeratePhysicalDevices(VkInstance _instance,uint32_t * pPhysicalDeviceCount,VkPhysicalDevice * pPhysicalDevices)419 panvk_EnumeratePhysicalDevices(VkInstance _instance,
420                                uint32_t *pPhysicalDeviceCount,
421                                VkPhysicalDevice *pPhysicalDevices)
422 {
423    VK_FROM_HANDLE(panvk_instance, instance, _instance);
424    VK_OUTARRAY_MAKE_TYPED(VkPhysicalDevice, out,
425                           pPhysicalDevices, pPhysicalDeviceCount);
426 
427    VkResult result;
428 
429    if (instance->physical_device_count < 0) {
430       result = panvk_enumerate_devices(instance);
431       if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
432          return result;
433    }
434 
435    for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
436       vk_outarray_append_typed(VkPhysicalDevice, &out, p)
437       {
438          *p = panvk_physical_device_to_handle(instance->physical_devices + i);
439       }
440    }
441 
442    return vk_outarray_status(&out);
443 }
444 
445 VkResult
panvk_EnumeratePhysicalDeviceGroups(VkInstance _instance,uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties)446 panvk_EnumeratePhysicalDeviceGroups(VkInstance _instance,
447                                     uint32_t *pPhysicalDeviceGroupCount,
448                                     VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
449 {
450    VK_FROM_HANDLE(panvk_instance, instance, _instance);
451    VK_OUTARRAY_MAKE_TYPED(VkPhysicalDeviceGroupProperties, out,
452                           pPhysicalDeviceGroupProperties,
453                           pPhysicalDeviceGroupCount);
454    VkResult result;
455 
456    if (instance->physical_device_count < 0) {
457       result = panvk_enumerate_devices(instance);
458       if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
459          return result;
460    }
461 
462    for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
463       vk_outarray_append_typed(VkPhysicalDeviceGroupProperties, &out, p)
464       {
465          p->physicalDeviceCount = 1;
466          p->physicalDevices[0] =
467             panvk_physical_device_to_handle(instance->physical_devices + i);
468          p->subsetAllocation = false;
469       }
470    }
471 
472    return VK_SUCCESS;
473 }
474 
/* Report the features panvk supports. Core 1.0 features go directly in
 * pFeatures->features; core 1.1/1.2/1.3 feature structs in the pNext
 * chain are filled by the vk_get_physical_device_core_*_feature_ext()
 * helpers from the tables below; remaining extension structs are
 * handled in the switch. Unknown structs are silently ignored. */
void
panvk_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                 VkPhysicalDeviceFeatures2 *pFeatures)
{
   pFeatures->features = (VkPhysicalDeviceFeatures) {
      .robustBufferAccess = true,
      .fullDrawIndexUint32 = true,
      .independentBlend = true,
      .logicOp = true,
      .wideLines = true,
      .largePoints = true,
      .textureCompressionETC2 = true,
      .textureCompressionASTC_LDR = true,
      .shaderUniformBufferArrayDynamicIndexing = true,
      .shaderSampledImageArrayDynamicIndexing = true,
      .shaderStorageBufferArrayDynamicIndexing = true,
      .shaderStorageImageArrayDynamicIndexing = true,
   };

   /* Vulkan 1.1 core features: only variable pointers are exposed. */
   const VkPhysicalDeviceVulkan11Features core_1_1 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
      .storageBuffer16BitAccess           = false,
      .uniformAndStorageBuffer16BitAccess = false,
      .storagePushConstant16              = false,
      .storageInputOutput16               = false,
      .multiview                          = false,
      .multiviewGeometryShader            = false,
      .multiviewTessellationShader        = false,
      .variablePointersStorageBuffer      = true,
      .variablePointers                   = true,
      .protectedMemory                    = false,
      .samplerYcbcrConversion             = false,
      .shaderDrawParameters               = false,
   };

   /* Vulkan 1.2 core features: none supported yet. */
   const VkPhysicalDeviceVulkan12Features core_1_2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
      .samplerMirrorClampToEdge           = false,
      .drawIndirectCount                  = false,
      .storageBuffer8BitAccess            = false,
      .uniformAndStorageBuffer8BitAccess  = false,
      .storagePushConstant8               = false,
      .shaderBufferInt64Atomics           = false,
      .shaderSharedInt64Atomics           = false,
      .shaderFloat16                      = false,
      .shaderInt8                         = false,

      .descriptorIndexing                                   = false,
      .shaderInputAttachmentArrayDynamicIndexing            = false,
      .shaderUniformTexelBufferArrayDynamicIndexing         = false,
      .shaderStorageTexelBufferArrayDynamicIndexing         = false,
      .shaderUniformBufferArrayNonUniformIndexing           = false,
      .shaderSampledImageArrayNonUniformIndexing            = false,
      .shaderStorageBufferArrayNonUniformIndexing           = false,
      .shaderStorageImageArrayNonUniformIndexing            = false,
      .shaderInputAttachmentArrayNonUniformIndexing         = false,
      .shaderUniformTexelBufferArrayNonUniformIndexing      = false,
      .shaderStorageTexelBufferArrayNonUniformIndexing      = false,
      .descriptorBindingUniformBufferUpdateAfterBind        = false,
      .descriptorBindingSampledImageUpdateAfterBind         = false,
      .descriptorBindingStorageImageUpdateAfterBind         = false,
      .descriptorBindingStorageBufferUpdateAfterBind        = false,
      .descriptorBindingUniformTexelBufferUpdateAfterBind   = false,
      .descriptorBindingStorageTexelBufferUpdateAfterBind   = false,
      .descriptorBindingUpdateUnusedWhilePending            = false,
      .descriptorBindingPartiallyBound                      = false,
      .descriptorBindingVariableDescriptorCount             = false,
      .runtimeDescriptorArray                               = false,

      .samplerFilterMinmax                = false,
      .scalarBlockLayout                  = false,
      .imagelessFramebuffer               = false,
      .uniformBufferStandardLayout        = false,
      .shaderSubgroupExtendedTypes        = false,
      .separateDepthStencilLayouts        = false,
      .hostQueryReset                     = false,
      .timelineSemaphore                  = false,
      .bufferDeviceAddress                = false,
      .bufferDeviceAddressCaptureReplay   = false,
      .bufferDeviceAddressMultiDevice     = false,
      .vulkanMemoryModel                  = false,
      .vulkanMemoryModelDeviceScope       = false,
      .vulkanMemoryModelAvailabilityVisibilityChains = false,
      .shaderOutputViewportIndex          = false,
      .shaderOutputLayer                  = false,
      .subgroupBroadcastDynamicId         = false,
   };

   /* Vulkan 1.3 core features: private data and synchronization2 only. */
   const VkPhysicalDeviceVulkan13Features core_1_3 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES,
      .robustImageAccess                  = false,
      .inlineUniformBlock                 = false,
      .descriptorBindingInlineUniformBlockUpdateAfterBind = false,
      .pipelineCreationCacheControl       = false,
      .privateData                        = true,
      .shaderDemoteToHelperInvocation     = false,
      .shaderTerminateInvocation          = false,
      .subgroupSizeControl                = false,
      .computeFullSubgroups               = false,
      .synchronization2                   = true,
      .textureCompressionASTC_HDR         = false,
      .shaderZeroInitializeWorkgroupMemory = false,
      .dynamicRendering                   = false,
      .shaderIntegerDotProduct            = false,
      .maintenance4                       = false,
   };

   vk_foreach_struct(ext, pFeatures->pNext)
   {
      /* The core helpers consume any 1.1/1.2/1.3 (sub-)feature struct. */
      if (vk_get_physical_device_core_1_1_feature_ext(ext, &core_1_1))
         continue;
      if (vk_get_physical_device_core_1_2_feature_ext(ext, &core_1_2))
         continue;
      if (vk_get_physical_device_core_1_3_feature_ext(ext, &core_1_3))
         continue;
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
         VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
            (VkPhysicalDeviceConditionalRenderingFeaturesEXT *) ext;
         features->conditionalRendering = false;
         features->inheritedConditionalRendering = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
         VkPhysicalDeviceTransformFeedbackFeaturesEXT *features =
            (VkPhysicalDeviceTransformFeedbackFeaturesEXT *) ext;
         features->transformFeedback = false;
         features->geometryStreams = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
         VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
            (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
         features->indexTypeUint8 = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
         VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
            (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *)ext;
         features->vertexAttributeInstanceRateDivisor = true;
         features->vertexAttributeInstanceRateZeroDivisor = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
         VkPhysicalDeviceDepthClipEnableFeaturesEXT *features =
            (VkPhysicalDeviceDepthClipEnableFeaturesEXT *)ext;
         features->depthClipEnable = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: {
         VkPhysicalDevice4444FormatsFeaturesEXT *features = (void *)ext;
         features->formatA4R4G4B4 = true;
         features->formatA4B4G4R4 = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
         VkPhysicalDeviceCustomBorderColorFeaturesEXT *features = (void *) ext;
         features->customBorderColors = true;
         features->customBorderColorWithoutFormat = true;
         break;
      }
      default:
         /* Unknown feature structs are left untouched. */
         break;
      }
   }
}
641 
642 void
panvk_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,VkPhysicalDeviceProperties2 * pProperties)643 panvk_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
644                                    VkPhysicalDeviceProperties2 *pProperties)
645 {
646    VK_FROM_HANDLE(panvk_physical_device, pdevice, physicalDevice);
647 
648    VkSampleCountFlags sample_counts =
649       VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
650 
651    /* make sure that the entire descriptor set is addressable with a signed
652     * 32-bit int. So the sum of all limits scaled by descriptor size has to
653     * be at most 2 GiB. the combined image & samples object count as one of
654     * both. This limit is for the pipeline layout, not for the set layout, but
655     * there is no set limit, so we just set a pipeline limit. I don't think
656     * any app is going to hit this soon. */
657    size_t max_descriptor_set_size =
658       ((1ull << 31) - 16 * MAX_DYNAMIC_BUFFERS) /
659       (32 /* uniform buffer, 32 due to potential space wasted on alignment */ +
660        32 /* storage buffer, 32 due to potential space wasted on alignment */ +
661        32 /* sampler, largest when combined with image */ +
662        64 /* sampled image */ + 64 /* storage image */);
663 
664    const VkPhysicalDeviceLimits limits = {
665       .maxImageDimension1D = (1 << 14),
666       .maxImageDimension2D = (1 << 14),
667       .maxImageDimension3D = (1 << 11),
668       .maxImageDimensionCube = (1 << 14),
669       .maxImageArrayLayers = (1 << 11),
670       .maxTexelBufferElements = 128 * 1024 * 1024,
671       .maxUniformBufferRange = UINT32_MAX,
672       .maxStorageBufferRange = UINT32_MAX,
673       .maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
674       .maxMemoryAllocationCount = UINT32_MAX,
675       .maxSamplerAllocationCount = 64 * 1024,
676       .bufferImageGranularity = 64,          /* A cache line */
677       .sparseAddressSpaceSize = 0xffffffffu, /* buffer max size */
678       .maxBoundDescriptorSets = MAX_SETS,
679       .maxPerStageDescriptorSamplers = max_descriptor_set_size,
680       .maxPerStageDescriptorUniformBuffers = max_descriptor_set_size,
681       .maxPerStageDescriptorStorageBuffers = max_descriptor_set_size,
682       .maxPerStageDescriptorSampledImages = max_descriptor_set_size,
683       .maxPerStageDescriptorStorageImages = max_descriptor_set_size,
684       .maxPerStageDescriptorInputAttachments = max_descriptor_set_size,
685       .maxPerStageResources = max_descriptor_set_size,
686       .maxDescriptorSetSamplers = max_descriptor_set_size,
687       .maxDescriptorSetUniformBuffers = max_descriptor_set_size,
688       .maxDescriptorSetUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS,
689       .maxDescriptorSetStorageBuffers = max_descriptor_set_size,
690       .maxDescriptorSetStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS,
691       .maxDescriptorSetSampledImages = max_descriptor_set_size,
692       .maxDescriptorSetStorageImages = max_descriptor_set_size,
693       .maxDescriptorSetInputAttachments = max_descriptor_set_size,
694       .maxVertexInputAttributes = 32,
695       .maxVertexInputBindings = 32,
696       .maxVertexInputAttributeOffset = 2047,
697       .maxVertexInputBindingStride = 2048,
698       .maxVertexOutputComponents = 128,
699       .maxTessellationGenerationLevel = 64,
700       .maxTessellationPatchSize = 32,
701       .maxTessellationControlPerVertexInputComponents = 128,
702       .maxTessellationControlPerVertexOutputComponents = 128,
703       .maxTessellationControlPerPatchOutputComponents = 120,
704       .maxTessellationControlTotalOutputComponents = 4096,
705       .maxTessellationEvaluationInputComponents = 128,
706       .maxTessellationEvaluationOutputComponents = 128,
707       .maxGeometryShaderInvocations = 127,
708       .maxGeometryInputComponents = 64,
709       .maxGeometryOutputComponents = 128,
710       .maxGeometryOutputVertices = 256,
711       .maxGeometryTotalOutputComponents = 1024,
712       .maxFragmentInputComponents = 128,
713       .maxFragmentOutputAttachments = 8,
714       .maxFragmentDualSrcAttachments = 1,
715       .maxFragmentCombinedOutputResources = MAX_RTS + max_descriptor_set_size * 2,
716       .maxComputeSharedMemorySize = 32768,
717       .maxComputeWorkGroupCount = { 65535, 65535, 65535 },
718       .maxComputeWorkGroupInvocations = 2048,
719       .maxComputeWorkGroupSize = { 2048, 2048, 2048 },
720       .subPixelPrecisionBits = 4 /* FIXME */,
721       .subTexelPrecisionBits = 4 /* FIXME */,
722       .mipmapPrecisionBits = 4 /* FIXME */,
723       .maxDrawIndexedIndexValue = UINT32_MAX,
724       .maxDrawIndirectCount = UINT32_MAX,
725       .maxSamplerLodBias = 16,
726       .maxSamplerAnisotropy = 16,
727       .maxViewports = MAX_VIEWPORTS,
728       .maxViewportDimensions = { (1 << 14), (1 << 14) },
729       .viewportBoundsRange = { INT16_MIN, INT16_MAX },
730       .viewportSubPixelBits = 8,
731       .minMemoryMapAlignment = 4096, /* A page */
732       .minTexelBufferOffsetAlignment = 64,
733       .minUniformBufferOffsetAlignment = 16,
734       .minStorageBufferOffsetAlignment = 4,
735       .minTexelOffset = -32,
736       .maxTexelOffset = 31,
737       .minTexelGatherOffset = -32,
738       .maxTexelGatherOffset = 31,
739       .minInterpolationOffset = -2,
740       .maxInterpolationOffset = 2,
741       .subPixelInterpolationOffsetBits = 8,
742       .maxFramebufferWidth = (1 << 14),
743       .maxFramebufferHeight = (1 << 14),
744       .maxFramebufferLayers = (1 << 10),
745       .framebufferColorSampleCounts = sample_counts,
746       .framebufferDepthSampleCounts = sample_counts,
747       .framebufferStencilSampleCounts = sample_counts,
748       .framebufferNoAttachmentsSampleCounts = sample_counts,
749       .maxColorAttachments = MAX_RTS,
750       .sampledImageColorSampleCounts = sample_counts,
751       .sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT,
752       .sampledImageDepthSampleCounts = sample_counts,
753       .sampledImageStencilSampleCounts = sample_counts,
754       .storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
755       .maxSampleMaskWords = 1,
756       .timestampComputeAndGraphics = true,
757       .timestampPeriod = 1,
758       .maxClipDistances = 8,
759       .maxCullDistances = 8,
760       .maxCombinedClipAndCullDistances = 8,
761       .discreteQueuePriorities = 1,
762       .pointSizeRange = { 0.125, 4095.9375 },
763       .lineWidthRange = { 0.0, 7.9921875 },
764       .pointSizeGranularity = (1.0 / 16.0),
765       .lineWidthGranularity = (1.0 / 128.0),
766       .strictLines = false, /* FINISHME */
767       .standardSampleLocations = true,
768       .optimalBufferCopyOffsetAlignment = 128,
769       .optimalBufferCopyRowPitchAlignment = 128,
770       .nonCoherentAtomSize = 64,
771    };
772 
773    pProperties->properties = (VkPhysicalDeviceProperties) {
774       .apiVersion = PANVK_API_VERSION,
775       .driverVersion = vk_get_driver_version(),
776       .vendorID = 0, /* TODO */
777       .deviceID = 0,
778       .deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
779       .limits = limits,
780       .sparseProperties = { 0 },
781    };
782 
783    strcpy(pProperties->properties.deviceName, pdevice->name);
784    memcpy(pProperties->properties.pipelineCacheUUID, pdevice->cache_uuid, VK_UUID_SIZE);
785 
786    VkPhysicalDeviceVulkan11Properties core_1_1 = {
787       .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
788       .deviceLUIDValid                       = false,
789       .pointClippingBehavior                 = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
790       .maxMultiviewViewCount                 = 0,
791       .maxMultiviewInstanceIndex             = 0,
792       .protectedNoFault                      = false,
793       /* Make sure everything is addressable by a signed 32-bit int, and
794        * our largest descriptors are 96 bytes. */
795       .maxPerSetDescriptors                  = (1ull << 31) / 96,
796       /* Our buffer size fields allow only this much */
797       .maxMemoryAllocationSize               = 0xFFFFFFFFull,
798    };
799    memcpy(core_1_1.driverUUID, pdevice->driver_uuid, VK_UUID_SIZE);
800    memcpy(core_1_1.deviceUUID, pdevice->device_uuid, VK_UUID_SIZE);
801 
802    const VkPhysicalDeviceVulkan12Properties core_1_2 = {
803       .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
804    };
805 
806    const VkPhysicalDeviceVulkan13Properties core_1_3 = {
807       .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES,
808    };
809 
810    vk_foreach_struct(ext, pProperties->pNext)
811    {
812       if (vk_get_physical_device_core_1_1_property_ext(ext, &core_1_1))
813          continue;
814       if (vk_get_physical_device_core_1_2_property_ext(ext, &core_1_2))
815          continue;
816       if (vk_get_physical_device_core_1_3_property_ext(ext, &core_1_3))
817          continue;
818 
819       switch (ext->sType) {
820       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: {
821          VkPhysicalDevicePushDescriptorPropertiesKHR *properties = (VkPhysicalDevicePushDescriptorPropertiesKHR *)ext;
822          properties->maxPushDescriptors = MAX_PUSH_DESCRIPTORS;
823          break;
824       }
825       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: {
826          VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *properties =
827             (VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *)ext;
828          /* We have to restrict this a bit for multiview */
829          properties->maxVertexAttribDivisor = UINT32_MAX / (16 * 2048);
830          break;
831       }
832       default:
833          break;
834       }
835    }
836 }
837 
/* panvk exposes a single queue family whose one queue supports graphics,
 * compute and transfer work.  Timestamps are reported as 64-bit valid, and
 * image transfers have no granularity restriction (1x1x1). */
static const VkQueueFamilyProperties panvk_queue_family_properties = {
   .queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
   .queueCount = 1,
   .timestampValidBits = 64,
   .minImageTransferGranularity = { 1, 1, 1 },
};
844 
/* vkGetPhysicalDeviceQueueFamilyProperties2: report our queue families.
 * The VK_OUTARRAY helpers implement the Vulkan two-call idiom (NULL array
 * pointer queries the count, non-NULL fills up to *pQueueFamilyPropertyCount
 * entries). */
void
panvk_GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
                                              uint32_t *pQueueFamilyPropertyCount,
                                              VkQueueFamilyProperties2 *pQueueFamilyProperties)
{
   VK_OUTARRAY_MAKE_TYPED(VkQueueFamilyProperties2, out,
                          pQueueFamilyProperties,
                          pQueueFamilyPropertyCount);

   /* Exactly one family; see panvk_queue_family_properties. */
   vk_outarray_append_typed(VkQueueFamilyProperties2, &out, p)
   {
      p->queueFamilyProperties = panvk_queue_family_properties;
   }
}
859 
/*
 * Size the Vulkan heap from total system RAM (Mali shares memory with the
 * CPU).  We don't want to burn too much RAM on the GPU: with 4GiB or less
 * we advertise at most half, above that three quarters.
 *
 * Fixed: the definition used an old-style empty parameter list `()`
 * (unspecified arguments in C11); it now uses the proper `(void)` prototype.
 */
static uint64_t
panvk_get_system_heap_size(void)
{
   struct sysinfo info;
   sysinfo(&info);

   /* totalram is expressed in units of mem_unit bytes. */
   uint64_t total_ram = (uint64_t)info.totalram * info.mem_unit;

   if (total_ram <= 4ull * 1024 * 1024 * 1024)
      return total_ram / 2;

   return total_ram * 3 / 4;
}
879 
/* vkGetPhysicalDeviceMemoryProperties2: advertise a single heap sized from
 * system RAM (see panvk_get_system_heap_size()) and a single memory type
 * that is DEVICE_LOCAL | HOST_VISIBLE | HOST_COHERENT — which is why the
 * flush/invalidate entrypoints in this file are no-ops. */
void
panvk_GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
                                         VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
{
   pMemoryProperties->memoryProperties = (VkPhysicalDeviceMemoryProperties) {
      .memoryHeapCount = 1,
      .memoryHeaps[0].size = panvk_get_system_heap_size(),
      .memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
      .memoryTypeCount = 1,
      .memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
                                      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                                      VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
      .memoryTypes[0].heapIndex = 0,
   };
}
895 
/* Initialize one panvk_queue: set up the common vk_queue state, create the
 * DRM syncobj used to track work on this queue (waited on by
 * panvk_QueueWaitIdle()), and install the per-GPU-generation submit hook.
 *
 * Returns VK_SUCCESS, or VK_ERROR_OUT_OF_HOST_MEMORY if syncobj creation
 * fails (the vk_queue is finished again in that case). */
static VkResult
panvk_queue_init(struct panvk_device *device,
                 struct panvk_queue *queue,
                 int idx,
                 const VkDeviceQueueCreateInfo *create_info)
{
   const struct panfrost_device *pdev = &device->physical_device->pdev;

   VkResult result = vk_queue_init(&queue->vk, &device->vk, create_info, idx);
   if (result != VK_SUCCESS)
      return result;
   queue->device = device;

   /* Created signaled so waiting on a queue that never submitted anything
    * succeeds immediately. */
   struct drm_syncobj_create create = {
      .flags = DRM_SYNCOBJ_CREATE_SIGNALED,
   };

   int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_CREATE, &create);
   if (ret) {
      /* Undo the vk_queue_init() above before bailing. */
      vk_queue_finish(&queue->vk);
      return VK_ERROR_OUT_OF_HOST_MEMORY;
   }

   /* Submission is generation-specific (Bifrost v6/v7 only for now). */
   switch (pdev->arch) {
   case 6: queue->vk.driver_submit = panvk_v6_queue_submit; break;
   case 7: queue->vk.driver_submit = panvk_v7_queue_submit; break;
   default: unreachable("Invalid arch");
   }

   queue->sync = create.handle;
   return VK_SUCCESS;
}
928 
/* Tear down a queue set up by panvk_queue_init().
 *
 * NOTE(review): queue->sync, the DRM syncobj created in panvk_queue_init(),
 * is not destroyed here — this looks like a syncobj handle leak; confirm
 * and add a DRM_IOCTL_SYNCOBJ_DESTROY if so. */
static void
panvk_queue_finish(struct panvk_queue *queue)
{
   vk_queue_finish(&queue->vk);
}
934 
/* vkCreateDevice: allocate and initialize a panvk_device.
 *
 * Two dispatch tables are built:
 *  - the main device-level table, whose vkCmd* slots are first filled with
 *    vk_cmd_enqueue_unless_primary_Cmd* wrappers so that secondary command
 *    buffers record commands instead of executing them;
 *  - device->cmd_dispatch, the table used to actually execute commands on
 *    primary command buffers.
 * Afterwards the queues requested in pCreateInfo are created.
 */
VkResult
panvk_CreateDevice(VkPhysicalDevice physicalDevice,
                   const VkDeviceCreateInfo *pCreateInfo,
                   const VkAllocationCallbacks *pAllocator,
                   VkDevice *pDevice)
{
   VK_FROM_HANDLE(panvk_physical_device, physical_device, physicalDevice);
   VkResult result;
   struct panvk_device *device;

   device = vk_zalloc2(&physical_device->instance->vk.alloc, pAllocator,
                       sizeof(*device), 8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!device)
      return vk_error(physical_device, VK_ERROR_OUT_OF_HOST_MEMORY);

   const struct vk_device_entrypoint_table *dev_entrypoints;
   struct vk_device_dispatch_table dispatch_table;

   /* Pick the per-GPU-generation entrypoints (Bifrost v6/v7). */
   switch (physical_device->pdev.arch) {
   case 6:
      dev_entrypoints = &panvk_v6_device_entrypoints;
      break;
   case 7:
      dev_entrypoints = &panvk_v7_device_entrypoints;
      break;
   default:
      unreachable("Unsupported architecture");
   }

   /* For secondary command buffer support, overwrite any command entrypoints
    * in the main device-level dispatch table with
    * vk_cmd_enqueue_unless_primary_Cmd*.
    */
   vk_device_dispatch_table_from_entrypoints(&dispatch_table,
                                             &vk_cmd_enqueue_unless_primary_device_entrypoints,
                                             true);

   /* Fill the remaining slots, most specific table first (later calls only
    * fill entries still unset). */
   vk_device_dispatch_table_from_entrypoints(&dispatch_table,
                                             dev_entrypoints,
                                             false);
   vk_device_dispatch_table_from_entrypoints(&dispatch_table,
                                             &panvk_device_entrypoints,
                                             false);
   vk_device_dispatch_table_from_entrypoints(&dispatch_table,
                                             &wsi_device_entrypoints,
                                             false);

   /* Populate our primary cmd_dispatch table. */
   vk_device_dispatch_table_from_entrypoints(&device->cmd_dispatch,
                                             dev_entrypoints,
                                             true);
   vk_device_dispatch_table_from_entrypoints(&device->cmd_dispatch,
                                             &panvk_device_entrypoints,
                                             false);
   vk_device_dispatch_table_from_entrypoints(&device->cmd_dispatch,
                                             &vk_common_device_entrypoints,
                                             false);

   result = vk_device_init(&device->vk, &physical_device->vk, &dispatch_table,
                           pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free(&device->vk.alloc, device);
      return result;
   }

   /* Must be done after vk_device_init() because this function memset(0) the
    * whole struct.
    */
   device->vk.command_dispatch_table = &device->cmd_dispatch;

   device->instance = physical_device->instance;
   device->physical_device = physical_device;

   const struct panfrost_device *pdev = &physical_device->pdev;
   vk_device_set_drm_fd(&device->vk, pdev->fd);

   /* Create the requested queues, family by family. */
   for (unsigned i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
      const VkDeviceQueueCreateInfo *queue_create =
         &pCreateInfo->pQueueCreateInfos[i];
      uint32_t qfi = queue_create->queueFamilyIndex;
      device->queues[qfi] =
         vk_alloc(&device->vk.alloc,
                  queue_create->queueCount * sizeof(struct panvk_queue),
                  8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
      if (!device->queues[qfi]) {
         result = VK_ERROR_OUT_OF_HOST_MEMORY;
         goto fail;
      }

      /* Zero the array so the fail path below can safely finish queues that
       * were never initialized. */
      memset(device->queues[qfi], 0,
             queue_create->queueCount * sizeof(struct panvk_queue));

      device->queue_count[qfi] = queue_create->queueCount;

      for (unsigned q = 0; q < queue_create->queueCount; q++) {
         result = panvk_queue_init(device, &device->queues[qfi][q], q,
                                   queue_create);
         if (result != VK_SUCCESS)
            goto fail;
      }
   }

   *pDevice = panvk_device_to_handle(device);
   return VK_SUCCESS;

fail:
   /* queue_count[] is only set after the family's array was allocated, so
    * this only walks what exists.
    * NOTE(review): when panvk_queue_init() fails mid-family this also
    * finishes zero-initialized queues that were never initialized, and
    * re-finishes the one panvk_queue_init() already cleaned up — presumably
    * vk_queue_finish() tolerates that; confirm. */
   for (unsigned i = 0; i < PANVK_MAX_QUEUE_FAMILIES; i++) {
      for (unsigned q = 0; q < device->queue_count[i]; q++)
         panvk_queue_finish(&device->queues[i][q]);
      if (device->queue_count[i])
         vk_object_free(&device->vk, NULL, device->queues[i]);
   }

   vk_free(&device->vk.alloc, device);
   return result;
}
1051 
1052 void
panvk_DestroyDevice(VkDevice _device,const VkAllocationCallbacks * pAllocator)1053 panvk_DestroyDevice(VkDevice _device, const VkAllocationCallbacks *pAllocator)
1054 {
1055    VK_FROM_HANDLE(panvk_device, device, _device);
1056 
1057    if (!device)
1058       return;
1059 
1060    for (unsigned i = 0; i < PANVK_MAX_QUEUE_FAMILIES; i++) {
1061       for (unsigned q = 0; q < device->queue_count[i]; q++)
1062          panvk_queue_finish(&device->queues[i][q]);
1063       if (device->queue_count[i])
1064          vk_object_free(&device->vk, NULL, device->queues[i]);
1065    }
1066 
1067    vk_free(&device->vk.alloc, device);
1068 }
1069 
/* vkEnumerateInstanceLayerProperties: panvk implements no instance layers,
 * so always report an empty list. */
VkResult
panvk_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
                                       VkLayerProperties *pProperties)
{
   *pPropertyCount = 0;
   return VK_SUCCESS;
}
1077 
1078 VkResult
panvk_QueueWaitIdle(VkQueue _queue)1079 panvk_QueueWaitIdle(VkQueue _queue)
1080 {
1081    VK_FROM_HANDLE(panvk_queue, queue, _queue);
1082 
1083    if (panvk_device_is_lost(queue->device))
1084       return VK_ERROR_DEVICE_LOST;
1085 
1086    const struct panfrost_device *pdev = &queue->device->physical_device->pdev;
1087    struct drm_syncobj_wait wait = {
1088       .handles = (uint64_t) (uintptr_t)(&queue->sync),
1089       .count_handles = 1,
1090       .timeout_nsec = INT64_MAX,
1091       .flags = DRM_SYNCOBJ_WAIT_FLAGS_WAIT_ALL,
1092    };
1093    int ret;
1094 
1095    ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_WAIT, &wait);
1096    assert(!ret);
1097 
1098    return VK_SUCCESS;
1099 }
1100 
1101 VkResult
panvk_EnumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)1102 panvk_EnumerateInstanceExtensionProperties(const char *pLayerName,
1103                                            uint32_t *pPropertyCount,
1104                                            VkExtensionProperties *pProperties)
1105 {
1106    if (pLayerName)
1107       return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1108 
1109    return vk_enumerate_instance_extension_properties(&panvk_instance_extensions,
1110                                                      pPropertyCount, pProperties);
1111 }
1112 
/* vkGetInstanceProcAddr: resolve entrypoints through the common runtime
 * against our instance entrypoint table. */
PFN_vkVoidFunction
panvk_GetInstanceProcAddr(VkInstance _instance, const char *pName)
{
   VK_FROM_HANDLE(panvk_instance, instance, _instance);
   return vk_instance_get_proc_addr(&instance->vk,
                                    &panvk_instance_entrypoints,
                                    pName);
}
1121 
1122 /* The loader wants us to expose a second GetInstanceProcAddr function
1123  * to work around certain LD_PRELOAD issues seen in apps.
1124  */
1125 PUBLIC
1126 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1127 vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName);
1128 
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
{
   /* Same resolution as vkGetInstanceProcAddr; see the loader note above. */
   return panvk_GetInstanceProcAddr(instance, pName);
}
1135 
1136 /* With version 4+ of the loader interface the ICD should expose
1137  * vk_icdGetPhysicalDeviceProcAddr()
1138  */
1139 PUBLIC
1140 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1141 vk_icdGetPhysicalDeviceProcAddr(VkInstance  _instance,
1142                                 const char* pName);
1143 
PFN_vkVoidFunction
vk_icdGetPhysicalDeviceProcAddr(VkInstance  _instance,
                                const char* pName)
{
   VK_FROM_HANDLE(panvk_instance, instance, _instance);

   /* Physical-device-level resolution is handled by the common runtime. */
   return vk_instance_get_physical_device_proc_addr(&instance->vk, pName);
}
1152 
1153 VkResult
panvk_AllocateMemory(VkDevice _device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMem)1154 panvk_AllocateMemory(VkDevice _device,
1155                      const VkMemoryAllocateInfo *pAllocateInfo,
1156                      const VkAllocationCallbacks *pAllocator,
1157                      VkDeviceMemory *pMem)
1158 {
1159    VK_FROM_HANDLE(panvk_device, device, _device);
1160    struct panvk_device_memory *mem;
1161 
1162    assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
1163 
1164    if (pAllocateInfo->allocationSize == 0) {
1165       /* Apparently, this is allowed */
1166       *pMem = VK_NULL_HANDLE;
1167       return VK_SUCCESS;
1168    }
1169 
1170    mem = vk_object_alloc(&device->vk, pAllocator, sizeof(*mem),
1171                          VK_OBJECT_TYPE_DEVICE_MEMORY);
1172    if (mem == NULL)
1173       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1174 
1175    const VkImportMemoryFdInfoKHR *fd_info =
1176       vk_find_struct_const(pAllocateInfo->pNext,
1177                            IMPORT_MEMORY_FD_INFO_KHR);
1178 
1179    if (fd_info && !fd_info->handleType)
1180       fd_info = NULL;
1181 
1182    if (fd_info) {
1183       assert(fd_info->handleType ==
1184                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
1185              fd_info->handleType ==
1186                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
1187 
1188       /*
1189        * TODO Importing the same fd twice gives us the same handle without
1190        * reference counting.  We need to maintain a per-instance handle-to-bo
1191        * table and add reference count to panvk_bo.
1192        */
1193       mem->bo = panfrost_bo_import(&device->physical_device->pdev, fd_info->fd);
1194       /* take ownership and close the fd */
1195       close(fd_info->fd);
1196    } else {
1197       mem->bo = panfrost_bo_create(&device->physical_device->pdev,
1198                                    pAllocateInfo->allocationSize, 0,
1199                                    "User-requested memory");
1200    }
1201 
1202    assert(mem->bo);
1203 
1204    *pMem = panvk_device_memory_to_handle(mem);
1205 
1206    return VK_SUCCESS;
1207 }
1208 
1209 void
panvk_FreeMemory(VkDevice _device,VkDeviceMemory _mem,const VkAllocationCallbacks * pAllocator)1210 panvk_FreeMemory(VkDevice _device,
1211                  VkDeviceMemory _mem,
1212                  const VkAllocationCallbacks *pAllocator)
1213 {
1214    VK_FROM_HANDLE(panvk_device, device, _device);
1215    VK_FROM_HANDLE(panvk_device_memory, mem, _mem);
1216 
1217    if (mem == NULL)
1218       return;
1219 
1220    panfrost_bo_unreference(mem->bo);
1221    vk_object_free(&device->vk, pAllocator, mem);
1222 }
1223 
1224 VkResult
panvk_MapMemory(VkDevice _device,VkDeviceMemory _memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)1225 panvk_MapMemory(VkDevice _device,
1226                 VkDeviceMemory _memory,
1227                 VkDeviceSize offset,
1228                 VkDeviceSize size,
1229                 VkMemoryMapFlags flags,
1230                 void **ppData)
1231 {
1232    VK_FROM_HANDLE(panvk_device, device, _device);
1233    VK_FROM_HANDLE(panvk_device_memory, mem, _memory);
1234 
1235    if (mem == NULL) {
1236       *ppData = NULL;
1237       return VK_SUCCESS;
1238    }
1239 
1240    if (!mem->bo->ptr.cpu)
1241       panfrost_bo_mmap(mem->bo);
1242 
1243    *ppData = mem->bo->ptr.cpu;
1244 
1245    if (*ppData) {
1246       *ppData += offset;
1247       return VK_SUCCESS;
1248    }
1249 
1250    return vk_error(device, VK_ERROR_MEMORY_MAP_FAILED);
1251 }
1252 
/* vkUnmapMemory: intentionally a no-op — the CPU mapping established in
 * panvk_MapMemory() is cached in the BO and reused by later maps. */
void
panvk_UnmapMemory(VkDevice _device, VkDeviceMemory _memory)
{
}
1257 
/* vkFlushMappedMemoryRanges: no-op — the only memory type we expose is
 * HOST_COHERENT (see panvk_GetPhysicalDeviceMemoryProperties2()). */
VkResult
panvk_FlushMappedMemoryRanges(VkDevice _device,
                              uint32_t memoryRangeCount,
                              const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}
1265 
/* vkInvalidateMappedMemoryRanges: no-op for the same reason as
 * panvk_FlushMappedMemoryRanges() — all exposed memory is HOST_COHERENT. */
VkResult
panvk_InvalidateMappedMemoryRanges(VkDevice _device,
                                   uint32_t memoryRangeCount,
                                   const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}
1273 
1274 void
panvk_GetBufferMemoryRequirements2(VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)1275 panvk_GetBufferMemoryRequirements2(VkDevice device,
1276                                    const VkBufferMemoryRequirementsInfo2 *pInfo,
1277                                    VkMemoryRequirements2 *pMemoryRequirements)
1278 {
1279    VK_FROM_HANDLE(panvk_buffer, buffer, pInfo->buffer);
1280 
1281    const uint64_t align = 64;
1282    const uint64_t size = align64(buffer->vk.size, align);
1283 
1284    pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
1285    pMemoryRequirements->memoryRequirements.alignment = align;
1286    pMemoryRequirements->memoryRequirements.size = size;
1287 }
1288 
1289 void
panvk_GetImageMemoryRequirements2(VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)1290 panvk_GetImageMemoryRequirements2(VkDevice device,
1291                                  const VkImageMemoryRequirementsInfo2 *pInfo,
1292                                  VkMemoryRequirements2 *pMemoryRequirements)
1293 {
1294    VK_FROM_HANDLE(panvk_image, image, pInfo->image);
1295 
1296    const uint64_t align = 4096;
1297    const uint64_t size = panvk_image_get_total_size(image);
1298 
1299    pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
1300    pMemoryRequirements->memoryRequirements.alignment = align;
1301    pMemoryRequirements->memoryRequirements.size = size;
1302 }
1303 
1304 void
panvk_GetImageSparseMemoryRequirements2(VkDevice device,const VkImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements)1305 panvk_GetImageSparseMemoryRequirements2(VkDevice device,
1306                                         const VkImageSparseMemoryRequirementsInfo2 *pInfo,
1307                                         uint32_t *pSparseMemoryRequirementCount,
1308                                         VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
1309 {
1310    panvk_stub();
1311 }
1312 
/* vkGetDeviceMemoryCommitment: we expose no LAZILY_ALLOCATED memory type
 * (see panvk_GetPhysicalDeviceMemoryProperties2()), so there is never any
 * incrementally-committed memory to report. */
void
panvk_GetDeviceMemoryCommitment(VkDevice device,
                                VkDeviceMemory memory,
                                VkDeviceSize *pCommittedMemoryInBytes)
{
   *pCommittedMemoryInBytes = 0;
}
1320 
1321 VkResult
panvk_BindBufferMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)1322 panvk_BindBufferMemory2(VkDevice device,
1323                         uint32_t bindInfoCount,
1324                         const VkBindBufferMemoryInfo *pBindInfos)
1325 {
1326    for (uint32_t i = 0; i < bindInfoCount; ++i) {
1327       VK_FROM_HANDLE(panvk_device_memory, mem, pBindInfos[i].memory);
1328       VK_FROM_HANDLE(panvk_buffer, buffer, pBindInfos[i].buffer);
1329 
1330       if (mem) {
1331          buffer->bo = mem->bo;
1332          buffer->bo_offset = pBindInfos[i].memoryOffset;
1333       } else {
1334          buffer->bo = NULL;
1335       }
1336    }
1337    return VK_SUCCESS;
1338 }
1339 
/* vkBindImageMemory2: attach each image to its memory's BO; a NULL memory
 * handle detaches the image.  AFBC images get their compression headers
 * zeroed at bind time, before first use. */
VkResult
panvk_BindImageMemory2(VkDevice device,
                       uint32_t bindInfoCount,
                       const VkBindImageMemoryInfo *pBindInfos)
{
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      VK_FROM_HANDLE(panvk_image, image, pBindInfos[i].image);
      VK_FROM_HANDLE(panvk_device_memory, mem, pBindInfos[i].memory);

      if (mem) {
         image->pimage.data.bo = mem->bo;
         image->pimage.data.offset = pBindInfos[i].memoryOffset;
         /* Reset the AFBC headers */
         if (drm_is_afbc(image->pimage.layout.modifier)) {
            /* NOTE(review): this dereferences bo->ptr.cpu without an
             * explicit panfrost_bo_mmap() (compare panvk_MapMemory()) —
             * presumably the BO is always CPU-mapped here; confirm for
             * imported BOs. */
            void *base = image->pimage.data.bo->ptr.cpu + image->pimage.data.offset;

            /* Zero the header region of every array layer and mip level. */
            for (unsigned layer = 0; layer < image->pimage.layout.array_size; layer++) {
               for (unsigned level = 0; level < image->pimage.layout.nr_slices; level++) {
                  void *header = base +
                                 (layer * image->pimage.layout.array_stride) +
                                 image->pimage.layout.slices[level].offset;
                  memset(header, 0, image->pimage.layout.slices[level].afbc.header_size);
               }
            }
         }
      } else {
         image->pimage.data.bo = NULL;
         image->pimage.data.offset = pBindInfos[i].memoryOffset;
      }
   }

   return VK_SUCCESS;
}
1373 
1374 VkResult
panvk_CreateEvent(VkDevice _device,const VkEventCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkEvent * pEvent)1375 panvk_CreateEvent(VkDevice _device,
1376                   const VkEventCreateInfo *pCreateInfo,
1377                   const VkAllocationCallbacks *pAllocator,
1378                   VkEvent *pEvent)
1379 {
1380    VK_FROM_HANDLE(panvk_device, device, _device);
1381    const struct panfrost_device *pdev = &device->physical_device->pdev;
1382    struct panvk_event *event =
1383       vk_object_zalloc(&device->vk, pAllocator, sizeof(*event),
1384                        VK_OBJECT_TYPE_EVENT);
1385    if (!event)
1386       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1387 
1388    struct drm_syncobj_create create = {
1389       .flags = 0,
1390    };
1391 
1392    int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_CREATE, &create);
1393    if (ret)
1394       return VK_ERROR_OUT_OF_HOST_MEMORY;
1395 
1396    event->syncobj = create.handle;
1397    *pEvent = panvk_event_to_handle(event);
1398 
1399    return VK_SUCCESS;
1400 }
1401 
1402 void
panvk_DestroyEvent(VkDevice _device,VkEvent _event,const VkAllocationCallbacks * pAllocator)1403 panvk_DestroyEvent(VkDevice _device,
1404                    VkEvent _event,
1405                    const VkAllocationCallbacks *pAllocator)
1406 {
1407    VK_FROM_HANDLE(panvk_device, device, _device);
1408    VK_FROM_HANDLE(panvk_event, event, _event);
1409    const struct panfrost_device *pdev = &device->physical_device->pdev;
1410 
1411    if (!event)
1412       return;
1413 
1414    struct drm_syncobj_destroy destroy = { .handle = event->syncobj };
1415    drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_DESTROY, &destroy);
1416 
1417    vk_object_free(&device->vk, pAllocator, event);
1418 }
1419 
/* vkGetEventStatus: poll the event's syncobj without blocking
 * (timeout_nsec = 0).  WAIT_FOR_SUBMIT makes a syncobj that has no fence
 * attached yet behave like an unsignaled one (time out) instead of failing
 * the ioctl outright. */
VkResult
panvk_GetEventStatus(VkDevice _device, VkEvent _event)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;
   bool signaled;

   struct drm_syncobj_wait wait = {
      .handles = (uintptr_t) &event->syncobj,
      .count_handles = 1,
      .timeout_nsec = 0,
      .flags = DRM_SYNCOBJ_WAIT_FLAGS_WAIT_FOR_SUBMIT,
   };

   int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_WAIT, &wait);
   if (ret) {
      /* ETIME just means the syncobj isn't signaled yet. */
      if (errno == ETIME)
         signaled = false;
      else {
         /* Any other failure is unexpected. */
         assert(0);
         return VK_ERROR_DEVICE_LOST; /* TODO */
      }
   } else
      signaled = true;

   return signaled ? VK_EVENT_SET : VK_EVENT_RESET;
}
1448 
1449 VkResult
panvk_SetEvent(VkDevice _device,VkEvent _event)1450 panvk_SetEvent(VkDevice _device, VkEvent _event)
1451 {
1452    VK_FROM_HANDLE(panvk_device, device, _device);
1453    VK_FROM_HANDLE(panvk_event, event, _event);
1454    const struct panfrost_device *pdev = &device->physical_device->pdev;
1455 
1456    struct drm_syncobj_array objs = {
1457       .handles = (uint64_t) (uintptr_t) &event->syncobj,
1458       .count_handles = 1
1459    };
1460 
1461    /* This is going to just replace the fence for this syncobj with one that
1462     * is already in signaled state. This won't be a problem because the spec
1463     * mandates that the event will have been set before the vkCmdWaitEvents
1464     * command executes.
1465     * https://www.khronos.org/registry/vulkan/specs/1.2/html/chap6.html#commandbuffers-submission-progress
1466     */
1467    if (drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_SIGNAL, &objs))
1468       return VK_ERROR_DEVICE_LOST;
1469 
1470   return VK_SUCCESS;
1471 }
1472 
1473 VkResult
panvk_ResetEvent(VkDevice _device,VkEvent _event)1474 panvk_ResetEvent(VkDevice _device, VkEvent _event)
1475 {
1476    VK_FROM_HANDLE(panvk_device, device, _device);
1477    VK_FROM_HANDLE(panvk_event, event, _event);
1478    const struct panfrost_device *pdev = &device->physical_device->pdev;
1479 
1480    struct drm_syncobj_array objs = {
1481       .handles = (uint64_t) (uintptr_t) &event->syncobj,
1482       .count_handles = 1
1483    };
1484 
1485    if (drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_RESET, &objs))
1486       return VK_ERROR_DEVICE_LOST;
1487 
1488   return VK_SUCCESS;
1489 }
1490 
1491 VkResult
panvk_CreateBuffer(VkDevice _device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)1492 panvk_CreateBuffer(VkDevice _device,
1493                    const VkBufferCreateInfo *pCreateInfo,
1494                    const VkAllocationCallbacks *pAllocator,
1495                    VkBuffer *pBuffer)
1496 {
1497    VK_FROM_HANDLE(panvk_device, device, _device);
1498    struct panvk_buffer *buffer;
1499 
1500    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
1501 
1502    buffer = vk_buffer_create(&device->vk, pCreateInfo,
1503                              pAllocator, sizeof(*buffer));
1504    if (buffer == NULL)
1505       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1506 
1507    *pBuffer = panvk_buffer_to_handle(buffer);
1508 
1509    return VK_SUCCESS;
1510 }
1511 
1512 void
panvk_DestroyBuffer(VkDevice _device,VkBuffer _buffer,const VkAllocationCallbacks * pAllocator)1513 panvk_DestroyBuffer(VkDevice _device,
1514                     VkBuffer _buffer,
1515                     const VkAllocationCallbacks *pAllocator)
1516 {
1517    VK_FROM_HANDLE(panvk_device, device, _device);
1518    VK_FROM_HANDLE(panvk_buffer, buffer, _buffer);
1519 
1520    if (!buffer)
1521       return;
1522 
1523    vk_buffer_destroy(&device->vk, pAllocator, &buffer->vk);
1524 }
1525 
1526 VkResult
panvk_CreateFramebuffer(VkDevice _device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)1527 panvk_CreateFramebuffer(VkDevice _device,
1528                         const VkFramebufferCreateInfo *pCreateInfo,
1529                         const VkAllocationCallbacks *pAllocator,
1530                         VkFramebuffer *pFramebuffer)
1531 {
1532    VK_FROM_HANDLE(panvk_device, device, _device);
1533    struct panvk_framebuffer *framebuffer;
1534 
1535    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
1536 
1537    size_t size = sizeof(*framebuffer) + sizeof(struct panvk_attachment_info) *
1538                                            pCreateInfo->attachmentCount;
1539    framebuffer = vk_object_alloc(&device->vk, pAllocator, size,
1540                                  VK_OBJECT_TYPE_FRAMEBUFFER);
1541    if (framebuffer == NULL)
1542       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1543 
1544    framebuffer->attachment_count = pCreateInfo->attachmentCount;
1545    framebuffer->width = pCreateInfo->width;
1546    framebuffer->height = pCreateInfo->height;
1547    framebuffer->layers = pCreateInfo->layers;
1548    for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
1549       VkImageView _iview = pCreateInfo->pAttachments[i];
1550       struct panvk_image_view *iview = panvk_image_view_from_handle(_iview);
1551       framebuffer->attachments[i].iview = iview;
1552    }
1553 
1554    *pFramebuffer = panvk_framebuffer_to_handle(framebuffer);
1555    return VK_SUCCESS;
1556 }
1557 
1558 void
panvk_DestroyFramebuffer(VkDevice _device,VkFramebuffer _fb,const VkAllocationCallbacks * pAllocator)1559 panvk_DestroyFramebuffer(VkDevice _device,
1560                          VkFramebuffer _fb,
1561                          const VkAllocationCallbacks *pAllocator)
1562 {
1563    VK_FROM_HANDLE(panvk_device, device, _device);
1564    VK_FROM_HANDLE(panvk_framebuffer, fb, _fb);
1565 
1566    if (fb)
1567       vk_object_free(&device->vk, pAllocator, fb);
1568 }
1569 
1570 void
panvk_DestroySampler(VkDevice _device,VkSampler _sampler,const VkAllocationCallbacks * pAllocator)1571 panvk_DestroySampler(VkDevice _device,
1572                      VkSampler _sampler,
1573                      const VkAllocationCallbacks *pAllocator)
1574 {
1575    VK_FROM_HANDLE(panvk_device, device, _device);
1576    VK_FROM_HANDLE(panvk_sampler, sampler, _sampler);
1577 
1578    if (!sampler)
1579       return;
1580 
1581    vk_object_free(&device->vk, pAllocator, sampler);
1582 }
1583 
1584 /* vk_icd.h does not declare this function, so we declare it here to
1585  * suppress Wmissing-prototypes.
1586  */
1587 PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
1588 vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion);
1589 
1590 PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t * pSupportedVersion)1591 vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
1592 {
1593    /* For the full details on loader interface versioning, see
1594     * <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
1595     * What follows is a condensed summary, to help you navigate the large and
1596     * confusing official doc.
1597     *
1598     *   - Loader interface v0 is incompatible with later versions. We don't
1599     *     support it.
1600     *
1601     *   - In loader interface v1:
1602     *       - The first ICD entrypoint called by the loader is
1603     *         vk_icdGetInstanceProcAddr(). The ICD must statically expose this
1604     *         entrypoint.
1605     *       - The ICD must statically expose no other Vulkan symbol unless it
1606     * is linked with -Bsymbolic.
1607     *       - Each dispatchable Vulkan handle created by the ICD must be
1608     *         a pointer to a struct whose first member is VK_LOADER_DATA. The
1609     *         ICD must initialize VK_LOADER_DATA.loadMagic to
1610     * ICD_LOADER_MAGIC.
1611     *       - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
1612     *         vkDestroySurfaceKHR(). The ICD must be capable of working with
1613     *         such loader-managed surfaces.
1614     *
1615     *    - Loader interface v2 differs from v1 in:
1616     *       - The first ICD entrypoint called by the loader is
1617     *         vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
1618     *         statically expose this entrypoint.
1619     *
1620     *    - Loader interface v3 differs from v2 in:
1621     *        - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
1622     *          vkDestroySurfaceKHR(), and other API which uses VKSurfaceKHR,
1623     *          because the loader no longer does so.
1624     *
1625     *    - Loader interface v4 differs from v3 in:
1626     *        - The ICD must implement vk_icdGetPhysicalDeviceProcAddr().
1627     *
1628     *    - Loader interface v5 differs from v4 in:
1629     *        - The ICD must support 1.1 and must not return
1630     *          VK_ERROR_INCOMPATIBLE_DRIVER from vkCreateInstance() unless a
1631     *          Vulkan Loader with interface v4 or smaller is being used and the
1632     *          application provides an API version that is greater than 1.0.
1633     */
1634    *pSupportedVersion = MIN2(*pSupportedVersion, 5u);
1635    return VK_SUCCESS;
1636 }
1637 
1638 VkResult
panvk_GetMemoryFdKHR(VkDevice _device,const VkMemoryGetFdInfoKHR * pGetFdInfo,int * pFd)1639 panvk_GetMemoryFdKHR(VkDevice _device,
1640                      const VkMemoryGetFdInfoKHR *pGetFdInfo,
1641                      int *pFd)
1642 {
1643    VK_FROM_HANDLE(panvk_device, device, _device);
1644    VK_FROM_HANDLE(panvk_device_memory, memory, pGetFdInfo->memory);
1645 
1646    assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
1647 
1648    /* At the moment, we support only the below handle types. */
1649    assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
1650           pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
1651 
1652    int prime_fd = panfrost_bo_export(memory->bo);
1653    if (prime_fd < 0)
1654       return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
1655 
1656    *pFd = prime_fd;
1657    return VK_SUCCESS;
1658 }
1659 
VkResult
panvk_GetMemoryFdPropertiesKHR(VkDevice _device,
                               VkExternalMemoryHandleTypeFlagBits handleType,
                               int fd,
                               VkMemoryFdPropertiesKHR *pMemoryFdProperties)
{
   /* dma-buf is the only handle type whose properties can be queried;
    * the fd itself is not inspected. Bit 0 set means any imported fd can
    * be bound as memory type index 0.
    */
   assert(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
   pMemoryFdProperties->memoryTypeBits = 1;
   return VK_SUCCESS;
}
1670 
1671 void
panvk_GetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VkExternalSemaphoreProperties * pExternalSemaphoreProperties)1672 panvk_GetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice,
1673                                                    const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
1674                                                    VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
1675 {
1676    if ((pExternalSemaphoreInfo->handleType == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT ||
1677         pExternalSemaphoreInfo->handleType == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
1678       pExternalSemaphoreProperties->exportFromImportedHandleTypes =
1679          VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT |
1680          VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
1681       pExternalSemaphoreProperties->compatibleHandleTypes =
1682          VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT |
1683          VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
1684       pExternalSemaphoreProperties->externalSemaphoreFeatures =
1685          VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
1686          VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
1687    } else {
1688       pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
1689       pExternalSemaphoreProperties->compatibleHandleTypes = 0;
1690       pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
1691    }
1692 }
1693 
void
panvk_GetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice,
                                               const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
                                               VkExternalFenceProperties *pExternalFenceProperties)
{
   /* No external fence handle types are supported: report empty handle
    * type and feature sets for every query. Only the three capability
    * fields are written; sType/pNext are left for the caller.
    */
   pExternalFenceProperties->exportFromImportedHandleTypes = 0;
   pExternalFenceProperties->compatibleHandleTypes = 0;
   pExternalFenceProperties->externalFenceFeatures = 0;
}
1703 
1704 void
panvk_GetDeviceGroupPeerMemoryFeatures(VkDevice device,uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VkPeerMemoryFeatureFlags * pPeerMemoryFeatures)1705 panvk_GetDeviceGroupPeerMemoryFeatures(VkDevice device,
1706                                        uint32_t heapIndex,
1707                                        uint32_t localDeviceIndex,
1708                                        uint32_t remoteDeviceIndex,
1709                                        VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
1710 {
1711    assert(localDeviceIndex == remoteDeviceIndex);
1712 
1713    *pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
1714                           VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
1715                           VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
1716                           VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
1717 }
1718