1 /*
2 * Copyright © 2019 Raspberry Pi
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <assert.h>
25 #include <fcntl.h>
26 #include <stdbool.h>
27 #include <string.h>
28 #include <sys/mman.h>
29 #include <sys/sysinfo.h>
30 #include <unistd.h>
31 #include <xf86drm.h>
32
33 #ifdef MAJOR_IN_MKDEV
34 #include <sys/mkdev.h>
35 #endif
36 #ifdef MAJOR_IN_SYSMACROS
37 #include <sys/sysmacros.h>
38 #endif
39
40 #include "v3dv_private.h"
41
42 #include "common/v3d_debug.h"
43
44 #include "compiler/v3d_compiler.h"
45
46 #include "drm-uapi/v3d_drm.h"
47 #include "format/u_format.h"
48 #include "vk_util.h"
49
50 #include "util/build_id.h"
51 #include "util/debug.h"
52 #include "util/u_cpu_detect.h"
53
54 #ifdef VK_USE_PLATFORM_XCB_KHR
55 #include <xcb/xcb.h>
56 #include <xcb/dri3.h>
57 #include <X11/Xlib-xcb.h>
58 #endif
59
60 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
61 #include <wayland-client.h>
62 #include "wayland-drm-client-protocol.h"
63 #endif
64
65 #ifdef USE_V3D_SIMULATOR
66 #include "drm-uapi/i915_drm.h"
67 #endif
68
69 #define V3DV_API_VERSION VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION)
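/* For reference: VK_MAKE_VERSION packs its arguments as
 * (major << 22) | (minor << 12) | patch, so with an illustrative
 * VK_HEADER_VERSION of 195 this expands to 0x4000c3, i.e. version 1.0.195.
 */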
70
71 VKAPI_ATTR VkResult VKAPI_CALL
72 v3dv_EnumerateInstanceVersion(uint32_t *pApiVersion)
73 {
74 *pApiVersion = V3DV_API_VERSION;
75 return VK_SUCCESS;
76 }
77
78 #if defined(VK_USE_PLATFORM_WIN32_KHR) || \
79 defined(VK_USE_PLATFORM_WAYLAND_KHR) || \
80 defined(VK_USE_PLATFORM_XCB_KHR) || \
81 defined(VK_USE_PLATFORM_XLIB_KHR) || \
82 defined(VK_USE_PLATFORM_DISPLAY_KHR)
83 #define V3DV_USE_WSI_PLATFORM
84 #endif
85
86 static const struct vk_instance_extension_table instance_extensions = {
87 .KHR_device_group_creation = true,
88 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
89 .KHR_display = true,
90 .KHR_get_display_properties2 = true,
91 #endif
92 .KHR_external_fence_capabilities = true,
93 .KHR_external_memory_capabilities = true,
94 .KHR_external_semaphore_capabilities = true,
95 .KHR_get_physical_device_properties2 = true,
96 #ifdef V3DV_USE_WSI_PLATFORM
97 .KHR_get_surface_capabilities2 = true,
98 .KHR_surface = true,
99 .KHR_surface_protected_capabilities = true,
100 #endif
101 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
102 .KHR_wayland_surface = true,
103 #endif
104 #ifdef VK_USE_PLATFORM_XCB_KHR
105 .KHR_xcb_surface = true,
106 #endif
107 #ifdef VK_USE_PLATFORM_XLIB_KHR
108 .KHR_xlib_surface = true,
109 #endif
110 .EXT_debug_report = true,
111 };
112
113 static void
114 get_device_extensions(const struct v3dv_physical_device *device,
115 struct vk_device_extension_table *ext)
116 {
117 *ext = (struct vk_device_extension_table) {
118 .KHR_bind_memory2 = true,
119 .KHR_copy_commands2 = true,
120 .KHR_dedicated_allocation = true,
121 .KHR_device_group = true,
122 .KHR_descriptor_update_template = true,
123 .KHR_external_fence = true,
124 .KHR_external_fence_fd = true,
125 .KHR_external_memory = true,
126 .KHR_external_memory_fd = true,
127 .KHR_external_semaphore = true,
128 .KHR_external_semaphore_fd = true,
129 .KHR_get_memory_requirements2 = true,
130 .KHR_image_format_list = true,
131 .KHR_relaxed_block_layout = true,
132 .KHR_maintenance1 = true,
133 .KHR_maintenance2 = true,
134 .KHR_maintenance3 = true,
135 .KHR_multiview = true,
136 .KHR_shader_non_semantic_info = true,
137 .KHR_sampler_mirror_clamp_to_edge = true,
138 .KHR_storage_buffer_storage_class = true,
139 .KHR_uniform_buffer_standard_layout = true,
140 #ifdef V3DV_HAS_SURFACE
141 .KHR_swapchain = true,
142 .KHR_incremental_present = true,
143 #endif
144 .KHR_variable_pointers = true,
145 .EXT_color_write_enable = true,
146 .EXT_custom_border_color = true,
147 .EXT_external_memory_dma_buf = true,
148 .EXT_index_type_uint8 = true,
149 .EXT_physical_device_drm = true,
150 .EXT_pipeline_creation_cache_control = true,
151 .EXT_pipeline_creation_feedback = true,
152 .EXT_private_data = true,
153 .EXT_provoking_vertex = true,
154 .EXT_vertex_attribute_divisor = true,
155 };
156 }
157
158 VKAPI_ATTR VkResult VKAPI_CALL
159 v3dv_EnumerateInstanceExtensionProperties(const char *pLayerName,
160 uint32_t *pPropertyCount,
161 VkExtensionProperties *pProperties)
162 {
163 /* We don't support any layers */
164 if (pLayerName)
165 return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
166
167 return vk_enumerate_instance_extension_properties(
168 &instance_extensions, pPropertyCount, pProperties);
169 }
170
171 VKAPI_ATTR VkResult VKAPI_CALL
172 v3dv_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
173 const VkAllocationCallbacks *pAllocator,
174 VkInstance *pInstance)
175 {
176 struct v3dv_instance *instance;
177 VkResult result;
178
179 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
180
181 if (pAllocator == NULL)
182 pAllocator = vk_default_allocator();
183
184 instance = vk_alloc(pAllocator, sizeof(*instance), 8,
185 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
186 if (!instance)
187 return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
188
189 struct vk_instance_dispatch_table dispatch_table;
190 vk_instance_dispatch_table_from_entrypoints(
191 &dispatch_table, &v3dv_instance_entrypoints, true);
192 vk_instance_dispatch_table_from_entrypoints(
193 &dispatch_table, &wsi_instance_entrypoints, false);
194
195 result = vk_instance_init(&instance->vk,
196 &instance_extensions,
197 &dispatch_table,
198 pCreateInfo, pAllocator);
199
200 if (result != VK_SUCCESS) {
201 vk_free(pAllocator, instance);
202 return vk_error(NULL, result);
203 }
204
205 v3d_process_debug_variable();
206
207 instance->physicalDeviceCount = -1;
208
209 /* We start with the default values for the pipeline_cache envvars */
210 instance->pipeline_cache_enabled = true;
211 instance->default_pipeline_cache_enabled = true;
212 const char *pipeline_cache_str = getenv("V3DV_ENABLE_PIPELINE_CACHE");
213 if (pipeline_cache_str != NULL) {
214 if (strncmp(pipeline_cache_str, "full", 4) == 0) {
215 /* nothing to do, just to filter correct values */
216 } else if (strncmp(pipeline_cache_str, "no-default-cache", 16) == 0) {
217 instance->default_pipeline_cache_enabled = false;
218 } else if (strncmp(pipeline_cache_str, "off", 3) == 0) {
219 instance->pipeline_cache_enabled = false;
220 instance->default_pipeline_cache_enabled = false;
221 } else {
222 fprintf(stderr, "Wrong value for envvar V3DV_ENABLE_PIPELINE_CACHE. "
223 "Allowed values are: full, no-default-cache, off\n");
224 }
225 }
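
/* Summary of the accepted V3DV_ENABLE_PIPELINE_CACHE values, as parsed above:
 *   full             -> pipeline_cache_enabled = true,  default_pipeline_cache_enabled = true
 *   no-default-cache -> pipeline_cache_enabled = true,  default_pipeline_cache_enabled = false
 *   off              -> pipeline_cache_enabled = false, default_pipeline_cache_enabled = false
 * Any other value keeps the defaults and prints the warning above.
 */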
226
227 if (instance->pipeline_cache_enabled == false) {
228 fprintf(stderr, "WARNING: v3dv pipeline cache is disabled. Performance "
229 "can be affected negatively\n");
230 } else {
231 if (instance->default_pipeline_cache_enabled == false) {
232 fprintf(stderr, "WARNING: default v3dv pipeline cache is disabled. "
233 "Performance can be affected negatively\n");
234 }
235 }
236
237 util_cpu_detect();
238
239 VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
240
241 *pInstance = v3dv_instance_to_handle(instance);
242
243 return VK_SUCCESS;
244 }
245
246 static void
247 v3dv_physical_device_free_disk_cache(struct v3dv_physical_device *device)
248 {
249 #ifdef ENABLE_SHADER_CACHE
250 if (device->disk_cache)
251 disk_cache_destroy(device->disk_cache);
252 #else
253 assert(device->disk_cache == NULL);
254 #endif
255 }
256
257 static void
258 physical_device_finish(struct v3dv_physical_device *device)
259 {
260 v3dv_wsi_finish(device);
261 v3dv_physical_device_free_disk_cache(device);
262 v3d_compiler_free(device->compiler);
263
264 close(device->render_fd);
265 if (device->display_fd >= 0)
266 close(device->display_fd);
267 if (device->master_fd >= 0)
268 close(device->master_fd);
269
270 free(device->name);
271
272 #if using_v3d_simulator
273 v3d_simulator_destroy(device->sim_file);
274 #endif
275
276 vk_physical_device_finish(&device->vk);
277 mtx_destroy(&device->mutex);
278 }
279
280 VKAPI_ATTR void VKAPI_CALL
281 v3dv_DestroyInstance(VkInstance _instance,
282 const VkAllocationCallbacks *pAllocator)
283 {
284 V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
285
286 if (!instance)
287 return;
288
289 if (instance->physicalDeviceCount > 0) {
290 /* We support at most one physical device. */
291 assert(instance->physicalDeviceCount == 1);
292 physical_device_finish(&instance->physicalDevice);
293 }
294
295 VG(VALGRIND_DESTROY_MEMPOOL(instance));
296
297 vk_instance_finish(&instance->vk);
298 vk_free(&instance->vk.alloc, instance);
299 }
300
301 static uint64_t
302 compute_heap_size()
303 {
304 #if !using_v3d_simulator
305 /* Query the total ram from the system */
306 struct sysinfo info;
307 sysinfo(&info);
308
309 uint64_t total_ram = (uint64_t)info.totalram * (uint64_t)info.mem_unit;
310 #else
311 uint64_t total_ram = (uint64_t) v3d_simulator_get_mem_size();
312 #endif
313
314 /* We don't want to burn too much ram with the GPU. If the user has 4GiB
315 * or less, we use at most half. If they have more than 4GiB, we use 3/4.
316 */
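/* Worked example of the rule above (illustrative figures): 2 GiB of total
 * RAM yields a 1 GiB heap, while 8 GiB yields a 6 GiB heap.
 */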
317 uint64_t available_ram;
318 if (total_ram <= 4ull * 1024ull * 1024ull * 1024ull)
319 available_ram = total_ram / 2;
320 else
321 available_ram = total_ram * 3 / 4;
322
323 return available_ram;
324 }
325
326 #if !using_v3d_simulator
327 #ifdef VK_USE_PLATFORM_XCB_KHR
328 static int
329 create_display_fd_xcb(VkIcdSurfaceBase *surface)
330 {
331 int fd = -1;
332
333 xcb_connection_t *conn;
334 xcb_dri3_open_reply_t *reply = NULL;
335 if (surface) {
336 if (surface->platform == VK_ICD_WSI_PLATFORM_XLIB)
337 conn = XGetXCBConnection(((VkIcdSurfaceXlib *)surface)->dpy);
338 else
339 conn = ((VkIcdSurfaceXcb *)surface)->connection;
340 } else {
341 conn = xcb_connect(NULL, NULL);
342 }
343
344 if (xcb_connection_has_error(conn))
345 goto finish;
346
347 const xcb_setup_t *setup = xcb_get_setup(conn);
348 xcb_screen_iterator_t iter = xcb_setup_roots_iterator(setup);
349 xcb_screen_t *screen = iter.data;
350
351 xcb_dri3_open_cookie_t cookie;
352 cookie = xcb_dri3_open(conn, screen->root, None);
353 reply = xcb_dri3_open_reply(conn, cookie, NULL);
354 if (!reply)
355 goto finish;
356
357 if (reply->nfd != 1)
358 goto finish;
359
360 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
361 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
362
363 finish:
364 if (!surface)
365 xcb_disconnect(conn);
366 if (reply)
367 free(reply);
368
369 return fd;
370 }
371 #endif
372
373 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
374 struct v3dv_wayland_info {
375 struct wl_drm *wl_drm;
376 int fd;
377 bool is_set;
378 bool authenticated;
379 };
380
381 static void
382 v3dv_drm_handle_device(void *data, struct wl_drm *drm, const char *device)
383 {
384 struct v3dv_wayland_info *info = data;
385 info->fd = open(device, O_RDWR | O_CLOEXEC);
386 info->is_set = info->fd != -1;
387 if (!info->is_set) {
388 fprintf(stderr, "v3dv_drm_handle_device: could not open %s (%s)\n",
389 device, strerror(errno));
390 return;
391 }
392
393 drm_magic_t magic;
394 if (drmGetMagic(info->fd, &magic)) {
395 fprintf(stderr, "v3dv_drm_handle_device: drmGetMagic failed\n");
396 close(info->fd);
397 info->fd = -1;
398 info->is_set = false;
399 return;
400 }
401 wl_drm_authenticate(info->wl_drm, magic);
402 }
403
404 static void
405 v3dv_drm_handle_format(void *data, struct wl_drm *drm, uint32_t format)
406 {
407 }
408
409 static void
410 v3dv_drm_handle_authenticated(void *data, struct wl_drm *drm)
411 {
412 struct v3dv_wayland_info *info = data;
413 info->authenticated = true;
414 }
415
416 static void
417 v3dv_drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t value)
418 {
419 }
420
421 struct wl_drm_listener v3dv_drm_listener = {
422 .device = v3dv_drm_handle_device,
423 .format = v3dv_drm_handle_format,
424 .authenticated = v3dv_drm_handle_authenticated,
425 .capabilities = v3dv_drm_handle_capabilities
426 };
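
/* Sketch of the wl_drm authentication flow driven by these callbacks: the
 * compositor's "device" event makes us open the advertised DRM node and send
 * its magic via wl_drm_authenticate(); once the compositor acknowledges it
 * with the "authenticated" event, info.fd is ready to be returned below.
 */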
427
428 static void
429 v3dv_registry_global(void *data,
430 struct wl_registry *registry,
431 uint32_t name,
432 const char *interface,
433 uint32_t version)
434 {
435 struct v3dv_wayland_info *info = data;
436 if (strcmp(interface, "wl_drm") == 0) {
437 info->wl_drm = wl_registry_bind(registry, name, &wl_drm_interface,
438 MIN2(version, 2));
439 wl_drm_add_listener(info->wl_drm, &v3dv_drm_listener, data);
440 };
441 }
442
443 static void
444 v3dv_registry_global_remove_cb(void *data,
445 struct wl_registry *registry,
446 uint32_t name)
447 {
448 }
449
450 static int
451 create_display_fd_wayland(VkIcdSurfaceBase *surface)
452 {
453 struct wl_display *display;
454 struct wl_registry *registry = NULL;
455
456 struct v3dv_wayland_info info = {
457 .wl_drm = NULL,
458 .fd = -1,
459 .is_set = false,
460 .authenticated = false
461 };
462
463 if (surface)
464 display = ((VkIcdSurfaceWayland *) surface)->display;
465 else
466 display = wl_display_connect(NULL);
467
468 if (!display)
469 return -1;
470
471 registry = wl_display_get_registry(display);
472 if (!registry) {
473 if (!surface)
474 wl_display_disconnect(display);
475 return -1;
476 }
477
478 static const struct wl_registry_listener registry_listener = {
479 v3dv_registry_global,
480 v3dv_registry_global_remove_cb
481 };
482 wl_registry_add_listener(registry, &registry_listener, &info);
483
484 wl_display_roundtrip(display); /* For the registry advertisement */
485 wl_display_roundtrip(display); /* For the DRM device event */
486 wl_display_roundtrip(display); /* For the authentication event */
487
488 wl_drm_destroy(info.wl_drm);
489 wl_registry_destroy(registry);
490
491 if (!surface)
492 wl_display_disconnect(display);
493
494 if (!info.is_set)
495 return -1;
496
497 if (!info.authenticated)
498 return -1;
499
500 return info.fd;
501 }
502 #endif
503
504 /* Acquire an authenticated display fd without a surface reference. This is the
505 * case where the application is making WSI allocations outside the Vulkan
506 * swapchain context (only Zink, for now). Since we lack information about the
507 * underlying surface we just try our best to figure out the correct display
508 * and platform to use. It should work in most cases.
509 */
510 static void
511 acquire_display_device_no_surface(struct v3dv_instance *instance,
512 struct v3dv_physical_device *pdevice)
513 {
514 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
515 pdevice->display_fd = create_display_fd_wayland(NULL);
516 #endif
517
518 #ifdef VK_USE_PLATFORM_XCB_KHR
519 if (pdevice->display_fd == -1)
520 pdevice->display_fd = create_display_fd_xcb(NULL);
521 #endif
522
523 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
524 if (pdevice->display_fd == -1 && pdevice->master_fd >= 0)
525 pdevice->display_fd = dup(pdevice->master_fd);
526 #endif
527 }
528
529 /* Acquire an authenticated display fd from the surface. This is the regular
530 * case where the application is using swapchains to create WSI allocations.
531 * In this case we use the surface information to figure out the correct
532 * display and platform combination.
533 */
534 static void
535 acquire_display_device_surface(struct v3dv_instance *instance,
536 struct v3dv_physical_device *pdevice,
537 VkIcdSurfaceBase *surface)
538 {
539 /* Mesa will set both of VK_USE_PLATFORM_{XCB,XLIB} when building with
540 * platform X11, so only check for XCB and rely on XCB to get an
541 * authenticated device also for Xlib.
542 */
543 #ifdef VK_USE_PLATFORM_XCB_KHR
544 if (surface->platform == VK_ICD_WSI_PLATFORM_XCB ||
545 surface->platform == VK_ICD_WSI_PLATFORM_XLIB) {
546 pdevice->display_fd = create_display_fd_xcb(surface);
547 }
548 #endif
549
550 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
551 if (surface->platform == VK_ICD_WSI_PLATFORM_WAYLAND)
552 pdevice->display_fd = create_display_fd_wayland(surface);
553 #endif
554
555 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
556 if (surface->platform == VK_ICD_WSI_PLATFORM_DISPLAY &&
557 pdevice->master_fd >= 0) {
558 pdevice->display_fd = dup(pdevice->master_fd);
559 }
560 #endif
561 }
562 #endif /* !using_v3d_simulator */
563
564 /* Attempts to get an authenticated display fd from the display server that
565 * we can use to allocate BOs for presentable images.
566 */
567 VkResult
568 v3dv_physical_device_acquire_display(struct v3dv_instance *instance,
569 struct v3dv_physical_device *pdevice,
570 VkIcdSurfaceBase *surface)
571 {
572 VkResult result = VK_SUCCESS;
573 mtx_lock(&pdevice->mutex);
574
575 if (pdevice->display_fd != -1)
576 goto done;
577
578 /* When running on the simulator we do everything on a single render node so
579 * we don't need to get an authenticated display fd from the display server.
580 */
581 #if !using_v3d_simulator
582 if (surface)
583 acquire_display_device_surface(instance, pdevice, surface);
584 else
585 acquire_display_device_no_surface(instance, pdevice);
586
587 if (pdevice->display_fd == -1)
588 result = VK_ERROR_INITIALIZATION_FAILED;
589 #endif
590
591 done:
592 mtx_unlock(&pdevice->mutex);
593 return result;
594 }
595
596 static bool
597 v3d_has_feature(struct v3dv_physical_device *device, enum drm_v3d_param feature)
598 {
599 struct drm_v3d_get_param p = {
600 .param = feature,
601 };
602 if (v3dv_ioctl(device->render_fd, DRM_IOCTL_V3D_GET_PARAM, &p) != 0)
603 return false;
604 return p.value;
605 }
606
607 static bool
608 device_has_expected_features(struct v3dv_physical_device *device)
609 {
610 return v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_TFU) &&
611 v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_CSD) &&
612 v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_CACHE_FLUSH);
613 }
614
615
616 static VkResult
617 init_uuids(struct v3dv_physical_device *device)
618 {
619 const struct build_id_note *note =
620 build_id_find_nhdr_for_addr(init_uuids);
621 if (!note) {
622 return vk_errorf(device->vk.instance,
623 VK_ERROR_INITIALIZATION_FAILED,
624 "Failed to find build-id");
625 }
626
627 unsigned build_id_len = build_id_length(note);
628 if (build_id_len < 20) {
629 return vk_errorf(device->vk.instance,
630 VK_ERROR_INITIALIZATION_FAILED,
631 "build-id too short. It needs to be a SHA");
632 }
633
634 memcpy(device->driver_build_sha1, build_id_data(note), 20);
635
636 uint32_t vendor_id = v3dv_physical_device_vendor_id(device);
637 uint32_t device_id = v3dv_physical_device_device_id(device);
638
639 struct mesa_sha1 sha1_ctx;
640 uint8_t sha1[20];
641 STATIC_ASSERT(VK_UUID_SIZE <= sizeof(sha1));
642
643 /* The pipeline cache UUID is used for determining when a pipeline cache is
644 * invalid. It needs both a driver build and the PCI ID of the device.
645 */
646 _mesa_sha1_init(&sha1_ctx);
647 _mesa_sha1_update(&sha1_ctx, build_id_data(note), build_id_len);
648 _mesa_sha1_update(&sha1_ctx, &device_id, sizeof(device_id));
649 _mesa_sha1_final(&sha1_ctx, sha1);
650 memcpy(device->pipeline_cache_uuid, sha1, VK_UUID_SIZE);
651
652 /* The driver UUID is used for determining shareability of images and memory
653 * between two Vulkan instances in separate processes. People who want to
654 * share memory also need to check the device UUID (below), so all this
655 * needs to contain is the build-id.
656 */
657 memcpy(device->driver_uuid, build_id_data(note), VK_UUID_SIZE);
658
659 /* The device UUID uniquely identifies the given device within the machine.
660 * Since we never have more than one device, this doesn't need to be a real
661 * UUID.
662 */
663 _mesa_sha1_init(&sha1_ctx);
664 _mesa_sha1_update(&sha1_ctx, &vendor_id, sizeof(vendor_id));
665 _mesa_sha1_update(&sha1_ctx, &device_id, sizeof(device_id));
666 _mesa_sha1_final(&sha1_ctx, sha1);
667 memcpy(device->device_uuid, sha1, VK_UUID_SIZE);
668
669 return VK_SUCCESS;
670 }
671
672 static void
673 v3dv_physical_device_init_disk_cache(struct v3dv_physical_device *device)
674 {
675 #ifdef ENABLE_SHADER_CACHE
676 char timestamp[41];
677 _mesa_sha1_format(timestamp, device->driver_build_sha1);
678
679 assert(device->name);
680 device->disk_cache = disk_cache_create(device->name, timestamp, 0);
681 #else
682 device->disk_cache = NULL;
683 #endif
684 }
685
686 static VkResult
687 physical_device_init(struct v3dv_physical_device *device,
688 struct v3dv_instance *instance,
689 drmDevicePtr drm_render_device,
690 drmDevicePtr drm_primary_device)
691 {
692 VkResult result = VK_SUCCESS;
693 int32_t master_fd = -1;
694 int32_t render_fd = -1;
695
696 struct vk_physical_device_dispatch_table dispatch_table;
697 vk_physical_device_dispatch_table_from_entrypoints
698 (&dispatch_table, &v3dv_physical_device_entrypoints, true);
699 vk_physical_device_dispatch_table_from_entrypoints(
700 &dispatch_table, &wsi_physical_device_entrypoints, false);
701
702 result = vk_physical_device_init(&device->vk, &instance->vk, NULL,
703 &dispatch_table);
704
705 if (result != VK_SUCCESS)
706 goto fail;
707
708 assert(drm_render_device);
709 const char *path = drm_render_device->nodes[DRM_NODE_RENDER];
710 render_fd = open(path, O_RDWR | O_CLOEXEC);
711 if (render_fd < 0) {
712 fprintf(stderr, "Opening %s failed: %s\n", path, strerror(errno));
713 result = VK_ERROR_INCOMPATIBLE_DRIVER;
714 goto fail;
715 }
716
717 /* If we are running on VK_KHR_display we need to acquire the master
718 * display device now for the v3dv_wsi_init() call below. For anything else
719 * we postpone that until a swapchain is created.
720 */
721
722 const char *primary_path;
723 #if !using_v3d_simulator
724 if (drm_primary_device)
725 primary_path = drm_primary_device->nodes[DRM_NODE_PRIMARY];
726 else
727 primary_path = NULL;
728 #else
729 primary_path = drm_render_device->nodes[DRM_NODE_PRIMARY];
730 #endif
731
732 struct stat primary_stat = {0}, render_stat = {0};
733
734 device->has_primary = primary_path;
735 if (device->has_primary) {
736 if (stat(primary_path, &primary_stat) != 0) {
737 result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
738 "failed to stat DRM primary node %s",
739 primary_path);
740 goto fail;
741 }
742
743 device->primary_devid = primary_stat.st_rdev;
744 }
745
746 if (fstat(render_fd, &render_stat) != 0) {
747 result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
748 "failed to stat DRM render node %s",
749 path);
750 goto fail;
751 }
752 device->has_render = true;
753 device->render_devid = render_stat.st_rdev;
754
755 if (instance->vk.enabled_extensions.KHR_display) {
756 #if !using_v3d_simulator
757 /* Open the primary node on the vc4 display device */
758 assert(drm_primary_device);
759 master_fd = open(primary_path, O_RDWR | O_CLOEXEC);
760 #else
761 /* There is only one device with primary and render nodes.
762 * Open its primary node.
763 */
764 master_fd = open(primary_path, O_RDWR | O_CLOEXEC);
765 #endif
766 }
767
768 #if using_v3d_simulator
769 device->sim_file = v3d_simulator_init(render_fd);
770 #endif
771
772 device->render_fd = render_fd; /* The v3d render node */
773 device->display_fd = -1; /* Authenticated vc4 primary node */
774 device->master_fd = master_fd; /* Master vc4 primary node */
775
776 if (!v3d_get_device_info(device->render_fd, &device->devinfo, &v3dv_ioctl)) {
777 result = VK_ERROR_INCOMPATIBLE_DRIVER;
778 goto fail;
779 }
780
781 if (device->devinfo.ver < 42) {
782 result = VK_ERROR_INCOMPATIBLE_DRIVER;
783 goto fail;
784 }
785
786 if (!device_has_expected_features(device)) {
787 result = VK_ERROR_INCOMPATIBLE_DRIVER;
788 goto fail;
789 }
790
791 result = init_uuids(device);
792 if (result != VK_SUCCESS)
793 goto fail;
794
795 device->compiler = v3d_compiler_init(&device->devinfo);
796 device->next_program_id = 0;
797
798 ASSERTED int len =
799 asprintf(&device->name, "V3D %d.%d",
800 device->devinfo.ver / 10, device->devinfo.ver % 10);
801 assert(len != -1);
802
803 v3dv_physical_device_init_disk_cache(device);
804
805 /* Setup available memory heaps and types */
806 VkPhysicalDeviceMemoryProperties *mem = &device->memory;
807 mem->memoryHeapCount = 1;
808 mem->memoryHeaps[0].size = compute_heap_size();
809 mem->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
810
811 /* This is the only combination required by the spec */
812 mem->memoryTypeCount = 1;
813 mem->memoryTypes[0].propertyFlags =
814 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
815 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
816 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
817 mem->memoryTypes[0].heapIndex = 0;
818
819 device->options.merge_jobs = getenv("V3DV_NO_MERGE_JOBS") == NULL;
820
821 result = v3dv_wsi_init(device);
822 if (result != VK_SUCCESS) {
823 vk_error(instance, result);
824 goto fail;
825 }
826
827 get_device_extensions(device, &device->vk.supported_extensions);
828
829 pthread_mutex_init(&device->mutex, NULL);
830
831 return VK_SUCCESS;
832
833 fail:
834 vk_physical_device_finish(&device->vk);
835
836 if (render_fd >= 0)
837 close(render_fd);
838 if (master_fd >= 0)
839 close(master_fd);
840
841 return result;
842 }
843
844 static VkResult
845 enumerate_devices(struct v3dv_instance *instance)
846 {
847 /* TODO: Check for more devices? */
848 drmDevicePtr devices[8];
849 VkResult result = VK_ERROR_INCOMPATIBLE_DRIVER;
850 int max_devices;
851
852 instance->physicalDeviceCount = 0;
853
854 max_devices = drmGetDevices2(0, devices, ARRAY_SIZE(devices));
855 if (max_devices < 1)
856 return VK_ERROR_INCOMPATIBLE_DRIVER;
857
858 #if !using_v3d_simulator
859 int32_t v3d_idx = -1;
860 int32_t vc4_idx = -1;
861 #endif
862 for (unsigned i = 0; i < (unsigned)max_devices; i++) {
863 #if using_v3d_simulator
864 /* In the simulator, we look for an Intel render node */
865 const int required_nodes = (1 << DRM_NODE_RENDER) | (1 << DRM_NODE_PRIMARY);
866 if ((devices[i]->available_nodes & required_nodes) == required_nodes &&
867 devices[i]->bustype == DRM_BUS_PCI &&
868 devices[i]->deviceinfo.pci->vendor_id == 0x8086) {
869 result = physical_device_init(&instance->physicalDevice, instance,
870 devices[i], NULL);
871 if (result != VK_ERROR_INCOMPATIBLE_DRIVER)
872 break;
873 }
874 #else
875 /* On actual hardware, we should have a render node (v3d)
876 * and a primary node (vc4). We will need to use the primary
877 * to allocate WSI buffers and share them with the render node
878 * via prime, but that is a privileged operation so we need the
879 * primary node to be authenticated, and for that we need the
880 * display server to provide the device fd (with DRI3), so here
881 * we only check that the device is present but we don't
882 * try to open it.
883 */
884 if (devices[i]->bustype != DRM_BUS_PLATFORM)
885 continue;
886
887 if (devices[i]->available_nodes & 1 << DRM_NODE_RENDER) {
888 char **compat = devices[i]->deviceinfo.platform->compatible;
889 while (*compat) {
890 if (strncmp(*compat, "brcm,2711-v3d", 13) == 0) {
891 v3d_idx = i;
892 break;
893 }
894 compat++;
895 }
896 } else if (devices[i]->available_nodes & 1 << DRM_NODE_PRIMARY) {
897 char **compat = devices[i]->deviceinfo.platform->compatible;
898 while (*compat) {
899 if (strncmp(*compat, "brcm,bcm2711-vc5", 16) == 0 ||
900 strncmp(*compat, "brcm,bcm2835-vc4", 16) == 0 ) {
901 vc4_idx = i;
902 break;
903 }
904 compat++;
905 }
906 }
907 #endif
908 }
909
910 #if !using_v3d_simulator
911 if (v3d_idx == -1 || vc4_idx == -1)
912 result = VK_ERROR_INCOMPATIBLE_DRIVER;
913 else
914 result = physical_device_init(&instance->physicalDevice, instance,
915 devices[v3d_idx], devices[vc4_idx]);
916 #endif
917
918 drmFreeDevices(devices, max_devices);
919
920 if (result == VK_SUCCESS)
921 instance->physicalDeviceCount = 1;
922
923 return result;
924 }
925
926 static VkResult
927 instance_ensure_physical_device(struct v3dv_instance *instance)
928 {
929 if (instance->physicalDeviceCount < 0) {
930 VkResult result = enumerate_devices(instance);
931 if (result != VK_SUCCESS &&
932 result != VK_ERROR_INCOMPATIBLE_DRIVER)
933 return result;
934 }
935
936 return VK_SUCCESS;
937 }
938
939 VKAPI_ATTR VkResult VKAPI_CALL
940 v3dv_EnumeratePhysicalDevices(VkInstance _instance,
941 uint32_t *pPhysicalDeviceCount,
942 VkPhysicalDevice *pPhysicalDevices)
943 {
944 V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
945 VK_OUTARRAY_MAKE(out, pPhysicalDevices, pPhysicalDeviceCount);
946
947 VkResult result = instance_ensure_physical_device(instance);
948 if (result != VK_SUCCESS)
949 return result;
950
951 if (instance->physicalDeviceCount == 0)
952 return VK_SUCCESS;
953
954 assert(instance->physicalDeviceCount == 1);
955 vk_outarray_append(&out, i) {
956 *i = v3dv_physical_device_to_handle(&instance->physicalDevice);
957 }
958
959 return vk_outarray_status(&out);
960 }
961
962 VKAPI_ATTR VkResult VKAPI_CALL
963 v3dv_EnumeratePhysicalDeviceGroups(
964 VkInstance _instance,
965 uint32_t *pPhysicalDeviceGroupCount,
966 VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
967 {
968 V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
969 VK_OUTARRAY_MAKE(out, pPhysicalDeviceGroupProperties,
970 pPhysicalDeviceGroupCount);
971
972 VkResult result = instance_ensure_physical_device(instance);
973 if (result != VK_SUCCESS)
974 return result;
975
976 assert(instance->physicalDeviceCount == 1);
977
978 vk_outarray_append(&out, p) {
979 p->physicalDeviceCount = 1;
980 memset(p->physicalDevices, 0, sizeof(p->physicalDevices));
981 p->physicalDevices[0] =
982 v3dv_physical_device_to_handle(&instance->physicalDevice);
983 p->subsetAllocation = false;
984
985 vk_foreach_struct(ext, p->pNext)
986 v3dv_debug_ignored_stype(ext->sType);
987 }
988
989 return vk_outarray_status(&out);
990 }
991
992 VKAPI_ATTR void VKAPI_CALL
993 v3dv_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
994 VkPhysicalDeviceFeatures *pFeatures)
995 {
996 memset(pFeatures, 0, sizeof(*pFeatures));
997
998 *pFeatures = (VkPhysicalDeviceFeatures) {
999 .robustBufferAccess = true, /* This feature is mandatory */
1000 .fullDrawIndexUint32 = false, /* Only available since V3D 4.4.9.1 */
1001 .imageCubeArray = true,
1002 .independentBlend = true,
1003 .geometryShader = true,
1004 .tessellationShader = false,
1005 .sampleRateShading = true,
1006 .dualSrcBlend = false,
1007 .logicOp = true,
1008 .multiDrawIndirect = false,
1009 .drawIndirectFirstInstance = true,
1010 .depthClamp = false,
1011 .depthBiasClamp = true,
1012 .fillModeNonSolid = true,
1013 .depthBounds = false, /* Only available since V3D 4.3.16.2 */
1014 .wideLines = true,
1015 .largePoints = true,
1016 .alphaToOne = true,
1017 .multiViewport = false,
1018 .samplerAnisotropy = true,
1019 .textureCompressionETC2 = true,
1020 .textureCompressionASTC_LDR = true,
1021 /* Note that textureCompressionBC requires that the driver support all
1022 * the BC formats. V3D 4.2 only supports BC1-3, so we can't claim
1023 * that we support it.
1024 */
1025 .textureCompressionBC = false,
1026 .occlusionQueryPrecise = true,
1027 .pipelineStatisticsQuery = false,
1028 .vertexPipelineStoresAndAtomics = true,
1029 .fragmentStoresAndAtomics = true,
1030 .shaderTessellationAndGeometryPointSize = true,
1031 .shaderImageGatherExtended = false,
1032 .shaderStorageImageExtendedFormats = true,
1033 .shaderStorageImageMultisample = false,
1034 .shaderStorageImageReadWithoutFormat = false,
1035 .shaderStorageImageWriteWithoutFormat = false,
1036 .shaderUniformBufferArrayDynamicIndexing = false,
1037 .shaderSampledImageArrayDynamicIndexing = false,
1038 .shaderStorageBufferArrayDynamicIndexing = false,
1039 .shaderStorageImageArrayDynamicIndexing = false,
1040 .shaderClipDistance = true,
1041 .shaderCullDistance = false,
1042 .shaderFloat64 = false,
1043 .shaderInt64 = false,
1044 .shaderInt16 = false,
1045 .shaderResourceResidency = false,
1046 .shaderResourceMinLod = false,
1047 .sparseBinding = false,
1048 .sparseResidencyBuffer = false,
1049 .sparseResidencyImage2D = false,
1050 .sparseResidencyImage3D = false,
1051 .sparseResidency2Samples = false,
1052 .sparseResidency4Samples = false,
1053 .sparseResidency8Samples = false,
1054 .sparseResidency16Samples = false,
1055 .sparseResidencyAliased = false,
1056 .variableMultisampleRate = false,
1057 .inheritedQueries = true,
1058 };
1059 }
1060
1061 VKAPI_ATTR void VKAPI_CALL
1062 v3dv_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
1063 VkPhysicalDeviceFeatures2 *pFeatures)
1064 {
1065 v3dv_GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
1066
1067 VkPhysicalDeviceVulkan11Features vk11 = {
1068 .storageBuffer16BitAccess = false,
1069 .uniformAndStorageBuffer16BitAccess = false,
1070 .storagePushConstant16 = false,
1071 .storageInputOutput16 = false,
1072 .multiview = true,
1073 .multiviewGeometryShader = false,
1074 .multiviewTessellationShader = false,
1075 .variablePointersStorageBuffer = true,
1076 /* FIXME: this needs support for non-constant index on UBO/SSBO */
1077 .variablePointers = false,
1078 .protectedMemory = false,
1079 .samplerYcbcrConversion = false,
1080 .shaderDrawParameters = false,
1081 };
1082
1083 vk_foreach_struct(ext, pFeatures->pNext) {
1084 switch (ext->sType) {
1085 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
1086 VkPhysicalDeviceCustomBorderColorFeaturesEXT *features =
1087 (VkPhysicalDeviceCustomBorderColorFeaturesEXT *)ext;
1088 features->customBorderColors = true;
1089 features->customBorderColorWithoutFormat = false;
1090 break;
1091 }
1092
1093 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR: {
1094 VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *features =
1095 (VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *)ext;
1096 features->uniformBufferStandardLayout = true;
1097 break;
1098 }
1099
1100 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: {
1101 VkPhysicalDevicePrivateDataFeaturesEXT *features =
1102 (VkPhysicalDevicePrivateDataFeaturesEXT *)ext;
1103 features->privateData = true;
1104 break;
1105 }
1106
1107 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
1108 VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
1109 (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
1110 features->indexTypeUint8 = true;
1111 break;
1112 }
1113
1114 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT: {
1115 VkPhysicalDeviceColorWriteEnableFeaturesEXT *features = (void *) ext;
1116 features->colorWriteEnable = true;
1117 break;
1118 }
1119
1120 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT: {
1121 VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT *features = (void *) ext;
1122 features->pipelineCreationCacheControl = true;
1123 break;
1124 }
1125
1126 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: {
1127 VkPhysicalDeviceProvokingVertexFeaturesEXT *features = (void *) ext;
1128 features->provokingVertexLast = true;
1129 /* FIXME: update when supporting EXT_transform_feedback */
1130 features->transformFeedbackPreservesProvokingVertex = false;
1131 break;
1132 }
1133
1134 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
1135 VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
1136 (void *) ext;
1137 features->vertexAttributeInstanceRateDivisor = true;
1138 features->vertexAttributeInstanceRateZeroDivisor = false;
1139 break;
1140 }
1141
1142 /* Vulkan 1.1 */
1143 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: {
1144 VkPhysicalDeviceVulkan11Features *features =
1145 (VkPhysicalDeviceVulkan11Features *)ext;
1146 memcpy(features, &vk11, sizeof(VkPhysicalDeviceVulkan11Features));
1147 break;
1148 }
1149 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
1150 VkPhysicalDevice16BitStorageFeatures *features = (void *) ext;
1151 features->storageBuffer16BitAccess = vk11.storageBuffer16BitAccess;
1152 features->uniformAndStorageBuffer16BitAccess =
1153 vk11.uniformAndStorageBuffer16BitAccess;
1154 features->storagePushConstant16 = vk11.storagePushConstant16;
1155 features->storageInputOutput16 = vk11.storageInputOutput16;
1156 break;
1157 }
1158 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
1159 VkPhysicalDeviceMultiviewFeatures *features = (void *) ext;
1160 features->multiview = vk11.multiview;
1161 features->multiviewGeometryShader = vk11.multiviewGeometryShader;
1162 features->multiviewTessellationShader = vk11.multiviewTessellationShader;
1163 break;
1164 }
1165 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
1166 VkPhysicalDeviceProtectedMemoryFeatures *features = (void *) ext;
1167 features->protectedMemory = vk11.protectedMemory;
1168 break;
1169 }
1170 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
1171 VkPhysicalDeviceSamplerYcbcrConversionFeatures *features = (void *) ext;
1172 features->samplerYcbcrConversion = vk11.samplerYcbcrConversion;
1173 break;
1174 }
1175 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
1176 VkPhysicalDeviceShaderDrawParametersFeatures *features = (void *) ext;
1177 features->shaderDrawParameters = vk11.shaderDrawParameters;
1178 break;
1179 }
1180 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
1181 VkPhysicalDeviceVariablePointersFeatures *features = (void *) ext;
1182 features->variablePointersStorageBuffer =
1183 vk11.variablePointersStorageBuffer;
1184 features->variablePointers = vk11.variablePointers;
1185 break;
1186 }
1187
1188 default:
1189 v3dv_debug_ignored_stype(ext->sType);
1190 break;
1191 }
1192 }
1193 }
1194
1195 VKAPI_ATTR void VKAPI_CALL
1196 v3dv_GetDeviceGroupPeerMemoryFeatures(VkDevice device,
1197 uint32_t heapIndex,
1198 uint32_t localDeviceIndex,
1199 uint32_t remoteDeviceIndex,
1200 VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
1201 {
1202 assert(localDeviceIndex == 0 && remoteDeviceIndex == 0);
1203 *pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
1204 VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
1205 VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
1206 VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
1207 }
1208
1209 uint32_t
1210 v3dv_physical_device_vendor_id(struct v3dv_physical_device *dev)
1211 {
1212 return 0x14E4; /* Broadcom */
1213 }
1214
1215
1216 #if using_v3d_simulator
1217 static bool
1218 get_i915_param(int fd, uint32_t param, int *value)
1219 {
1220 int tmp;
1221
1222 struct drm_i915_getparam gp = {
1223 .param = param,
1224 .value = &tmp,
1225 };
1226
1227 int ret = drmIoctl(fd, DRM_IOCTL_I915_GETPARAM, &gp);
1228 if (ret != 0)
1229 return false;
1230
1231 *value = tmp;
1232 return true;
1233 }
1234 #endif
1235
1236 uint32_t
1237 v3dv_physical_device_device_id(struct v3dv_physical_device *dev)
1238 {
1239 #if using_v3d_simulator
1240 int devid = 0;
1241
1242 if (!get_i915_param(dev->render_fd, I915_PARAM_CHIPSET_ID, &devid))
1243 fprintf(stderr, "Error getting device_id\n");
1244
1245 return devid;
1246 #else
1247 switch (dev->devinfo.ver) {
1248 case 42:
1249 return 0xBE485FD3; /* Broadcom deviceID for 2711 */
1250 default:
1251 unreachable("Unsupported V3D version");
1252 }
1253 #endif
1254 }
1255
1256 VKAPI_ATTR void VKAPI_CALL
1257 v3dv_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
1258 VkPhysicalDeviceProperties *pProperties)
1259 {
1260 V3DV_FROM_HANDLE(v3dv_physical_device, pdevice, physicalDevice);
1261
1262 STATIC_ASSERT(MAX_SAMPLED_IMAGES + MAX_STORAGE_IMAGES + MAX_INPUT_ATTACHMENTS
1263 <= V3D_MAX_TEXTURE_SAMPLERS);
1264 STATIC_ASSERT(MAX_UNIFORM_BUFFERS >= MAX_DYNAMIC_UNIFORM_BUFFERS);
1265 STATIC_ASSERT(MAX_STORAGE_BUFFERS >= MAX_DYNAMIC_STORAGE_BUFFERS);
1266
1267 const uint32_t page_size = 4096;
1268 const uint32_t mem_size = compute_heap_size();
1269
1270 const uint32_t max_varying_components = 16 * 4;
1271
1272 const uint32_t v3d_coord_shift = 6;
1273
1274 const float v3d_point_line_granularity = 2.0f / (1 << v3d_coord_shift);
1275 const uint32_t max_fb_size = 4096;
1276
1277 const VkSampleCountFlags supported_sample_counts =
1278 VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1279
1280 struct timespec clock_res;
1281 clock_getres(CLOCK_MONOTONIC, &clock_res);
1282 const float timestamp_period =
1283 clock_res.tv_sec * 1000000000.0f + clock_res.tv_nsec;
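
/* On a typical Linux setup CLOCK_MONOTONIC reports 1 ns resolution, which
 * makes timestamp_period 1.0f (one nanosecond per timestamp tick); coarser
 * clock resolutions scale it up accordingly.
 */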
1284
1285 /* FIXME: this will probably require an in-depth review */
1286 VkPhysicalDeviceLimits limits = {
1287 .maxImageDimension1D = 4096,
1288 .maxImageDimension2D = 4096,
1289 .maxImageDimension3D = 4096,
1290 .maxImageDimensionCube = 4096,
1291 .maxImageArrayLayers = 2048,
1292 .maxTexelBufferElements = (1ul << 28),
1293 .maxUniformBufferRange = V3D_MAX_BUFFER_RANGE,
1294 .maxStorageBufferRange = V3D_MAX_BUFFER_RANGE,
1295 .maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
1296 .maxMemoryAllocationCount = mem_size / page_size,
1297 .maxSamplerAllocationCount = 64 * 1024,
1298 .bufferImageGranularity = 256, /* A cache line */
1299 .sparseAddressSpaceSize = 0,
1300 .maxBoundDescriptorSets = MAX_SETS,
1301 .maxPerStageDescriptorSamplers = V3D_MAX_TEXTURE_SAMPLERS,
1302 .maxPerStageDescriptorUniformBuffers = MAX_UNIFORM_BUFFERS,
1303 .maxPerStageDescriptorStorageBuffers = MAX_STORAGE_BUFFERS,
1304 .maxPerStageDescriptorSampledImages = MAX_SAMPLED_IMAGES,
1305 .maxPerStageDescriptorStorageImages = MAX_STORAGE_IMAGES,
1306 .maxPerStageDescriptorInputAttachments = MAX_INPUT_ATTACHMENTS,
1307 .maxPerStageResources = 128,
1308
1309 /* Some of these limits are multiplied by 6 because they need to
1310 * include all possible shader stages (even if not supported). See
1311 * 'Required Limits' table in the Vulkan spec.
1312 */
1313 .maxDescriptorSetSamplers = 6 * V3D_MAX_TEXTURE_SAMPLERS,
1314 .maxDescriptorSetUniformBuffers = 6 * MAX_UNIFORM_BUFFERS,
1315 .maxDescriptorSetUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS,
1316 .maxDescriptorSetStorageBuffers = 6 * MAX_STORAGE_BUFFERS,
1317 .maxDescriptorSetStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS,
1318 .maxDescriptorSetSampledImages = 6 * MAX_SAMPLED_IMAGES,
1319 .maxDescriptorSetStorageImages = 6 * MAX_STORAGE_IMAGES,
1320 .maxDescriptorSetInputAttachments = MAX_INPUT_ATTACHMENTS,
1321
1322 /* Vertex limits */
1323 .maxVertexInputAttributes = MAX_VERTEX_ATTRIBS,
1324 .maxVertexInputBindings = MAX_VBS,
1325 .maxVertexInputAttributeOffset = 0xffffffff,
1326 .maxVertexInputBindingStride = 0xffffffff,
1327 .maxVertexOutputComponents = max_varying_components,
1328
1329 /* Tessellation limits */
1330 .maxTessellationGenerationLevel = 0,
1331 .maxTessellationPatchSize = 0,
1332 .maxTessellationControlPerVertexInputComponents = 0,
1333 .maxTessellationControlPerVertexOutputComponents = 0,
1334 .maxTessellationControlPerPatchOutputComponents = 0,
1335 .maxTessellationControlTotalOutputComponents = 0,
1336 .maxTessellationEvaluationInputComponents = 0,
1337 .maxTessellationEvaluationOutputComponents = 0,
1338
1339 /* Geometry limits */
1340 .maxGeometryShaderInvocations = 32,
1341 .maxGeometryInputComponents = 64,
1342 .maxGeometryOutputComponents = 64,
1343 .maxGeometryOutputVertices = 256,
1344 .maxGeometryTotalOutputComponents = 1024,
1345
1346 /* Fragment limits */
1347 .maxFragmentInputComponents = max_varying_components,
1348 .maxFragmentOutputAttachments = 4,
1349 .maxFragmentDualSrcAttachments = 0,
1350 .maxFragmentCombinedOutputResources = MAX_RENDER_TARGETS +
1351 MAX_STORAGE_BUFFERS +
1352 MAX_STORAGE_IMAGES,
1353
1354 /* Compute limits */
1355 .maxComputeSharedMemorySize = 16384,
1356 .maxComputeWorkGroupCount = { 65535, 65535, 65535 },
1357 .maxComputeWorkGroupInvocations = 256,
1358 .maxComputeWorkGroupSize = { 256, 256, 256 },
1359
1360 .subPixelPrecisionBits = v3d_coord_shift,
1361 .subTexelPrecisionBits = 8,
1362 .mipmapPrecisionBits = 8,
1363 .maxDrawIndexedIndexValue = 0x00ffffff,
1364 .maxDrawIndirectCount = 0x7fffffff,
1365 .maxSamplerLodBias = 14.0f,
1366 .maxSamplerAnisotropy = 16.0f,
1367 .maxViewports = MAX_VIEWPORTS,
1368 .maxViewportDimensions = { max_fb_size, max_fb_size },
1369 .viewportBoundsRange = { -2.0 * max_fb_size,
1370 2.0 * max_fb_size - 1 },
1371 .viewportSubPixelBits = 0,
1372 .minMemoryMapAlignment = page_size,
1373 .minTexelBufferOffsetAlignment = V3D_UIFBLOCK_SIZE,
1374 .minUniformBufferOffsetAlignment = 32,
1375 .minStorageBufferOffsetAlignment = 32,
1376 .minTexelOffset = -8,
1377 .maxTexelOffset = 7,
1378 .minTexelGatherOffset = -8,
1379 .maxTexelGatherOffset = 7,
1380 .minInterpolationOffset = -0.5,
1381 .maxInterpolationOffset = 0.5,
1382 .subPixelInterpolationOffsetBits = v3d_coord_shift,
1383 .maxFramebufferWidth = max_fb_size,
1384 .maxFramebufferHeight = max_fb_size,
1385 .maxFramebufferLayers = 256,
1386 .framebufferColorSampleCounts = supported_sample_counts,
1387 .framebufferDepthSampleCounts = supported_sample_counts,
1388 .framebufferStencilSampleCounts = supported_sample_counts,
1389 .framebufferNoAttachmentsSampleCounts = supported_sample_counts,
1390 .maxColorAttachments = MAX_RENDER_TARGETS,
1391 .sampledImageColorSampleCounts = supported_sample_counts,
1392 .sampledImageIntegerSampleCounts = supported_sample_counts,
1393 .sampledImageDepthSampleCounts = supported_sample_counts,
1394 .sampledImageStencilSampleCounts = supported_sample_counts,
1395 .storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
1396 .maxSampleMaskWords = 1,
1397 .timestampComputeAndGraphics = true,
1398 .timestampPeriod = timestamp_period,
1399 .maxClipDistances = 8,
1400 .maxCullDistances = 0,
1401 .maxCombinedClipAndCullDistances = 8,
1402 .discreteQueuePriorities = 2,
1403 .pointSizeRange = { v3d_point_line_granularity,
1404 V3D_MAX_POINT_SIZE },
1405 .lineWidthRange = { 1.0f, V3D_MAX_LINE_WIDTH },
1406 .pointSizeGranularity = v3d_point_line_granularity,
1407 .lineWidthGranularity = v3d_point_line_granularity,
1408 .strictLines = true,
1409 .standardSampleLocations = false,
1410 .optimalBufferCopyOffsetAlignment = 32,
1411 .optimalBufferCopyRowPitchAlignment = 32,
1412 .nonCoherentAtomSize = 256,
1413 };
1414
1415 *pProperties = (VkPhysicalDeviceProperties) {
1416 .apiVersion = V3DV_API_VERSION,
1417 .driverVersion = vk_get_driver_version(),
1418 .vendorID = v3dv_physical_device_vendor_id(pdevice),
1419 .deviceID = v3dv_physical_device_device_id(pdevice),
1420 .deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
1421 .limits = limits,
1422 .sparseProperties = { 0 },
1423 };
1424
1425 snprintf(pProperties->deviceName, sizeof(pProperties->deviceName),
1426 "%s", pdevice->name);
1427 memcpy(pProperties->pipelineCacheUUID,
1428 pdevice->pipeline_cache_uuid, VK_UUID_SIZE);
1429 }
1430
1431 VKAPI_ATTR void VKAPI_CALL
1432 v3dv_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
1433 VkPhysicalDeviceProperties2 *pProperties)
1434 {
1435 V3DV_FROM_HANDLE(v3dv_physical_device, pdevice, physicalDevice);
1436
1437 v3dv_GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
1438
1439 vk_foreach_struct(ext, pProperties->pNext) {
1440 switch (ext->sType) {
1441 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: {
1442 VkPhysicalDeviceCustomBorderColorPropertiesEXT *props =
1443 (VkPhysicalDeviceCustomBorderColorPropertiesEXT *)ext;
1444 props->maxCustomBorderColorSamplers = V3D_MAX_TEXTURE_SAMPLERS;
1445 break;
1446 }
1447 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: {
1448 VkPhysicalDeviceProvokingVertexPropertiesEXT *props =
1449 (VkPhysicalDeviceProvokingVertexPropertiesEXT *)ext;
1450 props->provokingVertexModePerPipeline = true;
1451 /* FIXME: update when supporting EXT_transform_feedback */
1452 props->transformFeedbackPreservesTriangleFanProvokingVertex = false;
1453 break;
1454 }
1455 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: {
1456 VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *props =
1457 (VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *)ext;
1458 props->maxVertexAttribDivisor = 0xffff;
1459 break;
1460 }
1461 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
1462 VkPhysicalDeviceIDProperties *id_props =
1463 (VkPhysicalDeviceIDProperties *)ext;
1464 memcpy(id_props->deviceUUID, pdevice->device_uuid, VK_UUID_SIZE);
1465 memcpy(id_props->driverUUID, pdevice->driver_uuid, VK_UUID_SIZE);
1466 /* The LUID is for Windows. */
1467 id_props->deviceLUIDValid = false;
1468 break;
1469 }
1470 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT: {
1471 VkPhysicalDeviceDrmPropertiesEXT *props =
1472 (VkPhysicalDeviceDrmPropertiesEXT *)ext;
1473 props->hasPrimary = pdevice->has_primary;
1474 if (props->hasPrimary) {
1475 props->primaryMajor = (int64_t) major(pdevice->primary_devid);
1476 props->primaryMinor = (int64_t) minor(pdevice->primary_devid);
1477 }
1478 props->hasRender = pdevice->has_render;
1479 if (props->hasRender) {
1480 props->renderMajor = (int64_t) major(pdevice->render_devid);
1481 props->renderMinor = (int64_t) minor(pdevice->render_devid);
1482 }
1483 break;
1484 }
1485 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
1486 VkPhysicalDeviceMaintenance3Properties *props =
1487 (VkPhysicalDeviceMaintenance3Properties *)ext;
1488 /* We don't really have special restrictions for the maximum
1489 * descriptors per set, other than maybe not exceeding the limits
1490 * of addressable memory in a single allocation on either the host
1491 * or the GPU. This will be a much larger limit than any of the
1492 * per-stage limits already available in Vulkan though, so in practice,
1493 * it is not expected to limit anything beyond what is already
1494 * constrained through per-stage limits.
1495 */
1496 uint32_t max_host_descriptors =
1497 (UINT32_MAX - sizeof(struct v3dv_descriptor_set)) /
1498 sizeof(struct v3dv_descriptor);
1499 uint32_t max_gpu_descriptors =
1500 (UINT32_MAX / v3dv_X(pdevice, max_descriptor_bo_size)());
1501 props->maxPerSetDescriptors =
1502 MIN2(max_host_descriptors, max_gpu_descriptors);
1503
1504 /* Minimum required by the spec */
1505 props->maxMemoryAllocationSize = MAX_MEMORY_ALLOCATION_SIZE;
1506 break;
1507 }
1508 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
1509 VkPhysicalDeviceMultiviewProperties *props =
1510 (VkPhysicalDeviceMultiviewProperties *)ext;
1511 props->maxMultiviewViewCount = MAX_MULTIVIEW_VIEW_COUNT;
1512 props->maxMultiviewInstanceIndex = UINT32_MAX - 1;
1513 break;
1514 }
1515 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
1516 /* Do nothing, not even logging. This is a non-PCI device, so we will
1517 * never provide this extension.
1518 */
1519 break;
1520 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
1521 VkPhysicalDevicePointClippingProperties *props =
1522 (VkPhysicalDevicePointClippingProperties *)ext;
1523 props->pointClippingBehavior =
1524 VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
1525 break;
1526 }
1527 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: {
1528 VkPhysicalDeviceProtectedMemoryProperties *props =
1529 (VkPhysicalDeviceProtectedMemoryProperties *)ext;
1530 props->protectedNoFault = false;
1531 break;
1532 }
1533 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: {
1534 VkPhysicalDeviceSubgroupProperties *props =
1535 (VkPhysicalDeviceSubgroupProperties *)ext;
1536 props->subgroupSize = V3D_CHANNELS;
1537 props->supportedStages = VK_SHADER_STAGE_COMPUTE_BIT;
1538 props->supportedOperations = VK_SUBGROUP_FEATURE_BASIC_BIT;
1539 props->quadOperationsInAllStages = false;
1540 break;
1541 }
1542 default:
1543 v3dv_debug_ignored_stype(ext->sType);
1544 break;
1545 }
1546 }
1547 }
1548
1549 /* We support exactly one queue family. */
1550 static const VkQueueFamilyProperties
1551 v3dv_queue_family_properties = {
1552 .queueFlags = VK_QUEUE_GRAPHICS_BIT |
1553 VK_QUEUE_COMPUTE_BIT |
1554 VK_QUEUE_TRANSFER_BIT,
1555 .queueCount = 1,
1556 .timestampValidBits = 64,
1557 .minImageTransferGranularity = { 1, 1, 1 },
1558 };
1559
1560 VKAPI_ATTR void VKAPI_CALL
1561 v3dv_GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
1562 uint32_t *pCount,
1563 VkQueueFamilyProperties *pQueueFamilyProperties)
1564 {
1565 VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pCount);
1566
1567 vk_outarray_append(&out, p) {
1568 *p = v3dv_queue_family_properties;
1569 }
1570 }
1571
1572 VKAPI_ATTR void VKAPI_CALL
1573 v3dv_GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
1574 uint32_t *pQueueFamilyPropertyCount,
1575 VkQueueFamilyProperties2 *pQueueFamilyProperties)
1576 {
1577 VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);
1578
1579 vk_outarray_append(&out, p) {
1580 p->queueFamilyProperties = v3dv_queue_family_properties;
1581
1582 vk_foreach_struct(s, p->pNext) {
1583 v3dv_debug_ignored_stype(s->sType);
1584 }
1585 }
1586 }
1587
1588 VKAPI_ATTR void VKAPI_CALL
1589 v3dv_GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
1590 VkPhysicalDeviceMemoryProperties *pMemoryProperties)
1591 {
1592 V3DV_FROM_HANDLE(v3dv_physical_device, device, physicalDevice);
1593 *pMemoryProperties = device->memory;
1594 }
1595
1596 VKAPI_ATTR void VKAPI_CALL
1597 v3dv_GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
1598 VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
1599 {
1600 v3dv_GetPhysicalDeviceMemoryProperties(physicalDevice,
1601 &pMemoryProperties->memoryProperties);
1602
1603 vk_foreach_struct(ext, pMemoryProperties->pNext) {
1604 switch (ext->sType) {
1605 default:
1606 v3dv_debug_ignored_stype(ext->sType);
1607 break;
1608 }
1609 }
1610 }
1611
1612 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1613 v3dv_GetInstanceProcAddr(VkInstance _instance,
1614 const char *pName)
1615 {
1616 V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
1617 return vk_instance_get_proc_addr(&instance->vk,
1618 &v3dv_instance_entrypoints,
1619 pName);
1620 }
1621
1622 /* With version 1+ of the loader interface the ICD should expose
1623 * vk_icdGetInstanceProcAddr to work around certain LD_PRELOAD issues seen in apps.
1624 */
1625 PUBLIC
1626 VKAPI_ATTR PFN_vkVoidFunction
1627 VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance,
1628 const char *pName);
1629
1630 PUBLIC
1631 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1632 vk_icdGetInstanceProcAddr(VkInstance instance,
1633 const char* pName)
1634 {
1635 return v3dv_GetInstanceProcAddr(instance, pName);
1636 }
1637
1638 /* With version 4+ of the loader interface the ICD should expose
1639 * vk_icdGetPhysicalDeviceProcAddr()
1640 */
1641 PUBLIC
1642 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1643 vk_icdGetPhysicalDeviceProcAddr(VkInstance _instance,
1644 const char* pName);
1645
1646 PFN_vkVoidFunction
1647 vk_icdGetPhysicalDeviceProcAddr(VkInstance _instance,
1648 const char* pName)
1649 {
1650 V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
1651
1652 return vk_instance_get_physical_device_proc_addr(&instance->vk, pName);
1653 }
1654
1655 VKAPI_ATTR VkResult VKAPI_CALL
1656 v3dv_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
1657 VkLayerProperties *pProperties)
1658 {
1659 if (pProperties == NULL) {
1660 *pPropertyCount = 0;
1661 return VK_SUCCESS;
1662 }
1663
1664 return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1665 }
1666
1667 VKAPI_ATTR VkResult VKAPI_CALL
1668 v3dv_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
1669 uint32_t *pPropertyCount,
1670 VkLayerProperties *pProperties)
1671 {
1672 V3DV_FROM_HANDLE(v3dv_physical_device, physical_device, physicalDevice);
1673
1674 if (pProperties == NULL) {
1675 *pPropertyCount = 0;
1676 return VK_SUCCESS;
1677 }
1678
1679 return vk_error(physical_device, VK_ERROR_LAYER_NOT_PRESENT);
1680 }
1681
1682 static VkResult
1683 queue_init(struct v3dv_device *device, struct v3dv_queue *queue,
1684 const VkDeviceQueueCreateInfo *create_info,
1685 uint32_t index_in_family)
1686 {
1687 VkResult result = vk_queue_init(&queue->vk, &device->vk, create_info,
1688 index_in_family);
1689 if (result != VK_SUCCESS)
1690 return result;
1691 queue->device = device;
1692 queue->noop_job = NULL;
1693 list_inithead(&queue->submit_wait_list);
1694 pthread_mutex_init(&queue->mutex, NULL);
1695 return VK_SUCCESS;
1696 }
1697
1698 static void
1699 queue_finish(struct v3dv_queue *queue)
1700 {
1701 vk_queue_finish(&queue->vk);
1702 assert(list_is_empty(&queue->submit_wait_list));
1703 if (queue->noop_job)
1704 v3dv_job_destroy(queue->noop_job);
1705 pthread_mutex_destroy(&queue->mutex);
1706 }
1707
1708 static void
1709 init_device_meta(struct v3dv_device *device)
1710 {
1711 mtx_init(&device->meta.mtx, mtx_plain);
1712 v3dv_meta_clear_init(device);
1713 v3dv_meta_blit_init(device);
1714 v3dv_meta_texel_buffer_copy_init(device);
1715 }
1716
1717 static void
1718 destroy_device_meta(struct v3dv_device *device)
1719 {
1720 mtx_destroy(&device->meta.mtx);
1721 v3dv_meta_clear_finish(device);
1722 v3dv_meta_blit_finish(device);
1723 v3dv_meta_texel_buffer_copy_finish(device);
1724 }
1725
1726 VKAPI_ATTR VkResult VKAPI_CALL
1727 v3dv_CreateDevice(VkPhysicalDevice physicalDevice,
1728 const VkDeviceCreateInfo *pCreateInfo,
1729 const VkAllocationCallbacks *pAllocator,
1730 VkDevice *pDevice)
1731 {
1732 V3DV_FROM_HANDLE(v3dv_physical_device, physical_device, physicalDevice);
1733 struct v3dv_instance *instance = (struct v3dv_instance*) physical_device->vk.instance;
1734 VkResult result;
1735 struct v3dv_device *device;
1736
1737 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
1738
1739    /* Check requested queues (we only expose one queue) */
1740 assert(pCreateInfo->queueCreateInfoCount == 1);
1741 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
1742 assert(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex == 0);
1743 assert(pCreateInfo->pQueueCreateInfos[i].queueCount == 1);
1744 if (pCreateInfo->pQueueCreateInfos[i].flags != 0)
1745 return vk_error(instance, VK_ERROR_INITIALIZATION_FAILED);
1746 }
1747
1748 device = vk_zalloc2(&physical_device->vk.instance->alloc, pAllocator,
1749 sizeof(*device), 8,
1750 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
1751 if (!device)
1752 return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1753
1754 struct vk_device_dispatch_table dispatch_table;
1755 vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1756 &v3dv_device_entrypoints, true);
1757 vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1758 &wsi_device_entrypoints, false);
1759 result = vk_device_init(&device->vk, &physical_device->vk,
1760 &dispatch_table, pCreateInfo, pAllocator);
1761 if (result != VK_SUCCESS) {
1762 vk_free(&device->vk.alloc, device);
1763 return vk_error(NULL, result);
1764 }
1765
1766 device->instance = instance;
1767 device->pdevice = physical_device;
1768
1769 if (pAllocator)
1770 device->vk.alloc = *pAllocator;
1771 else
1772 device->vk.alloc = physical_device->vk.instance->alloc;
1773
1774 pthread_mutex_init(&device->mutex, NULL);
1775
1776 result = queue_init(device, &device->queue,
1777 pCreateInfo->pQueueCreateInfos, 0);
1778 if (result != VK_SUCCESS)
1779 goto fail;
1780
1781 device->devinfo = physical_device->devinfo;
1782
1783 /* Vulkan 1.1 and VK_KHR_get_physical_device_properties2 added
1784 * VkPhysicalDeviceFeatures2 which can be used in the pNext chain of
1785 * vkDeviceCreateInfo, in which case it should be used instead of
1786 * pEnabledFeatures.
1787 */
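   /* Illustrative, application-side sketch (not part of this driver): with
    * the pNext chain, features would be provided roughly like this:
    *
    *    VkPhysicalDeviceFeatures2 features = {
    *       .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
    *       .features = { .robustBufferAccess = VK_TRUE },
    *    };
    *    VkDeviceCreateInfo info = {
    *       .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
    *       .pNext = &features,
    *       .pEnabledFeatures = NULL,
    *       ...
    *    };
    */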
1788 const VkPhysicalDeviceFeatures2 *features2 =
1789 vk_find_struct_const(pCreateInfo->pNext, PHYSICAL_DEVICE_FEATURES_2);
1790 if (features2) {
1791 memcpy(&device->features, &features2->features,
1792 sizeof(device->features));
1793 } else if (pCreateInfo->pEnabledFeatures) {
1794 memcpy(&device->features, pCreateInfo->pEnabledFeatures,
1795 sizeof(device->features));
1796 }
1797
1798 if (device->features.robustBufferAccess)
1799 perf_debug("Device created with Robust Buffer Access enabled.\n");
1800
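   /* last_job_sync is created already signaled so that the first wait on it,
    * issued before any job has been submitted, completes immediately.
    */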
1801 int ret = drmSyncobjCreate(physical_device->render_fd,
1802 DRM_SYNCOBJ_CREATE_SIGNALED,
1803 &device->last_job_sync);
1804 if (ret) {
1805 result = VK_ERROR_INITIALIZATION_FAILED;
1806 goto fail;
1807 }
1808
1809 #ifdef DEBUG
1810 v3dv_X(device, device_check_prepacked_sizes)();
1811 #endif
1812 init_device_meta(device);
1813 v3dv_bo_cache_init(device);
1814 v3dv_pipeline_cache_init(&device->default_pipeline_cache, device, 0,
1815 device->instance->default_pipeline_cache_enabled);
1816 device->default_attribute_float =
1817 v3dv_pipeline_create_default_attribute_values(device, NULL);
1818
1819 *pDevice = v3dv_device_to_handle(device);
1820
1821 return VK_SUCCESS;
1822
1823 fail:
1824 vk_device_finish(&device->vk);
1825 vk_free(&device->vk.alloc, device);
1826
1827 return result;
1828 }
1829
1830 VKAPI_ATTR void VKAPI_CALL
1831 v3dv_DestroyDevice(VkDevice _device,
1832 const VkAllocationCallbacks *pAllocator)
1833 {
1834 V3DV_FROM_HANDLE(v3dv_device, device, _device);
1835
1836 v3dv_DeviceWaitIdle(_device);
1837 queue_finish(&device->queue);
1838 pthread_mutex_destroy(&device->mutex);
1839 drmSyncobjDestroy(device->pdevice->render_fd, device->last_job_sync);
1840 destroy_device_meta(device);
1841 v3dv_pipeline_cache_finish(&device->default_pipeline_cache);
1842
1843 if (device->default_attribute_float) {
1844 v3dv_bo_free(device, device->default_attribute_float);
1845 device->default_attribute_float = NULL;
1846 }
1847
1848    /* The BO cache must be destroyed last, as destroying any of the objects
1849     * above may still free their private BOs back into the cache.
1850     */
1851 v3dv_bo_cache_destroy(device);
1852
1853 vk_device_finish(&device->vk);
1854 vk_free2(&device->vk.alloc, pAllocator, device);
1855 }
1856
1857 VKAPI_ATTR VkResult VKAPI_CALL
1858 v3dv_DeviceWaitIdle(VkDevice _device)
1859 {
1860 V3DV_FROM_HANDLE(v3dv_device, device, _device);
1861 return v3dv_QueueWaitIdle(v3dv_queue_to_handle(&device->queue));
1862 }
1863
1864 static VkResult
1865 device_alloc(struct v3dv_device *device,
1866 struct v3dv_device_memory *mem,
1867 VkDeviceSize size)
1868 {
1869 /* Our kernel interface is 32-bit */
1870 assert(size <= UINT32_MAX);
1871
1872 mem->bo = v3dv_bo_alloc(device, size, "device_alloc", false);
1873 if (!mem->bo)
1874 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
1875
1876 return VK_SUCCESS;
1877 }
1878
1879 static void
1880 device_free_wsi_dumb(int32_t display_fd, int32_t dumb_handle)
1881 {
1882 assert(display_fd != -1);
1883 if (dumb_handle < 0)
1884 return;
1885
1886 struct drm_mode_destroy_dumb destroy_dumb = {
1887 .handle = dumb_handle,
1888 };
1889 if (v3dv_ioctl(display_fd, DRM_IOCTL_MODE_DESTROY_DUMB, &destroy_dumb)) {
1890 fprintf(stderr, "destroy dumb object %d: %s\n", dumb_handle, strerror(errno));
1891 }
1892 }
1893
1894 static void
1895 device_free(struct v3dv_device *device, struct v3dv_device_memory *mem)
1896 {
1897 /* If this memory allocation was for WSI, then we need to use the
1898 * display device to free the allocated dumb BO.
1899 */
1900 if (mem->is_for_wsi) {
1901 assert(mem->has_bo_ownership);
1902 device_free_wsi_dumb(device->instance->physicalDevice.display_fd,
1903 mem->bo->dumb_handle);
1904 }
1905
1906 if (mem->has_bo_ownership)
1907 v3dv_bo_free(device, mem->bo);
1908 else if (mem->bo)
1909 vk_free(&device->vk.alloc, mem->bo);
1910 }
1911
1912 static void
1913 device_unmap(struct v3dv_device *device, struct v3dv_device_memory *mem)
1914 {
1915 assert(mem && mem->bo->map && mem->bo->map_size > 0);
1916 v3dv_bo_unmap(device, mem->bo);
1917 }
1918
1919 static VkResult
1920 device_map(struct v3dv_device *device, struct v3dv_device_memory *mem)
1921 {
1922 assert(mem && mem->bo);
1923
1924 /* From the spec:
1925 *
1926 * "After a successful call to vkMapMemory the memory object memory is
1927 * considered to be currently host mapped. It is an application error to
1928 * call vkMapMemory on a memory object that is already host mapped."
1929 *
1930    * We are not concerned with this ourselves (validation layers should
1931    * catch these errors and warn users); however, the driver may map BOs
1932    * internally (for example, for debug CLIF dumps or some CPU-side
1933    * operations), so by the time the user calls here the buffer might
1934    * already have been mapped internally by the driver.
1935 */
1936 if (mem->bo->map) {
1937 assert(mem->bo->map_size == mem->bo->size);
1938 return VK_SUCCESS;
1939 }
1940
1941 bool ok = v3dv_bo_map(device, mem->bo, mem->bo->size);
1942 if (!ok)
1943 return VK_ERROR_MEMORY_MAP_FAILED;
1944
1945 return VK_SUCCESS;
1946 }
1947
1948 static VkResult
1949 device_import_bo(struct v3dv_device *device,
1950 const VkAllocationCallbacks *pAllocator,
1951 int fd, uint64_t size,
1952 struct v3dv_bo **bo)
1953 {
1954 VkResult result;
1955
1956 *bo = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(struct v3dv_bo), 8,
1957 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1958 if (*bo == NULL) {
1959 result = VK_ERROR_OUT_OF_HOST_MEMORY;
1960 goto fail;
1961 }
1962
1963 off_t real_size = lseek(fd, 0, SEEK_END);
1964 lseek(fd, 0, SEEK_SET);
1965 if (real_size < 0 || (uint64_t) real_size < size) {
1966 result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
1967 goto fail;
1968 }
1969
1970 int render_fd = device->pdevice->render_fd;
1971 assert(render_fd >= 0);
1972
1973 int ret;
1974 uint32_t handle;
1975 ret = drmPrimeFDToHandle(render_fd, fd, &handle);
1976 if (ret) {
1977 result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
1978 goto fail;
1979 }
1980
1981 struct drm_v3d_get_bo_offset get_offset = {
1982 .handle = handle,
1983 };
1984 ret = v3dv_ioctl(render_fd, DRM_IOCTL_V3D_GET_BO_OFFSET, &get_offset);
1985 if (ret) {
1986 result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
1987 goto fail;
1988 }
1989 assert(get_offset.offset != 0);
1990
1991 v3dv_bo_init(*bo, handle, size, get_offset.offset, "import", false);
1992
1993 return VK_SUCCESS;
1994
1995 fail:
1996 if (*bo) {
1997 vk_free2(&device->vk.alloc, pAllocator, *bo);
1998 *bo = NULL;
1999 }
2000 return result;
2001 }
2002
2003 static VkResult
2004 device_alloc_for_wsi(struct v3dv_device *device,
2005 const VkAllocationCallbacks *pAllocator,
2006 struct v3dv_device_memory *mem,
2007 VkDeviceSize size)
2008 {
2009 /* In the simulator we can get away with a regular allocation since both
2010 * allocation and rendering happen in the same DRM render node. On actual
2011 * hardware we need to allocate our winsys BOs on the vc4 display device
2012 * and import them into v3d.
2013 */
2014 #if using_v3d_simulator
2015 return device_alloc(device, mem, size);
2016 #else
2017    /* If we are allocating for WSI we should have a swapchain, and therefore
2018     * the display device should already have been initialized. However, Zink
2019     * doesn't use swapchains, so in that case we can get here without having
2020     * acquired the display device and we need to do it now.
2021     */
2022 VkResult result;
2023 struct v3dv_instance *instance = device->instance;
2024 struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
2025 if (unlikely(pdevice->display_fd < 0)) {
2026 result = v3dv_physical_device_acquire_display(instance, pdevice, NULL);
2027 if (result != VK_SUCCESS)
2028 return result;
2029 }
2030 assert(pdevice->display_fd != -1);
2031
2032 mem->is_for_wsi = true;
2033
2034 int display_fd = pdevice->display_fd;
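   /* Size the dumb BO in whole pages: at 1024 pixels wide and 32 bpp each
    * row is 1024 * 4 = 4096 bytes (one page), so the height is simply the
    * allocation size in 4096-byte pages.
    */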
2035 struct drm_mode_create_dumb create_dumb = {
2036 .width = 1024, /* one page */
2037 .height = align(size, 4096) / 4096,
2038 .bpp = util_format_get_blocksizebits(PIPE_FORMAT_RGBA8888_UNORM),
2039 };
2040
2041 int err;
2042 err = v3dv_ioctl(display_fd, DRM_IOCTL_MODE_CREATE_DUMB, &create_dumb);
2043 if (err < 0)
2044 goto fail_create;
2045
2046 int fd;
2047 err =
2048 drmPrimeHandleToFD(display_fd, create_dumb.handle, O_CLOEXEC, &fd);
2049 if (err < 0)
2050 goto fail_export;
2051
2052 result = device_import_bo(device, pAllocator, fd, size, &mem->bo);
2053 close(fd);
2054 if (result != VK_SUCCESS)
2055 goto fail_import;
2056
2057 mem->bo->dumb_handle = create_dumb.handle;
2058 return VK_SUCCESS;
2059
2060 fail_import:
2061 fail_export:
2062 device_free_wsi_dumb(display_fd, create_dumb.handle);
2063
2064 fail_create:
2065 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2066 #endif
2067 }
2068
2069 VKAPI_ATTR VkResult VKAPI_CALL
2070 v3dv_AllocateMemory(VkDevice _device,
2071 const VkMemoryAllocateInfo *pAllocateInfo,
2072 const VkAllocationCallbacks *pAllocator,
2073 VkDeviceMemory *pMem)
2074 {
2075 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2076 struct v3dv_device_memory *mem;
2077 struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
2078
2079 assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
2080
2081 /* The Vulkan 1.0.33 spec says "allocationSize must be greater than 0". */
2082 assert(pAllocateInfo->allocationSize > 0);
2083
2084 mem = vk_object_zalloc(&device->vk, pAllocator, sizeof(*mem),
2085 VK_OBJECT_TYPE_DEVICE_MEMORY);
2086 if (mem == NULL)
2087 return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
2088
2089 assert(pAllocateInfo->memoryTypeIndex < pdevice->memory.memoryTypeCount);
2090 mem->type = &pdevice->memory.memoryTypes[pAllocateInfo->memoryTypeIndex];
2091 mem->has_bo_ownership = true;
2092 mem->is_for_wsi = false;
2093
2094 const struct wsi_memory_allocate_info *wsi_info = NULL;
2095 const VkImportMemoryFdInfoKHR *fd_info = NULL;
2096 vk_foreach_struct_const(ext, pAllocateInfo->pNext) {
2097 switch ((unsigned)ext->sType) {
2098 case VK_STRUCTURE_TYPE_WSI_MEMORY_ALLOCATE_INFO_MESA:
2099 wsi_info = (void *)ext;
2100 break;
2101 case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
2102 fd_info = (void *)ext;
2103 break;
2104 case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
2105 /* We don't support VK_KHR_buffer_device_address or multiple
2106 * devices per device group, so we can ignore this.
2107 */
2108 break;
2109 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR:
2110 /* We don't have particular optimizations associated with memory
2111 * allocations that won't be suballocated to multiple resources.
2112 */
2113 break;
2114 case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR:
2115 /* The mask of handle types specified here must be supported
2116 * according to VkExternalImageFormatProperties, so it must be
2117 * fd or dmabuf, which don't have special requirements for us.
2118 */
2119 break;
2120 default:
2121 v3dv_debug_ignored_stype(ext->sType);
2122 break;
2123 }
2124 }
2125
2126 VkResult result = VK_SUCCESS;
2127
2128 /* We always allocate device memory in multiples of a page, so round up
2129 * requested size to that.
2130 */
2131 VkDeviceSize alloc_size = ALIGN(pAllocateInfo->allocationSize, 4096);
2132
2133 if (unlikely(alloc_size > MAX_MEMORY_ALLOCATION_SIZE)) {
2134 result = VK_ERROR_OUT_OF_DEVICE_MEMORY;
2135 } else {
2136 if (wsi_info) {
2137 result = device_alloc_for_wsi(device, pAllocator, mem, alloc_size);
2138 } else if (fd_info && fd_info->handleType) {
2139 assert(fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
2140 fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2141 result = device_import_bo(device, pAllocator,
2142 fd_info->fd, alloc_size, &mem->bo);
2143 mem->has_bo_ownership = false;
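         /* A successful import transfers ownership of the fd to the driver,
          * so we must close it here; on failure it remains owned by the app.
          */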
2144 if (result == VK_SUCCESS)
2145 close(fd_info->fd);
2146 } else {
2147 result = device_alloc(device, mem, alloc_size);
2148 }
2149 }
2150
2151 if (result != VK_SUCCESS) {
2152 vk_object_free(&device->vk, pAllocator, mem);
2153 return vk_error(device, result);
2154 }
2155
2156 *pMem = v3dv_device_memory_to_handle(mem);
2157 return result;
2158 }
2159
2160 VKAPI_ATTR void VKAPI_CALL
2161 v3dv_FreeMemory(VkDevice _device,
2162 VkDeviceMemory _mem,
2163 const VkAllocationCallbacks *pAllocator)
2164 {
2165 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2166 V3DV_FROM_HANDLE(v3dv_device_memory, mem, _mem);
2167
2168 if (mem == NULL)
2169 return;
2170
2171 if (mem->bo->map)
2172 v3dv_UnmapMemory(_device, _mem);
2173
2174 device_free(device, mem);
2175
2176 vk_object_free(&device->vk, pAllocator, mem);
2177 }
2178
2179 VKAPI_ATTR VkResult VKAPI_CALL
2180 v3dv_MapMemory(VkDevice _device,
2181 VkDeviceMemory _memory,
2182 VkDeviceSize offset,
2183 VkDeviceSize size,
2184 VkMemoryMapFlags flags,
2185 void **ppData)
2186 {
2187 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2188 V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
2189
2190 if (mem == NULL) {
2191 *ppData = NULL;
2192 return VK_SUCCESS;
2193 }
2194
2195 assert(offset < mem->bo->size);
2196
2197 /* Since the driver can map BOs internally as well and the mapped range
2198 * required by the user or the driver might not be the same, we always map
2199 * the entire BO and then add the requested offset to the start address
2200 * of the mapped region.
2201 */
2202 VkResult result = device_map(device, mem);
2203 if (result != VK_SUCCESS)
2204 return vk_error(device, result);
2205
2206 *ppData = ((uint8_t *) mem->bo->map) + offset;
2207 return VK_SUCCESS;
2208 }
2209
2210 VKAPI_ATTR void VKAPI_CALL
2211 v3dv_UnmapMemory(VkDevice _device,
2212 VkDeviceMemory _memory)
2213 {
2214 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2215 V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
2216
2217 if (mem == NULL)
2218 return;
2219
2220 device_unmap(device, mem);
2221 }
2222
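/* vkFlushMappedMemoryRanges and vkInvalidateMappedMemoryRanges are only
 * required for non-coherent memory, and the memory types we expose are
 * host-coherent, so both are no-ops here.
 */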
2223 VKAPI_ATTR VkResult VKAPI_CALL
2224 v3dv_FlushMappedMemoryRanges(VkDevice _device,
2225 uint32_t memoryRangeCount,
2226 const VkMappedMemoryRange *pMemoryRanges)
2227 {
2228 return VK_SUCCESS;
2229 }
2230
2231 VKAPI_ATTR VkResult VKAPI_CALL
2232 v3dv_InvalidateMappedMemoryRanges(VkDevice _device,
2233 uint32_t memoryRangeCount,
2234 const VkMappedMemoryRange *pMemoryRanges)
2235 {
2236 return VK_SUCCESS;
2237 }
2238
2239 VKAPI_ATTR void VKAPI_CALL
2240 v3dv_GetImageMemoryRequirements2(VkDevice device,
2241 const VkImageMemoryRequirementsInfo2 *pInfo,
2242 VkMemoryRequirements2 *pMemoryRequirements)
2243 {
2244 V3DV_FROM_HANDLE(v3dv_image, image, pInfo->image);
2245
2246 pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
2247 .memoryTypeBits = 0x1,
2248 .alignment = image->alignment,
2249 .size = image->size
2250 };
2251
2252 vk_foreach_struct(ext, pMemoryRequirements->pNext) {
2253 switch (ext->sType) {
2254 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
2255 VkMemoryDedicatedRequirements *req =
2256 (VkMemoryDedicatedRequirements *) ext;
2257 req->requiresDedicatedAllocation = image->vk.external_handle_types != 0;
2258 req->prefersDedicatedAllocation = image->vk.external_handle_types != 0;
2259 break;
2260 }
2261 default:
2262 v3dv_debug_ignored_stype(ext->sType);
2263 break;
2264 }
2265 }
2266 }
2267
2268 static void
2269 bind_image_memory(const VkBindImageMemoryInfo *info)
2270 {
2271 V3DV_FROM_HANDLE(v3dv_image, image, info->image);
2272 V3DV_FROM_HANDLE(v3dv_device_memory, mem, info->memory);
2273
2274 /* Valid usage:
2275 *
2276 * "memoryOffset must be an integer multiple of the alignment member of
2277 * the VkMemoryRequirements structure returned from a call to
2278 * vkGetImageMemoryRequirements with image"
2279 */
2280 assert(info->memoryOffset % image->alignment == 0);
2281 assert(info->memoryOffset < mem->bo->size);
2282
2283 image->mem = mem;
2284 image->mem_offset = info->memoryOffset;
2285 }
2286
2287 VKAPI_ATTR VkResult VKAPI_CALL
2288 v3dv_BindImageMemory2(VkDevice _device,
2289 uint32_t bindInfoCount,
2290 const VkBindImageMemoryInfo *pBindInfos)
2291 {
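   /* If a VkBindImageMemorySwapchainInfoKHR struct is chained, the image
    * must be bound to the memory backing the referenced swapchain image,
    * at that image's offset, instead of an application-provided allocation.
    */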
2292 for (uint32_t i = 0; i < bindInfoCount; i++) {
2293 const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
2294          vk_find_struct_const(pBindInfos[i].pNext,
2295 BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR);
2296 if (swapchain_info && swapchain_info->swapchain) {
2297 struct v3dv_image *swapchain_image =
2298 v3dv_wsi_get_image_from_swapchain(swapchain_info->swapchain,
2299 swapchain_info->imageIndex);
2300 VkBindImageMemoryInfo swapchain_bind = {
2301 .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
2302 .image = pBindInfos[i].image,
2303 .memory = v3dv_device_memory_to_handle(swapchain_image->mem),
2304 .memoryOffset = swapchain_image->mem_offset,
2305 };
2306 bind_image_memory(&swapchain_bind);
2307 } else {
2308 bind_image_memory(&pBindInfos[i]);
2309 }
2310 }
2311
2312 return VK_SUCCESS;
2313 }
2314
2315 VKAPI_ATTR void VKAPI_CALL
2316 v3dv_GetBufferMemoryRequirements2(VkDevice device,
2317 const VkBufferMemoryRequirementsInfo2 *pInfo,
2318 VkMemoryRequirements2 *pMemoryRequirements)
2319 {
2320 V3DV_FROM_HANDLE(v3dv_buffer, buffer, pInfo->buffer);
2321
2322 pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
2323 .memoryTypeBits = 0x1,
2324 .alignment = buffer->alignment,
2325 .size = align64(buffer->size, buffer->alignment),
2326 };
2327
2328 vk_foreach_struct(ext, pMemoryRequirements->pNext) {
2329 switch (ext->sType) {
2330 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
2331 VkMemoryDedicatedRequirements *req =
2332 (VkMemoryDedicatedRequirements *) ext;
2333 req->requiresDedicatedAllocation = false;
2334 req->prefersDedicatedAllocation = false;
2335 break;
2336 }
2337 default:
2338 v3dv_debug_ignored_stype(ext->sType);
2339 break;
2340 }
2341 }
2342 }
2343
2344 static void
2345 bind_buffer_memory(const VkBindBufferMemoryInfo *info)
2346 {
2347 V3DV_FROM_HANDLE(v3dv_buffer, buffer, info->buffer);
2348 V3DV_FROM_HANDLE(v3dv_device_memory, mem, info->memory);
2349
2350 /* Valid usage:
2351 *
2352 * "memoryOffset must be an integer multiple of the alignment member of
2353 * the VkMemoryRequirements structure returned from a call to
2354 * vkGetBufferMemoryRequirements with buffer"
2355 */
2356 assert(info->memoryOffset % buffer->alignment == 0);
2357 assert(info->memoryOffset < mem->bo->size);
2358
2359 buffer->mem = mem;
2360 buffer->mem_offset = info->memoryOffset;
2361 }
2362
2363
2364 VKAPI_ATTR VkResult VKAPI_CALL
2365 v3dv_BindBufferMemory2(VkDevice device,
2366 uint32_t bindInfoCount,
2367 const VkBindBufferMemoryInfo *pBindInfos)
2368 {
2369 for (uint32_t i = 0; i < bindInfoCount; i++)
2370 bind_buffer_memory(&pBindInfos[i]);
2371
2372 return VK_SUCCESS;
2373 }
2374
2375 VKAPI_ATTR VkResult VKAPI_CALL
2376 v3dv_CreateBuffer(VkDevice _device,
2377 const VkBufferCreateInfo *pCreateInfo,
2378 const VkAllocationCallbacks *pAllocator,
2379 VkBuffer *pBuffer)
2380 {
2381 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2382 struct v3dv_buffer *buffer;
2383
2384 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
2385 assert(pCreateInfo->usage != 0);
2386
2387 /* We don't support any flags for now */
2388 assert(pCreateInfo->flags == 0);
2389
2390 buffer = vk_object_zalloc(&device->vk, pAllocator, sizeof(*buffer),
2391 VK_OBJECT_TYPE_BUFFER);
2392 if (buffer == NULL)
2393 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2394
2395 buffer->size = pCreateInfo->size;
2396 buffer->usage = pCreateInfo->usage;
2397 buffer->alignment = 256; /* nonCoherentAtomSize */
2398
2399 /* Limit allocations to 32-bit */
2400 const VkDeviceSize aligned_size = align64(buffer->size, buffer->alignment);
2401    if (aligned_size > UINT32_MAX || aligned_size < buffer->size) {
2402       vk_object_free(&device->vk, pAllocator, buffer);
           return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
2403
2404 *pBuffer = v3dv_buffer_to_handle(buffer);
2405
2406 return VK_SUCCESS;
2407 }
2408
2409 VKAPI_ATTR void VKAPI_CALL
2410 v3dv_DestroyBuffer(VkDevice _device,
2411 VkBuffer _buffer,
2412 const VkAllocationCallbacks *pAllocator)
2413 {
2414 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2415 V3DV_FROM_HANDLE(v3dv_buffer, buffer, _buffer);
2416
2417 if (!buffer)
2418 return;
2419
2420 vk_object_free(&device->vk, pAllocator, buffer);
2421 }
2422
2423 VKAPI_ATTR VkResult VKAPI_CALL
2424 v3dv_CreateFramebuffer(VkDevice _device,
2425 const VkFramebufferCreateInfo *pCreateInfo,
2426 const VkAllocationCallbacks *pAllocator,
2427 VkFramebuffer *pFramebuffer)
2428 {
2429 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2430 struct v3dv_framebuffer *framebuffer;
2431
2432 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
2433
2434 size_t size = sizeof(*framebuffer) +
2435 sizeof(struct v3dv_image_view *) * pCreateInfo->attachmentCount;
2436 framebuffer = vk_object_zalloc(&device->vk, pAllocator, size,
2437 VK_OBJECT_TYPE_FRAMEBUFFER);
2438 if (framebuffer == NULL)
2439 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2440
2441 framebuffer->width = pCreateInfo->width;
2442 framebuffer->height = pCreateInfo->height;
2443 framebuffer->layers = pCreateInfo->layers;
2444 framebuffer->has_edge_padding = true;
2445
2446 framebuffer->attachment_count = pCreateInfo->attachmentCount;
2447 framebuffer->color_attachment_count = 0;
2448 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
2449 framebuffer->attachments[i] =
2450 v3dv_image_view_from_handle(pCreateInfo->pAttachments[i]);
2451 if (framebuffer->attachments[i]->vk.aspects & VK_IMAGE_ASPECT_COLOR_BIT)
2452 framebuffer->color_attachment_count++;
2453 }
2454
2455 *pFramebuffer = v3dv_framebuffer_to_handle(framebuffer);
2456
2457 return VK_SUCCESS;
2458 }
2459
2460 VKAPI_ATTR void VKAPI_CALL
2461 v3dv_DestroyFramebuffer(VkDevice _device,
2462 VkFramebuffer _fb,
2463 const VkAllocationCallbacks *pAllocator)
2464 {
2465 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2466 V3DV_FROM_HANDLE(v3dv_framebuffer, fb, _fb);
2467
2468 if (!fb)
2469 return;
2470
2471 vk_object_free(&device->vk, pAllocator, fb);
2472 }
2473
2474 VKAPI_ATTR VkResult VKAPI_CALL
2475 v3dv_GetMemoryFdPropertiesKHR(VkDevice _device,
2476 VkExternalMemoryHandleTypeFlagBits handleType,
2477 int fd,
2478 VkMemoryFdPropertiesKHR *pMemoryFdProperties)
2479 {
2480 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2481 struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
2482
2483 switch (handleType) {
2484 case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
2485 pMemoryFdProperties->memoryTypeBits =
2486 (1 << pdevice->memory.memoryTypeCount) - 1;
2487 return VK_SUCCESS;
2488 default:
2489 return vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
2490 }
2491 }
2492
2493 VKAPI_ATTR VkResult VKAPI_CALL
2494 v3dv_GetMemoryFdKHR(VkDevice _device,
2495 const VkMemoryGetFdInfoKHR *pGetFdInfo,
2496 int *pFd)
2497 {
2498 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2499 V3DV_FROM_HANDLE(v3dv_device_memory, mem, pGetFdInfo->memory);
2500
2501 assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
2502 assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
2503 pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2504
2505 int fd, ret;
2506 ret = drmPrimeHandleToFD(device->pdevice->render_fd,
2507 mem->bo->handle,
2508 DRM_CLOEXEC, &fd);
2509 if (ret)
2510 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2511
2512 *pFd = fd;
2513
2514 return VK_SUCCESS;
2515 }
2516
2517 VKAPI_ATTR VkResult VKAPI_CALL
2518 v3dv_CreateEvent(VkDevice _device,
2519 const VkEventCreateInfo *pCreateInfo,
2520 const VkAllocationCallbacks *pAllocator,
2521 VkEvent *pEvent)
2522 {
2523 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2524 struct v3dv_event *event =
2525 vk_object_zalloc(&device->vk, pAllocator, sizeof(*event),
2526 VK_OBJECT_TYPE_EVENT);
2527 if (!event)
2528 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2529
2530 /* Events are created in the unsignaled state */
2531 event->state = false;
2532 *pEvent = v3dv_event_to_handle(event);
2533
2534 return VK_SUCCESS;
2535 }
2536
2537 VKAPI_ATTR void VKAPI_CALL
2538 v3dv_DestroyEvent(VkDevice _device,
2539 VkEvent _event,
2540 const VkAllocationCallbacks *pAllocator)
2541 {
2542 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2543 V3DV_FROM_HANDLE(v3dv_event, event, _event);
2544
2545 if (!event)
2546 return;
2547
2548 vk_object_free(&device->vk, pAllocator, event);
2549 }
2550
2551 VKAPI_ATTR VkResult VKAPI_CALL
2552 v3dv_GetEventStatus(VkDevice _device, VkEvent _event)
2553 {
2554 V3DV_FROM_HANDLE(v3dv_event, event, _event);
2555 return p_atomic_read(&event->state) ? VK_EVENT_SET : VK_EVENT_RESET;
2556 }
2557
2558 VKAPI_ATTR VkResult VKAPI_CALL
2559 v3dv_SetEvent(VkDevice _device, VkEvent _event)
2560 {
2561 V3DV_FROM_HANDLE(v3dv_event, event, _event);
2562 p_atomic_set(&event->state, 1);
2563 return VK_SUCCESS;
2564 }
2565
2566 VKAPI_ATTR VkResult VKAPI_CALL
2567 v3dv_ResetEvent(VkDevice _device, VkEvent _event)
2568 {
2569 V3DV_FROM_HANDLE(v3dv_event, event, _event);
2570 p_atomic_set(&event->state, 0);
2571 return VK_SUCCESS;
2572 }
2573
2574 VKAPI_ATTR VkResult VKAPI_CALL
2575 v3dv_CreateSampler(VkDevice _device,
2576 const VkSamplerCreateInfo *pCreateInfo,
2577 const VkAllocationCallbacks *pAllocator,
2578 VkSampler *pSampler)
2579 {
2580 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2581 struct v3dv_sampler *sampler;
2582
2583 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
2584
2585 sampler = vk_object_zalloc(&device->vk, pAllocator, sizeof(*sampler),
2586 VK_OBJECT_TYPE_SAMPLER);
2587 if (!sampler)
2588 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2589
2590 sampler->compare_enable = pCreateInfo->compareEnable;
2591 sampler->unnormalized_coordinates = pCreateInfo->unnormalizedCoordinates;
2592
2593 const VkSamplerCustomBorderColorCreateInfoEXT *bc_info =
2594 vk_find_struct_const(pCreateInfo->pNext,
2595 SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT);
2596
2597 v3dv_X(device, pack_sampler_state)(sampler, pCreateInfo, bc_info);
2598
2599 *pSampler = v3dv_sampler_to_handle(sampler);
2600
2601 return VK_SUCCESS;
2602 }
2603
2604 VKAPI_ATTR void VKAPI_CALL
2605 v3dv_DestroySampler(VkDevice _device,
2606 VkSampler _sampler,
2607 const VkAllocationCallbacks *pAllocator)
2608 {
2609 V3DV_FROM_HANDLE(v3dv_device, device, _device);
2610 V3DV_FROM_HANDLE(v3dv_sampler, sampler, _sampler);
2611
2612 if (!sampler)
2613 return;
2614
2615 vk_object_free(&device->vk, pAllocator, sampler);
2616 }
2617
2618 VKAPI_ATTR void VKAPI_CALL
2619 v3dv_GetDeviceMemoryCommitment(VkDevice device,
2620 VkDeviceMemory memory,
2621 VkDeviceSize *pCommittedMemoryInBytes)
2622 {
2623 *pCommittedMemoryInBytes = 0;
2624 }
2625
2626 VKAPI_ATTR void VKAPI_CALL
2627 v3dv_GetImageSparseMemoryRequirements(
2628 VkDevice device,
2629 VkImage image,
2630 uint32_t *pSparseMemoryRequirementCount,
2631 VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
2632 {
2633 *pSparseMemoryRequirementCount = 0;
2634 }
2635
2636 VKAPI_ATTR void VKAPI_CALL
2637 v3dv_GetImageSparseMemoryRequirements2(
2638 VkDevice device,
2639 const VkImageSparseMemoryRequirementsInfo2 *pInfo,
2640 uint32_t *pSparseMemoryRequirementCount,
2641 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
2642 {
2643 *pSparseMemoryRequirementCount = 0;
2644 }
2645
2646 /* vk_icd.h does not declare this function, so we declare it here to
2647 * suppress Wmissing-prototypes.
2648 */
2649 PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2650 vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion);
2651
2652 PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2653 vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion)
2654 {
2655 /* For the full details on loader interface versioning, see
2656 * <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
2657 * What follows is a condensed summary, to help you navigate the large and
2658 * confusing official doc.
2659 *
2660 * - Loader interface v0 is incompatible with later versions. We don't
2661 * support it.
2662 *
2663 * - In loader interface v1:
2664 * - The first ICD entrypoint called by the loader is
2665 * vk_icdGetInstanceProcAddr(). The ICD must statically expose this
2666 * entrypoint.
2667 * - The ICD must statically expose no other Vulkan symbol unless it is
2668 * linked with -Bsymbolic.
2669 * - Each dispatchable Vulkan handle created by the ICD must be
2670 * a pointer to a struct whose first member is VK_LOADER_DATA. The
2671 * ICD must initialize VK_LOADER_DATA.loadMagic to ICD_LOADER_MAGIC.
2672 * - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
2673 * vkDestroySurfaceKHR(). The ICD must be capable of working with
2674 * such loader-managed surfaces.
2675 *
2676 * - Loader interface v2 differs from v1 in:
2677 * - The first ICD entrypoint called by the loader is
2678 * vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
2679 * statically expose this entrypoint.
2680 *
2681 * - Loader interface v3 differs from v2 in:
2682 * - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
2683     *      vkDestroySurfaceKHR(), and other API which uses VkSurfaceKHR,
2684 * because the loader no longer does so.
2685 *
2686 * - Loader interface v4 differs from v3 in:
2687 * - The ICD must implement vk_icdGetPhysicalDeviceProcAddr().
2688 */
2689 *pSupportedVersion = MIN2(*pSupportedVersion, 3u);
2690 return VK_SUCCESS;
2691 }
2692