/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_physical_device.h"

#include <stdio.h>

#include "git_sha1.h"
#include "util/mesa-sha1.h"
#include "venus-protocol/vn_protocol_driver_device.h"
#include "vk_android.h"

#include "vn_android.h"
#include "vn_instance.h"

#define IMAGE_FORMAT_CACHE_MAX_ENTRIES 100

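/**
 * Compute the index of extension `ext` within table `tbl` from the offset of
 * its bool member relative to the table's `extensions` array.
 */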
#define VN_EXTENSION_TABLE_INDEX(tbl, ext)                                   \
   ((const bool *)((const void *)(&(tbl)) +                                  \
                   offsetof(__typeof__(tbl), ext)) -                         \
    (tbl).extensions)

/** Add `elem` to the pNext chain of `head`. */
#define VN_ADD_PNEXT(head, s_type, elem)                                     \
   do {                                                                      \
      (elem).sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_##s_type;             \
      (elem).pNext = (head).pNext;                                           \
      (head).pNext = &(elem);                                                \
   } while (0)
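
/* For example, VN_ADD_PNEXT(feats2, VULKAN_1_1_FEATURES, vk11_feats) sets
 * vk11_feats.sType to VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES
 * and prepends vk11_feats to the chain of feats2.
 */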

/**
 * If the renderer supports the extension, add `elem` to the pNext chain of
 * `head`.
 */
#define VN_ADD_PNEXT_EXT(head, s_type, elem, ext_cond)                       \
   do {                                                                      \
      if (ext_cond)                                                          \
         VN_ADD_PNEXT((head), s_type, (elem));                               \
   } while (0)

/**
 * Set member in core feature/property struct to value. (This provides visual
 * parity with VN_SET_CORE_FIELD).
 */
#define VN_SET_CORE_VALUE(core_struct, member, val)                          \
   do {                                                                      \
      (core_struct)->member = (val);                                         \
   } while (0)

/** Copy member into core feature/property struct from extension struct. */
#define VN_SET_CORE_FIELD(core_struct, member, ext_struct)                   \
   VN_SET_CORE_VALUE((core_struct), member, (ext_struct).member)

/**
 * Copy array member into core feature/property struct from extension struct.
 */
#define VN_SET_CORE_ARRAY(core_struct, member, ext_struct)                   \
   do {                                                                      \
      memcpy((core_struct)->member, (ext_struct).member,                     \
             sizeof((core_struct)->member));                                 \
   } while (0)

static void
vn_physical_device_init_features(struct vn_physical_device *physical_dev)
{
   const uint32_t renderer_version = physical_dev->renderer_version;
   const struct vk_device_extension_table *exts =
      &physical_dev->renderer_extensions;
   struct vn_ring *ring = physical_dev->instance->ring.ring;
   VkPhysicalDeviceFeatures2 feats2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
   };
   struct {
      VkPhysicalDeviceFeatures vulkan_1_0;
      VkPhysicalDeviceVulkan11Features vulkan_1_1;
      VkPhysicalDeviceVulkan12Features vulkan_1_2;
      VkPhysicalDeviceVulkan13Features vulkan_1_3;

      /* Vulkan 1.1 */
      VkPhysicalDevice16BitStorageFeatures _16bit_storage;
      VkPhysicalDeviceMultiviewFeatures multiview;
      VkPhysicalDeviceVariablePointersFeatures variable_pointers;
      VkPhysicalDeviceProtectedMemoryFeatures protected_memory;
      VkPhysicalDeviceSamplerYcbcrConversionFeatures sampler_ycbcr_conversion;
      VkPhysicalDeviceShaderDrawParametersFeatures shader_draw_parameters;

      /* Vulkan 1.2 */
      VkPhysicalDevice8BitStorageFeatures _8bit_storage;
      VkPhysicalDeviceShaderAtomicInt64Features shader_atomic_int64;
      VkPhysicalDeviceShaderFloat16Int8Features shader_float16_int8;
      VkPhysicalDeviceDescriptorIndexingFeatures descriptor_indexing;
      VkPhysicalDeviceScalarBlockLayoutFeatures scalar_block_layout;
      VkPhysicalDeviceImagelessFramebufferFeatures imageless_framebuffer;
      VkPhysicalDeviceUniformBufferStandardLayoutFeatures
         uniform_buffer_standard_layout;
      VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures
         shader_subgroup_extended_types;
      VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures
         separate_depth_stencil_layouts;
      VkPhysicalDeviceHostQueryResetFeatures host_query_reset;
      VkPhysicalDeviceTimelineSemaphoreFeatures timeline_semaphore;
      VkPhysicalDeviceBufferDeviceAddressFeatures buffer_device_address;
      VkPhysicalDeviceVulkanMemoryModelFeatures vulkan_memory_model;

      /* Vulkan 1.3 */
      VkPhysicalDeviceDynamicRenderingFeatures dynamic_rendering;
      VkPhysicalDeviceImageRobustnessFeatures image_robustness;
      VkPhysicalDeviceInlineUniformBlockFeatures inline_uniform_block;
      VkPhysicalDeviceMaintenance4Features maintenance4;
      VkPhysicalDevicePipelineCreationCacheControlFeatures
         pipeline_creation_cache_control;
      VkPhysicalDevicePrivateDataFeatures private_data;
      VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures
         shader_demote_to_helper_invocation;
      VkPhysicalDeviceShaderIntegerDotProductFeatures
         shader_integer_dot_product;
      VkPhysicalDeviceShaderTerminateInvocationFeatures
         shader_terminate_invocation;
      VkPhysicalDeviceSynchronization2Features synchronization2;
      VkPhysicalDeviceSubgroupSizeControlFeatures subgroup_size_control;
      VkPhysicalDeviceTextureCompressionASTCHDRFeatures
         texture_compression_astc_hdr;
      VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
         zero_initialize_workgroup_memory;

      /* Vulkan 1.3: The extensions for the below structs were promoted, but
       * some struct members were omitted from
       * VkPhysicalDeviceVulkan13Features.
       */
      VkPhysicalDevice4444FormatsFeaturesEXT _4444_formats;
      VkPhysicalDeviceExtendedDynamicStateFeaturesEXT extended_dynamic_state;
      VkPhysicalDeviceExtendedDynamicState2FeaturesEXT
         extended_dynamic_state_2;
      VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT texel_buffer_alignment;
      VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
         ycbcr_2plane_444_formats;

      /* KHR */
      VkPhysicalDeviceShaderClockFeaturesKHR shader_clock;
      VkPhysicalDeviceShaderExpectAssumeFeaturesKHR expect_assume;

      /* EXT */
      VkPhysicalDeviceBorderColorSwizzleFeaturesEXT border_color_swizzle;
      VkPhysicalDeviceColorWriteEnableFeaturesEXT color_write_enable;
      VkPhysicalDeviceConditionalRenderingFeaturesEXT conditional_rendering;
      VkPhysicalDeviceCustomBorderColorFeaturesEXT custom_border_color;
      VkPhysicalDeviceDepthClipControlFeaturesEXT depth_clip_control;
      VkPhysicalDeviceDepthClipEnableFeaturesEXT depth_clip_enable;
      VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT
         dynamic_rendering_unused_attachments;
      VkPhysicalDeviceExtendedDynamicState3FeaturesEXT
         extended_dynamic_state_3;
      VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT
         fragment_shader_interlock;
      VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT
         graphics_pipeline_library;
      VkPhysicalDeviceImage2DViewOf3DFeaturesEXT image_2d_view_of_3d;
      VkPhysicalDeviceImageViewMinLodFeaturesEXT image_view_min_lod;
      VkPhysicalDeviceIndexTypeUint8FeaturesEXT index_type_uint8;
      VkPhysicalDeviceLineRasterizationFeaturesEXT line_rasterization;
      VkPhysicalDeviceMultiDrawFeaturesEXT multi_draw;
      VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT mutable_descriptor_type;
      VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT non_seamless_cube_map;
      VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT
         primitive_topology_list_restart;
      VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT
         primitives_generated_query;
      VkPhysicalDeviceProvokingVertexFeaturesEXT provoking_vertex;
      VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
         rasterization_order_attachment_access;
      VkPhysicalDeviceRobustness2FeaturesEXT robustness_2;
      VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback;
      VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT
         vertex_attribute_divisor;
      VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT
         vertex_input_dynamic_state;
   } local_feats;

   /* Clear the struct so that all unqueried features will be VK_FALSE. */
   memset(&local_feats, 0, sizeof(local_feats));

   assert(renderer_version >= VK_API_VERSION_1_1);

   /* clang-format off */

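   /* On a 1.2+ renderer, query the core Vulkan11/Vulkan12 feature structs
    * directly; otherwise fall back to the individual structs of the promoted
    * extensions, guarded by renderer support.
    */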
   if (renderer_version >= VK_API_VERSION_1_2) {
      VN_ADD_PNEXT(feats2, VULKAN_1_1_FEATURES, local_feats.vulkan_1_1);
      VN_ADD_PNEXT(feats2, VULKAN_1_2_FEATURES, local_feats.vulkan_1_2);
   } else {
      /* Vulkan 1.1 */
      VN_ADD_PNEXT(feats2, 16BIT_STORAGE_FEATURES, local_feats._16bit_storage);
      VN_ADD_PNEXT(feats2, MULTIVIEW_FEATURES, local_feats.multiview);
      VN_ADD_PNEXT(feats2, PROTECTED_MEMORY_FEATURES, local_feats.protected_memory);
      VN_ADD_PNEXT(feats2, SAMPLER_YCBCR_CONVERSION_FEATURES, local_feats.sampler_ycbcr_conversion);
      VN_ADD_PNEXT(feats2, SHADER_DRAW_PARAMETERS_FEATURES, local_feats.shader_draw_parameters);
      VN_ADD_PNEXT(feats2, VARIABLE_POINTERS_FEATURES, local_feats.variable_pointers);

      /* Vulkan 1.2 */
      VN_ADD_PNEXT_EXT(feats2, 8BIT_STORAGE_FEATURES, local_feats._8bit_storage, exts->KHR_8bit_storage);
      VN_ADD_PNEXT_EXT(feats2, BUFFER_DEVICE_ADDRESS_FEATURES, local_feats.buffer_device_address, exts->KHR_buffer_device_address);
      VN_ADD_PNEXT_EXT(feats2, DESCRIPTOR_INDEXING_FEATURES, local_feats.descriptor_indexing, exts->EXT_descriptor_indexing);
      VN_ADD_PNEXT_EXT(feats2, HOST_QUERY_RESET_FEATURES, local_feats.host_query_reset, exts->EXT_host_query_reset);
      VN_ADD_PNEXT_EXT(feats2, IMAGELESS_FRAMEBUFFER_FEATURES, local_feats.imageless_framebuffer, exts->KHR_imageless_framebuffer);
      VN_ADD_PNEXT_EXT(feats2, SCALAR_BLOCK_LAYOUT_FEATURES, local_feats.scalar_block_layout, exts->EXT_scalar_block_layout);
      VN_ADD_PNEXT_EXT(feats2, SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, local_feats.separate_depth_stencil_layouts, exts->KHR_separate_depth_stencil_layouts);
      VN_ADD_PNEXT_EXT(feats2, SHADER_ATOMIC_INT64_FEATURES, local_feats.shader_atomic_int64, exts->KHR_shader_atomic_int64);
      VN_ADD_PNEXT_EXT(feats2, SHADER_FLOAT16_INT8_FEATURES, local_feats.shader_float16_int8, exts->KHR_shader_float16_int8);
      VN_ADD_PNEXT_EXT(feats2, SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, local_feats.shader_subgroup_extended_types, exts->KHR_shader_subgroup_extended_types);
      VN_ADD_PNEXT_EXT(feats2, TIMELINE_SEMAPHORE_FEATURES, local_feats.timeline_semaphore, exts->KHR_timeline_semaphore);
      VN_ADD_PNEXT_EXT(feats2, UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, local_feats.uniform_buffer_standard_layout, exts->KHR_uniform_buffer_standard_layout);
      VN_ADD_PNEXT_EXT(feats2, VULKAN_MEMORY_MODEL_FEATURES, local_feats.vulkan_memory_model, exts->KHR_vulkan_memory_model);
   }

   if (renderer_version >= VK_API_VERSION_1_3) {
      VN_ADD_PNEXT(feats2, VULKAN_1_3_FEATURES, local_feats.vulkan_1_3);
   } else {
      VN_ADD_PNEXT_EXT(feats2, DYNAMIC_RENDERING_FEATURES, local_feats.dynamic_rendering, exts->KHR_dynamic_rendering);
      VN_ADD_PNEXT_EXT(feats2, IMAGE_ROBUSTNESS_FEATURES, local_feats.image_robustness, exts->EXT_image_robustness);
      VN_ADD_PNEXT_EXT(feats2, INLINE_UNIFORM_BLOCK_FEATURES, local_feats.inline_uniform_block, exts->EXT_inline_uniform_block);
      VN_ADD_PNEXT_EXT(feats2, MAINTENANCE_4_FEATURES, local_feats.maintenance4, exts->KHR_maintenance4);
      VN_ADD_PNEXT_EXT(feats2, PIPELINE_CREATION_CACHE_CONTROL_FEATURES, local_feats.pipeline_creation_cache_control, exts->EXT_pipeline_creation_cache_control);
      VN_ADD_PNEXT_EXT(feats2, PRIVATE_DATA_FEATURES, local_feats.private_data, exts->EXT_private_data);
      VN_ADD_PNEXT_EXT(feats2, SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, local_feats.shader_demote_to_helper_invocation, exts->EXT_shader_demote_to_helper_invocation);
      VN_ADD_PNEXT_EXT(feats2, SHADER_INTEGER_DOT_PRODUCT_FEATURES, local_feats.shader_integer_dot_product, exts->KHR_shader_integer_dot_product);
      VN_ADD_PNEXT_EXT(feats2, SHADER_TERMINATE_INVOCATION_FEATURES, local_feats.shader_terminate_invocation, exts->KHR_shader_terminate_invocation);
      VN_ADD_PNEXT_EXT(feats2, SUBGROUP_SIZE_CONTROL_FEATURES, local_feats.subgroup_size_control, exts->EXT_subgroup_size_control);
      VN_ADD_PNEXT_EXT(feats2, SYNCHRONIZATION_2_FEATURES, local_feats.synchronization2, exts->KHR_synchronization2);
      VN_ADD_PNEXT_EXT(feats2, TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, local_feats.texture_compression_astc_hdr, exts->EXT_texture_compression_astc_hdr);
      VN_ADD_PNEXT_EXT(feats2, ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, local_feats.zero_initialize_workgroup_memory, exts->KHR_zero_initialize_workgroup_memory);
   }

   /* Vulkan 1.3: The extensions for the below structs were promoted, but some
    * struct members were omitted from VkPhysicalDeviceVulkan13Features.
    */
   VN_ADD_PNEXT_EXT(feats2, 4444_FORMATS_FEATURES_EXT, local_feats._4444_formats, exts->EXT_4444_formats);
   VN_ADD_PNEXT_EXT(feats2, EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT, local_feats.extended_dynamic_state_2, exts->EXT_extended_dynamic_state2);
   VN_ADD_PNEXT_EXT(feats2, EXTENDED_DYNAMIC_STATE_FEATURES_EXT, local_feats.extended_dynamic_state, exts->EXT_extended_dynamic_state);
   VN_ADD_PNEXT_EXT(feats2, TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, local_feats.texel_buffer_alignment, exts->EXT_texel_buffer_alignment);
   VN_ADD_PNEXT_EXT(feats2, YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, local_feats.ycbcr_2plane_444_formats, exts->EXT_ycbcr_2plane_444_formats);

   /* KHR */
   VN_ADD_PNEXT_EXT(feats2, SHADER_CLOCK_FEATURES_KHR, local_feats.shader_clock, exts->KHR_shader_clock);
   VN_ADD_PNEXT_EXT(feats2, SHADER_EXPECT_ASSUME_FEATURES_KHR, local_feats.expect_assume, exts->KHR_shader_expect_assume);

   /* EXT */
   VN_ADD_PNEXT_EXT(feats2, BORDER_COLOR_SWIZZLE_FEATURES_EXT, local_feats.border_color_swizzle, exts->EXT_border_color_swizzle);
   VN_ADD_PNEXT_EXT(feats2, COLOR_WRITE_ENABLE_FEATURES_EXT, local_feats.color_write_enable, exts->EXT_color_write_enable);
   VN_ADD_PNEXT_EXT(feats2, CONDITIONAL_RENDERING_FEATURES_EXT, local_feats.conditional_rendering, exts->EXT_conditional_rendering);
   VN_ADD_PNEXT_EXT(feats2, CUSTOM_BORDER_COLOR_FEATURES_EXT, local_feats.custom_border_color, exts->EXT_custom_border_color);
   VN_ADD_PNEXT_EXT(feats2, DEPTH_CLIP_CONTROL_FEATURES_EXT, local_feats.depth_clip_control, exts->EXT_depth_clip_control);
   VN_ADD_PNEXT_EXT(feats2, DEPTH_CLIP_ENABLE_FEATURES_EXT, local_feats.depth_clip_enable, exts->EXT_depth_clip_enable);
   VN_ADD_PNEXT_EXT(feats2, DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT, local_feats.dynamic_rendering_unused_attachments, exts->EXT_dynamic_rendering_unused_attachments);
   VN_ADD_PNEXT_EXT(feats2, EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT, local_feats.extended_dynamic_state_3, exts->EXT_extended_dynamic_state3);
   VN_ADD_PNEXT_EXT(feats2, FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, local_feats.fragment_shader_interlock, exts->EXT_fragment_shader_interlock);
   VN_ADD_PNEXT_EXT(feats2, GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT, local_feats.graphics_pipeline_library, exts->EXT_graphics_pipeline_library);
   VN_ADD_PNEXT_EXT(feats2, IMAGE_2D_VIEW_OF_3D_FEATURES_EXT, local_feats.image_2d_view_of_3d, exts->EXT_image_2d_view_of_3d);
   VN_ADD_PNEXT_EXT(feats2, IMAGE_VIEW_MIN_LOD_FEATURES_EXT, local_feats.image_view_min_lod, exts->EXT_image_view_min_lod);
   VN_ADD_PNEXT_EXT(feats2, INDEX_TYPE_UINT8_FEATURES_EXT, local_feats.index_type_uint8, exts->EXT_index_type_uint8);
   VN_ADD_PNEXT_EXT(feats2, LINE_RASTERIZATION_FEATURES_EXT, local_feats.line_rasterization, exts->EXT_line_rasterization);
   VN_ADD_PNEXT_EXT(feats2, MULTI_DRAW_FEATURES_EXT, local_feats.multi_draw, exts->EXT_multi_draw);
   VN_ADD_PNEXT_EXT(feats2, MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, local_feats.mutable_descriptor_type, exts->EXT_mutable_descriptor_type || exts->VALVE_mutable_descriptor_type);
   VN_ADD_PNEXT_EXT(feats2, NON_SEAMLESS_CUBE_MAP_FEATURES_EXT, local_feats.non_seamless_cube_map, exts->EXT_non_seamless_cube_map);
   VN_ADD_PNEXT_EXT(feats2, PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT, local_feats.primitive_topology_list_restart, exts->EXT_primitive_topology_list_restart);
   VN_ADD_PNEXT_EXT(feats2, PRIMITIVES_GENERATED_QUERY_FEATURES_EXT, local_feats.primitives_generated_query, exts->EXT_primitives_generated_query);
   VN_ADD_PNEXT_EXT(feats2, PROVOKING_VERTEX_FEATURES_EXT, local_feats.provoking_vertex, exts->EXT_provoking_vertex);
   VN_ADD_PNEXT_EXT(feats2, RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, local_feats.rasterization_order_attachment_access, exts->EXT_rasterization_order_attachment_access);
   VN_ADD_PNEXT_EXT(feats2, ROBUSTNESS_2_FEATURES_EXT, local_feats.robustness_2, exts->EXT_robustness2);
   VN_ADD_PNEXT_EXT(feats2, TRANSFORM_FEEDBACK_FEATURES_EXT, local_feats.transform_feedback, exts->EXT_transform_feedback);
   VN_ADD_PNEXT_EXT(feats2, VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, local_feats.vertex_attribute_divisor, exts->EXT_vertex_attribute_divisor);
   VN_ADD_PNEXT_EXT(feats2, VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, local_feats.vertex_input_dynamic_state, exts->EXT_vertex_input_dynamic_state);

   /* clang-format on */

   vn_call_vkGetPhysicalDeviceFeatures2(
      ring, vn_physical_device_to_handle(physical_dev), &feats2);

   struct vk_features *feats = &physical_dev->base.base.supported_features;
   vk_set_physical_device_features(feats, &feats2);

   /* Enable features for extensions natively implemented in the Venus
    * driver. See vn_physical_device_get_native_extensions.
    */
   VN_SET_CORE_VALUE(feats, deviceMemoryReport, true);

   /* To support sparse binding with feedback, we require sparse binding
    * queue families to also support submitting feedback commands. Any queue
    * families that exclusively support sparse binding are filtered out. If a
    * device only supports sparse binding via exclusive queue families that
    * get filtered out, then the feature is disabled.
    */
   if (physical_dev->sparse_binding_disabled) {
      VN_SET_CORE_VALUE(feats, sparseBinding, false);
      VN_SET_CORE_VALUE(feats, sparseResidencyBuffer, false);
      VN_SET_CORE_VALUE(feats, sparseResidencyImage2D, false);
      VN_SET_CORE_VALUE(feats, sparseResidencyImage3D, false);
      VN_SET_CORE_VALUE(feats, sparseResidency2Samples, false);
      VN_SET_CORE_VALUE(feats, sparseResidency4Samples, false);
      VN_SET_CORE_VALUE(feats, sparseResidency8Samples, false);
      VN_SET_CORE_VALUE(feats, sparseResidency16Samples, false);
      VN_SET_CORE_VALUE(feats, sparseResidencyAliased, false);
   }

   /* Disable unsupported ExtendedDynamicState3Features */
   if (exts->EXT_extended_dynamic_state3) {
      /* TODO: Add support for VK_EXT_sample_locations */
      VN_SET_CORE_VALUE(feats, extendedDynamicState3SampleLocationsEnable,
                        false);
      /* TODO: Add support for VK_EXT_blend_operation_advanced */
      VN_SET_CORE_VALUE(feats, extendedDynamicState3ColorBlendAdvanced,
                        false);
      /* VK_NV_* extensions required */
      VN_SET_CORE_VALUE(feats, extendedDynamicState3ViewportWScalingEnable,
                        false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3ViewportSwizzle, false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageToColorEnable,
                        false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageToColorLocation,
                        false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageModulationMode,
                        false);
      VN_SET_CORE_VALUE(
         feats, extendedDynamicState3CoverageModulationTableEnable, false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageModulationTable,
                        false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3CoverageReductionMode,
                        false);
      VN_SET_CORE_VALUE(
         feats, extendedDynamicState3RepresentativeFragmentTestEnable, false);
      VN_SET_CORE_VALUE(feats, extendedDynamicState3ShadingRateImageEnable,
                        false);
   }
}

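/**
 * Derive stable UUIDs from the renderer-reported properties: the pipeline
 * cache UUID by hashing the renderer's pipelineCacheUUID, the device UUID
 * from vendorID/deviceID, and the driver UUID from driverName/driverInfo.
 */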
static void
vn_physical_device_init_uuids(struct vn_physical_device *physical_dev)
{
   struct vn_physical_device_properties *props = &physical_dev->properties;
   struct VkPhysicalDeviceProperties *vk10_props = &props->vulkan_1_0;
   struct VkPhysicalDeviceVulkan11Properties *vk11_props = &props->vulkan_1_1;
   struct VkPhysicalDeviceVulkan12Properties *vk12_props = &props->vulkan_1_2;
   struct mesa_sha1 sha1_ctx;
   uint8_t sha1[SHA1_DIGEST_LENGTH];

   static_assert(VK_UUID_SIZE <= SHA1_DIGEST_LENGTH, "");

   _mesa_sha1_init(&sha1_ctx);
   _mesa_sha1_update(&sha1_ctx, &vk10_props->pipelineCacheUUID,
                     sizeof(vk10_props->pipelineCacheUUID));
   _mesa_sha1_final(&sha1_ctx, sha1);

   memcpy(vk10_props->pipelineCacheUUID, sha1, VK_UUID_SIZE);

   _mesa_sha1_init(&sha1_ctx);
   _mesa_sha1_update(&sha1_ctx, &vk10_props->vendorID,
                     sizeof(vk10_props->vendorID));
   _mesa_sha1_update(&sha1_ctx, &vk10_props->deviceID,
                     sizeof(vk10_props->deviceID));
   _mesa_sha1_final(&sha1_ctx, sha1);

   memcpy(vk11_props->deviceUUID, sha1, VK_UUID_SIZE);

   _mesa_sha1_init(&sha1_ctx);
   _mesa_sha1_update(&sha1_ctx, vk12_props->driverName,
                     strlen(vk12_props->driverName));
   _mesa_sha1_update(&sha1_ctx, vk12_props->driverInfo,
                     strlen(vk12_props->driverInfo));
   _mesa_sha1_final(&sha1_ctx, sha1);

   memcpy(vk11_props->driverUUID, sha1, VK_UUID_SIZE);

   memset(vk11_props->deviceLUID, 0, VK_LUID_SIZE);
   vk11_props->deviceNodeMask = 0;
   vk11_props->deviceLUIDValid = false;
}

static void
vn_physical_device_init_properties(struct vn_physical_device *physical_dev)
{
   const uint32_t renderer_version = physical_dev->renderer_version;
   struct vn_instance *instance = physical_dev->instance;
   struct vn_physical_device_properties *props = &physical_dev->properties;
   const struct vk_device_extension_table *exts =
      &physical_dev->renderer_extensions;
   VkPhysicalDeviceProperties2 props2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
   };
   struct {
      /* Vulkan 1.1 */
      VkPhysicalDeviceIDProperties id;
      VkPhysicalDeviceSubgroupProperties subgroup;
      VkPhysicalDevicePointClippingProperties point_clipping;
      VkPhysicalDeviceMultiviewProperties multiview;
      VkPhysicalDeviceProtectedMemoryProperties protected_memory;
      VkPhysicalDeviceMaintenance3Properties maintenance_3;

      /* Vulkan 1.2 */
      VkPhysicalDeviceDriverProperties driver;
      VkPhysicalDeviceFloatControlsProperties float_controls;
      VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing;
      VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve;
      VkPhysicalDeviceSamplerFilterMinmaxProperties sampler_filter_minmax;
      VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore;

      /* Vulkan 1.3 */
      VkPhysicalDeviceInlineUniformBlockProperties inline_uniform_block;
      VkPhysicalDeviceMaintenance4Properties maintenance4;
      VkPhysicalDeviceShaderIntegerDotProductProperties
         shader_integer_dot_product;
      VkPhysicalDeviceSubgroupSizeControlProperties subgroup_size_control;
      VkPhysicalDeviceTexelBufferAlignmentProperties texel_buffer_alignment;
   } local_props;

   /* Clear the structs so all unqueried properties will be well-defined. */
   memset(props, 0, sizeof(*props));
   memset(&local_props, 0, sizeof(local_props));

   assert(renderer_version >= VK_API_VERSION_1_1);

   /* clang-format off */

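   /* Mirror the feature query above: on a 1.2+ renderer the core
    * Vulkan11/Vulkan12 property structs land directly in props, while on a
    * 1.1 renderer the individual promoted-extension structs are gathered in
    * local_props and copied into the core structs after the query below.
    */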
   if (renderer_version >= VK_API_VERSION_1_2) {
      VN_ADD_PNEXT(props2, VULKAN_1_1_PROPERTIES, props->vulkan_1_1);
      VN_ADD_PNEXT(props2, VULKAN_1_2_PROPERTIES, props->vulkan_1_2);
   } else {
      /* Vulkan 1.1 */
      VN_ADD_PNEXT(props2, ID_PROPERTIES, local_props.id);
      VN_ADD_PNEXT(props2, MAINTENANCE_3_PROPERTIES, local_props.maintenance_3);
      VN_ADD_PNEXT(props2, MULTIVIEW_PROPERTIES, local_props.multiview);
      VN_ADD_PNEXT(props2, POINT_CLIPPING_PROPERTIES, local_props.point_clipping);
      VN_ADD_PNEXT(props2, PROTECTED_MEMORY_PROPERTIES, local_props.protected_memory);
      VN_ADD_PNEXT(props2, SUBGROUP_PROPERTIES, local_props.subgroup);

      /* Vulkan 1.2 */
      VN_ADD_PNEXT_EXT(props2, DEPTH_STENCIL_RESOLVE_PROPERTIES, local_props.depth_stencil_resolve, exts->KHR_depth_stencil_resolve);
      VN_ADD_PNEXT_EXT(props2, DESCRIPTOR_INDEXING_PROPERTIES, local_props.descriptor_indexing, exts->EXT_descriptor_indexing);
      VN_ADD_PNEXT_EXT(props2, DRIVER_PROPERTIES, local_props.driver, exts->KHR_driver_properties);
      VN_ADD_PNEXT_EXT(props2, FLOAT_CONTROLS_PROPERTIES, local_props.float_controls, exts->KHR_shader_float_controls);
      VN_ADD_PNEXT_EXT(props2, SAMPLER_FILTER_MINMAX_PROPERTIES, local_props.sampler_filter_minmax, exts->EXT_sampler_filter_minmax);
      VN_ADD_PNEXT_EXT(props2, TIMELINE_SEMAPHORE_PROPERTIES, local_props.timeline_semaphore, exts->KHR_timeline_semaphore);
   }

   if (renderer_version >= VK_API_VERSION_1_3) {
      VN_ADD_PNEXT(props2, VULKAN_1_3_PROPERTIES, props->vulkan_1_3);
   } else {
      VN_ADD_PNEXT_EXT(props2, INLINE_UNIFORM_BLOCK_PROPERTIES, local_props.inline_uniform_block, exts->EXT_inline_uniform_block);
      VN_ADD_PNEXT_EXT(props2, MAINTENANCE_4_PROPERTIES, local_props.maintenance4, exts->KHR_maintenance4);
      VN_ADD_PNEXT_EXT(props2, SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, local_props.shader_integer_dot_product, exts->KHR_shader_integer_dot_product);
      VN_ADD_PNEXT_EXT(props2, SUBGROUP_SIZE_CONTROL_PROPERTIES, local_props.subgroup_size_control, exts->EXT_subgroup_size_control);
      VN_ADD_PNEXT_EXT(props2, TEXEL_BUFFER_ALIGNMENT_PROPERTIES, local_props.texel_buffer_alignment, exts->EXT_texel_buffer_alignment);
   }

   /* KHR */
   VN_ADD_PNEXT_EXT(props2, PUSH_DESCRIPTOR_PROPERTIES_KHR, props->push_descriptor, exts->KHR_push_descriptor);

   /* EXT */
   VN_ADD_PNEXT_EXT(props2, CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, props->conservative_rasterization, exts->EXT_conservative_rasterization);
   VN_ADD_PNEXT_EXT(props2, CUSTOM_BORDER_COLOR_PROPERTIES_EXT, props->custom_border_color, exts->EXT_custom_border_color);
   VN_ADD_PNEXT_EXT(props2, EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, props->extended_dynamic_state_3, exts->EXT_extended_dynamic_state3);
   VN_ADD_PNEXT_EXT(props2, GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, props->graphics_pipeline_library, exts->EXT_graphics_pipeline_library);
   VN_ADD_PNEXT_EXT(props2, LINE_RASTERIZATION_PROPERTIES_EXT, props->line_rasterization, exts->EXT_line_rasterization);
   VN_ADD_PNEXT_EXT(props2, MULTI_DRAW_PROPERTIES_EXT, props->multi_draw, exts->EXT_multi_draw);
   VN_ADD_PNEXT_EXT(props2, PCI_BUS_INFO_PROPERTIES_EXT, props->pci_bus_info, exts->EXT_pci_bus_info);
   VN_ADD_PNEXT_EXT(props2, PROVOKING_VERTEX_PROPERTIES_EXT, props->provoking_vertex, exts->EXT_provoking_vertex);
   VN_ADD_PNEXT_EXT(props2, ROBUSTNESS_2_PROPERTIES_EXT, props->robustness_2, exts->EXT_robustness2);
   VN_ADD_PNEXT_EXT(props2, TRANSFORM_FEEDBACK_PROPERTIES_EXT, props->transform_feedback, exts->EXT_transform_feedback);
   VN_ADD_PNEXT_EXT(props2, VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, props->vertex_attribute_divisor, exts->EXT_vertex_attribute_divisor);

   /* clang-format on */

   vn_call_vkGetPhysicalDeviceProperties2(
      instance->ring.ring, vn_physical_device_to_handle(physical_dev),
      &props2);

   VkPhysicalDeviceProperties *vk10_props = &props->vulkan_1_0;
   VkPhysicalDeviceVulkan11Properties *vk11_props = &props->vulkan_1_1;
   VkPhysicalDeviceVulkan12Properties *vk12_props = &props->vulkan_1_2;
   VkPhysicalDeviceVulkan13Properties *vk13_props = &props->vulkan_1_3;

   *vk10_props = props2.properties;

   /* clang-format off */

   /* See comment for sparse binding feature disable */
   if (physical_dev->sparse_binding_disabled) {
      VN_SET_CORE_VALUE(vk10_props, limits.sparseAddressSpaceSize, 0);
      VN_SET_CORE_VALUE(vk10_props, sparseProperties, (VkPhysicalDeviceSparseProperties){ 0 });
   }

   if (renderer_version < VK_API_VERSION_1_2) {
      /* Vulkan 1.1 */
      VN_SET_CORE_ARRAY(vk11_props, deviceUUID, local_props.id);
      VN_SET_CORE_ARRAY(vk11_props, driverUUID, local_props.id);
      VN_SET_CORE_ARRAY(vk11_props, deviceLUID, local_props.id);
      VN_SET_CORE_FIELD(vk11_props, deviceNodeMask, local_props.id);
      VN_SET_CORE_FIELD(vk11_props, deviceLUIDValid, local_props.id);

      /* Cannot use macro because names differ. */
      vk11_props->subgroupSize = local_props.subgroup.subgroupSize;
      vk11_props->subgroupSupportedStages = local_props.subgroup.supportedStages;
      vk11_props->subgroupSupportedOperations = local_props.subgroup.supportedOperations;
      vk11_props->subgroupQuadOperationsInAllStages = local_props.subgroup.quadOperationsInAllStages;

      VN_SET_CORE_FIELD(vk11_props, pointClippingBehavior, local_props.point_clipping);
      VN_SET_CORE_FIELD(vk11_props, maxMultiviewViewCount, local_props.multiview);
      VN_SET_CORE_FIELD(vk11_props, maxMultiviewInstanceIndex, local_props.multiview);
      VN_SET_CORE_FIELD(vk11_props, protectedNoFault, local_props.protected_memory);
      VN_SET_CORE_FIELD(vk11_props, maxPerSetDescriptors, local_props.maintenance_3);
      VN_SET_CORE_FIELD(vk11_props, maxMemoryAllocationSize, local_props.maintenance_3);

      /* Vulkan 1.2 */
      if (exts->KHR_driver_properties) {
         VN_SET_CORE_FIELD(vk12_props, driverID, local_props.driver);
         VN_SET_CORE_ARRAY(vk12_props, driverName, local_props.driver);
         VN_SET_CORE_ARRAY(vk12_props, driverInfo, local_props.driver);
         VN_SET_CORE_FIELD(vk12_props, conformanceVersion, local_props.driver);
      }
      if (exts->KHR_shader_float_controls) {
         VN_SET_CORE_FIELD(vk12_props, denormBehaviorIndependence, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, roundingModeIndependence, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderSignedZeroInfNanPreserveFloat16, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderSignedZeroInfNanPreserveFloat32, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderSignedZeroInfNanPreserveFloat64, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderDenormPreserveFloat16, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderDenormPreserveFloat32, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderDenormPreserveFloat64, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderDenormFlushToZeroFloat16, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderDenormFlushToZeroFloat32, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderDenormFlushToZeroFloat64, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderRoundingModeRTEFloat16, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderRoundingModeRTEFloat32, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderRoundingModeRTEFloat64, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderRoundingModeRTZFloat16, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderRoundingModeRTZFloat32, local_props.float_controls);
         VN_SET_CORE_FIELD(vk12_props, shaderRoundingModeRTZFloat64, local_props.float_controls);
      }
      if (exts->EXT_descriptor_indexing) {
         VN_SET_CORE_FIELD(vk12_props, maxUpdateAfterBindDescriptorsInAllPools, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, shaderUniformBufferArrayNonUniformIndexingNative, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, shaderSampledImageArrayNonUniformIndexingNative, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, shaderStorageBufferArrayNonUniformIndexingNative, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, shaderStorageImageArrayNonUniformIndexingNative, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, shaderInputAttachmentArrayNonUniformIndexingNative, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, robustBufferAccessUpdateAfterBind, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, quadDivergentImplicitLod, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxPerStageDescriptorUpdateAfterBindSamplers, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxPerStageDescriptorUpdateAfterBindUniformBuffers, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxPerStageDescriptorUpdateAfterBindStorageBuffers, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxPerStageDescriptorUpdateAfterBindSampledImages, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxPerStageDescriptorUpdateAfterBindStorageImages, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxPerStageDescriptorUpdateAfterBindInputAttachments, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxPerStageUpdateAfterBindResources, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxDescriptorSetUpdateAfterBindSamplers, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxDescriptorSetUpdateAfterBindUniformBuffers, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxDescriptorSetUpdateAfterBindStorageBuffers, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxDescriptorSetUpdateAfterBindSampledImages, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxDescriptorSetUpdateAfterBindStorageImages, local_props.descriptor_indexing);
         VN_SET_CORE_FIELD(vk12_props, maxDescriptorSetUpdateAfterBindInputAttachments, local_props.descriptor_indexing);
      }
      if (exts->KHR_depth_stencil_resolve) {
         VN_SET_CORE_FIELD(vk12_props, supportedDepthResolveModes, local_props.depth_stencil_resolve);
         VN_SET_CORE_FIELD(vk12_props, supportedStencilResolveModes, local_props.depth_stencil_resolve);
         VN_SET_CORE_FIELD(vk12_props, independentResolveNone, local_props.depth_stencil_resolve);
         VN_SET_CORE_FIELD(vk12_props, independentResolve, local_props.depth_stencil_resolve);
      }
      if (exts->EXT_sampler_filter_minmax) {
         VN_SET_CORE_FIELD(vk12_props, filterMinmaxSingleComponentFormats, local_props.sampler_filter_minmax);
         VN_SET_CORE_FIELD(vk12_props, filterMinmaxImageComponentMapping, local_props.sampler_filter_minmax);
      }
      if (exts->KHR_timeline_semaphore) {
         VN_SET_CORE_FIELD(vk12_props, maxTimelineSemaphoreValueDifference, local_props.timeline_semaphore);
      }

      VN_SET_CORE_VALUE(vk12_props, framebufferIntegerColorSampleCounts, VK_SAMPLE_COUNT_1_BIT);
   }

   if (renderer_version < VK_API_VERSION_1_3) {
      if (exts->EXT_subgroup_size_control) {
         VN_SET_CORE_FIELD(vk13_props, minSubgroupSize, local_props.subgroup_size_control);
         VN_SET_CORE_FIELD(vk13_props, maxSubgroupSize, local_props.subgroup_size_control);
         VN_SET_CORE_FIELD(vk13_props, maxComputeWorkgroupSubgroups, local_props.subgroup_size_control);
         VN_SET_CORE_FIELD(vk13_props, requiredSubgroupSizeStages, local_props.subgroup_size_control);
      }
      if (exts->EXT_inline_uniform_block) {
         VN_SET_CORE_FIELD(vk13_props, maxInlineUniformBlockSize, local_props.inline_uniform_block);
         VN_SET_CORE_FIELD(vk13_props, maxPerStageDescriptorInlineUniformBlocks, local_props.inline_uniform_block);
         VN_SET_CORE_FIELD(vk13_props, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, local_props.inline_uniform_block);
         VN_SET_CORE_FIELD(vk13_props, maxDescriptorSetInlineUniformBlocks, local_props.inline_uniform_block);
         VN_SET_CORE_FIELD(vk13_props, maxDescriptorSetUpdateAfterBindInlineUniformBlocks, local_props.inline_uniform_block);
      }
      if (exts->KHR_shader_integer_dot_product) {
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct8BitUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct8BitSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct8BitMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct4x8BitPackedUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct4x8BitPackedSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct4x8BitPackedMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct16BitUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct16BitSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct16BitMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct32BitUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct32BitSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct32BitMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct64BitUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct64BitSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProduct64BitMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating8BitSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating16BitSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating32BitSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating64BitSignedAccelerated, local_props.shader_integer_dot_product);
         VN_SET_CORE_FIELD(vk13_props, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, local_props.shader_integer_dot_product);
      }
      if (exts->EXT_texel_buffer_alignment) {
         VN_SET_CORE_FIELD(vk13_props, storageTexelBufferOffsetAlignmentBytes, local_props.texel_buffer_alignment);
         VN_SET_CORE_FIELD(vk13_props, storageTexelBufferOffsetSingleTexelAlignment, local_props.texel_buffer_alignment);
         VN_SET_CORE_FIELD(vk13_props, uniformTexelBufferOffsetAlignmentBytes, local_props.texel_buffer_alignment);
         VN_SET_CORE_FIELD(vk13_props, uniformTexelBufferOffsetSingleTexelAlignment, local_props.texel_buffer_alignment);
      }
      if (exts->KHR_maintenance4) {
         VN_SET_CORE_FIELD(vk13_props, maxBufferSize, local_props.maintenance4);
      }
   }

   /* clang-format on */

   const uint32_t version_override = vk_get_version_override();
   if (version_override) {
      vk10_props->apiVersion = version_override;
   } else {
      /* cap the advertised api version */
      uint32_t ver = MIN3(vk10_props->apiVersion, VN_MAX_API_VERSION,
                          instance->renderer->info.vk_xml_version);
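      /* The MIN3 result may carry a patch version newer than what the device
       * actually reports; clamp the patch back down to the device's.
       */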
      if (VK_VERSION_PATCH(ver) > VK_VERSION_PATCH(vk10_props->apiVersion)) {
         ver = ver - VK_VERSION_PATCH(ver) +
               VK_VERSION_PATCH(vk10_props->apiVersion);
      }

      /* Clamp to 1.2 if we disabled VK_KHR_synchronization2 since it
       * is required for 1.3.
       * See vn_physical_device_get_passthrough_extensions()
       */
      if (!physical_dev->base.base.supported_extensions.KHR_synchronization2)
         ver = MIN2(VK_API_VERSION_1_2, ver);

      vk10_props->apiVersion = ver;
   }

   /* ANGLE relies on the ARM proprietary driver version for workarounds */
   const char *engine_name = instance->base.base.app_info.engine_name;
   const bool forward_driver_version =
      vk12_props->driverID == VK_DRIVER_ID_ARM_PROPRIETARY && engine_name &&
      strcmp(engine_name, "ANGLE") == 0;
   if (!forward_driver_version)
      vk10_props->driverVersion = vk_get_driver_version();

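   /* Decorate the renderer's deviceName. If snprintf truncated the result,
    * overwrite the tail with "...)" so the string still ends with a closing
    * paren within VK_MAX_PHYSICAL_DEVICE_NAME_SIZE.
    */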
   char device_name[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
   int device_name_len =
      snprintf(device_name, sizeof(device_name), "Virtio-GPU Venus (%s)",
               vk10_props->deviceName);
   if (device_name_len >= VK_MAX_PHYSICAL_DEVICE_NAME_SIZE) {
      memcpy(device_name + VK_MAX_PHYSICAL_DEVICE_NAME_SIZE - 5, "...)", 4);
      device_name_len = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE - 1;
   }
   memcpy(vk10_props->deviceName, device_name, device_name_len + 1);

   /* store renderer VkDriverId for implementation specific workarounds */
   physical_dev->renderer_driver_id = vk12_props->driverID;
   VN_SET_CORE_VALUE(vk12_props, driverID, VK_DRIVER_ID_MESA_VENUS);

   snprintf(vk12_props->driverName, sizeof(vk12_props->driverName), "venus");
   snprintf(vk12_props->driverInfo, sizeof(vk12_props->driverInfo),
            "Mesa " PACKAGE_VERSION MESA_GIT_SHA1);

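   /* The renderer's driver identity is overridden above, so advertise the
    * conformance version of Venus itself as well.
    */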
   VN_SET_CORE_VALUE(vk12_props, conformanceVersion.major, 1);
   VN_SET_CORE_VALUE(vk12_props, conformanceVersion.minor, 3);
   VN_SET_CORE_VALUE(vk12_props, conformanceVersion.subminor, 0);
   VN_SET_CORE_VALUE(vk12_props, conformanceVersion.patch, 0);

   vn_physical_device_init_uuids(physical_dev);
}

static VkResult
vn_physical_device_init_queue_family_properties(
   struct vn_physical_device *physical_dev)
{
   struct vn_instance *instance = physical_dev->instance;
   struct vn_ring *ring = instance->ring.ring;
   const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
   uint32_t count;

   vn_call_vkGetPhysicalDeviceQueueFamilyProperties2(
      ring, vn_physical_device_to_handle(physical_dev), &count, NULL);

   VkQueueFamilyProperties2 *props =
      vk_alloc(alloc, sizeof(*props) * count, VN_DEFAULT_ALIGN,
               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!props)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   for (uint32_t i = 0; i < count; i++) {
      props[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
      props[i].pNext = NULL;
   }
   vn_call_vkGetPhysicalDeviceQueueFamilyProperties2(
      ring, vn_physical_device_to_handle(physical_dev), &count, props);

   /* Filter out queue families that exclusively support sparse binding as
    * we need additional support for submitting feedback commands
    */
   uint32_t sparse_count = 0;
   uint32_t non_sparse_only_count = 0;
   for (uint32_t i = 0; i < count; i++) {
      if (props[i].queueFamilyProperties.queueFlags &
          ~VK_QUEUE_SPARSE_BINDING_BIT) {
         props[non_sparse_only_count++].queueFamilyProperties =
            props[i].queueFamilyProperties;
      }
      if (props[i].queueFamilyProperties.queueFlags &
          VK_QUEUE_SPARSE_BINDING_BIT) {
         sparse_count++;
      }
   }

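   /* Disable sparse binding when requested via VN_DEBUG(NO_SPARSE), or when
    * sparse-capable families exist but all of them are sparse-only and thus
    * got filtered out above (the two counts summing to the total means the
    * sets are disjoint).
    */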
   if (VN_DEBUG(NO_SPARSE) ||
       (sparse_count && non_sparse_only_count + sparse_count == count))
      physical_dev->sparse_binding_disabled = true;

   physical_dev->queue_family_properties = props;
   physical_dev->queue_family_count = non_sparse_only_count;

   return VK_SUCCESS;
}

static void
vn_physical_device_init_memory_properties(
   struct vn_physical_device *physical_dev)
{
   struct vn_instance *instance = physical_dev->instance;
   struct vn_ring *ring = instance->ring.ring;
   VkPhysicalDeviceMemoryProperties2 props2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
   };
   vn_call_vkGetPhysicalDeviceMemoryProperties2(
      ring, vn_physical_device_to_handle(physical_dev), &props2);

   physical_dev->memory_properties = props2.memoryProperties;

   /* The kernel makes every mapping coherent. If a memory type is truly
    * incoherent, it's better to remove the host-visible flag than to
    * silently make it coherent. However, for app compatibility purposes,
    * when a coherent-cached memory type is unavailable, we emulate the first
    * cached memory type with the first coherent memory type.
    */
   uint32_t coherent_uncached = VK_MAX_MEMORY_TYPES;
   uint32_t incoherent_cached = VK_MAX_MEMORY_TYPES;
   VkPhysicalDeviceMemoryProperties *props = &physical_dev->memory_properties;
   for (uint32_t i = 0; i < props->memoryTypeCount; i++) {
      const VkMemoryPropertyFlags flags = props->memoryTypes[i].propertyFlags;
      const bool coherent = flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
      const bool cached = flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
      if (coherent && cached) {
         coherent_uncached = VK_MAX_MEMORY_TYPES;
         incoherent_cached = VK_MAX_MEMORY_TYPES;
         break;
      } else if (coherent && coherent_uncached == VK_MAX_MEMORY_TYPES) {
         coherent_uncached = i;
      } else if (cached && incoherent_cached == VK_MAX_MEMORY_TYPES) {
         incoherent_cached = i;
      }
   }

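   /* Rewrite the memory types: the emulated cached type aliases the heap of
    * the first coherent type, and any remaining incoherent types drop their
    * host-visible and host-cached flags.
    */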
   for (uint32_t i = 0; i < props->memoryTypeCount; i++) {
      VkMemoryType *type = &props->memoryTypes[i];
      if (i == incoherent_cached) {
         /* Only get here if no coherent+cached type is available, and the
          * spec guarantees that there is at least one coherent type, so it
          * must be coherent+uncached, hence the index is always valid.
          */
         assert(coherent_uncached < props->memoryTypeCount);
         type->heapIndex = props->memoryTypes[coherent_uncached].heapIndex;
      } else if (!(type->propertyFlags &
                   VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
         type->propertyFlags &= ~(VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                                  VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
      }
   }

   physical_dev->coherent_uncached = coherent_uncached;
   physical_dev->incoherent_cached = incoherent_cached;
}

static void
vn_physical_device_init_external_memory(
   struct vn_physical_device *physical_dev)
{
   /* When a renderer VkDeviceMemory is exportable, we can create a
    * vn_renderer_bo from it. The vn_renderer_bo can be freely exported as an
    * opaque fd or a dma-buf.
    *
    * When an external memory can be imported as a vn_renderer_bo, that bo
    * might be imported as a renderer side VkDeviceMemory.
    *
    * However, to know whether a renderer VkDeviceMemory is exportable or
    * whether a bo can be imported as a renderer VkDeviceMemory, we have to
    * start from physical device external image and external buffer
    * properties queries, which requires knowing the renderer supported
    * external handle types. Such info can be reliably retrieved from the
    * external memory extensions advertised by the renderer.
    *
    * We require VK_EXT_external_memory_dma_buf to expose driver side external
    * memory support for a renderer running on Linux. As a comparison, when
    * the renderer runs on Windows, VK_KHR_external_memory_win32 might be
    * required for the same.
    *
    * For vtest, the protocol does not support external memory import. So we
    * only mask out the importable bit so that WSI over vtest can be
    * supported.
    */
   if (physical_dev->renderer_extensions.EXT_external_memory_dma_buf) {
      physical_dev->external_memory.renderer_handle_type =
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;

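      /* Expose AHardwareBuffer handles on Android; otherwise expose opaque
       * fd and dma-buf handles directly.
       */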
#if DETECT_OS_ANDROID
      physical_dev->external_memory.supported_handle_types |=
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
#else  /* DETECT_OS_ANDROID */
      physical_dev->external_memory.supported_handle_types =
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT |
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
#endif /* DETECT_OS_ANDROID */
   }
}

static void
vn_physical_device_init_external_fence_handles(
   struct vn_physical_device *physical_dev)
{
   /* The current code manipulates the host-side VkFence directly.
    * vkWaitForFences is translated to repeated vkGetFenceStatus.
    *
    * External fence is not possible currently.  Instead, we cheat by
    * translating vkGetFenceFdKHR to an empty renderer submission for the
    * out fence, along with a venus protocol command to fix renderer side
    * fence payload.
    *
    * We would like to create a vn_renderer_sync from a host-side VkFence,
    * similar to how a vn_renderer_bo is created from a host-side
    * VkDeviceMemory.  That would require kernel support and tons of work on
    * the host side.  If we had that, and we kept both the vn_renderer_sync
    * and the host-side VkFence in sync, we would have the freedom to use
    * either of them depending on the occasions, and support external fences
    * and idle waiting.
    */
   if (physical_dev->renderer_extensions.KHR_external_fence_fd) {
      struct vn_ring *ring = physical_dev->instance->ring.ring;
      const VkPhysicalDeviceExternalFenceInfo info = {
         .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
      };
      VkExternalFenceProperties props = {
         .sType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
      };
      vn_call_vkGetPhysicalDeviceExternalFenceProperties(
         ring, vn_physical_device_to_handle(physical_dev), &info, &props);

      physical_dev->renderer_sync_fd.fence_exportable =
         props.externalFenceFeatures &
         VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT;
   }

   physical_dev->external_fence_handles = 0;

   if (physical_dev->instance->renderer->info.has_external_sync) {
      physical_dev->external_fence_handles =
         VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
   }
}

896 static void
vn_physical_device_init_external_semaphore_handles(struct vn_physical_device * physical_dev)897 vn_physical_device_init_external_semaphore_handles(
898    struct vn_physical_device *physical_dev)
899 {
900    /* The current code manipulates the host-side VkSemaphore directly.  It
901     * works very well for binary semaphores because there is no CPU operation.
902     * But for timeline semaphores, the situation is similar to that of fences.
903     * vkWaitSemaphores is translated to repeated vkGetSemaphoreCounterValue.
904     *
905     * External semaphores are not currently possible.  Instead, we cheat
906     * when the semaphore is binary and the handle type is sync file: we do
907     * an empty renderer submission for the out fence, along with a venus
908     * protocol command to fix up the renderer-side semaphore payload.
909     *
910     * We would like to create a vn_renderer_sync from a host-side VkSemaphore,
911     * similar to how a vn_renderer_bo is created from a host-side
912     * VkDeviceMemory.  The reasoning is the same as that for fences.
913     * Additionally, we would like the sync file exported from the
914     * vn_renderer_sync to carry the necessary information to identify the
915     * host-side VkSemaphore.  That would allow the consumers to wait on the
916     * host side rather than the guest side.
917     */
918    if (physical_dev->renderer_extensions.KHR_external_semaphore_fd) {
919       struct vn_ring *ring = physical_dev->instance->ring.ring;
920       const VkPhysicalDeviceExternalSemaphoreInfo info = {
921          .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
922          .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
923       };
924       VkExternalSemaphoreProperties props = {
925          .sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
926       };
927       vn_call_vkGetPhysicalDeviceExternalSemaphoreProperties(
928          ring, vn_physical_device_to_handle(physical_dev), &info, &props);
929 
930       physical_dev->renderer_sync_fd.semaphore_exportable =
931          props.externalSemaphoreFeatures &
932          VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT;
933       physical_dev->renderer_sync_fd.semaphore_importable =
934          props.externalSemaphoreFeatures &
935          VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
936    }
937 
938    physical_dev->external_binary_semaphore_handles = 0;
939    physical_dev->external_timeline_semaphore_handles = 0;
940 
941    if (physical_dev->instance->renderer->info.has_external_sync) {
942       physical_dev->external_binary_semaphore_handles =
943          VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
944    }
945 }
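
/* As with fences, an illustrative sketch of the timeline semaphore wait
 * translation mentioned above (wrapper names hypothetical):
 *
 *    uint64_t counter = 0;
 *    do {
 *       vn_call_vkGetSemaphoreCounterValue(ring, dev_handle, sem_handle,
 *                                          &counter);
 *    } while (counter < wait_value && os_time_get_nano() < abs_timeout);
 */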
946 
947 static inline bool
948 vn_physical_device_get_external_memory_support(
949    const struct vn_physical_device *physical_dev)
950 {
951    if (!physical_dev->external_memory.renderer_handle_type)
952       return false;
953 
954    /* see vn_physical_device_init_external_memory */
955    if (physical_dev->external_memory.renderer_handle_type ==
956        VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) {
957       const struct vk_device_extension_table *renderer_exts =
958          &physical_dev->renderer_extensions;
959       return renderer_exts->EXT_image_drm_format_modifier &&
960              renderer_exts->EXT_queue_family_foreign;
961    }
962 
963    /* expand support once the renderer can run on non-Linux platforms */
964    return false;
965 }
966 
967 static void
968 vn_physical_device_get_native_extensions(
969    const struct vn_physical_device *physical_dev,
970    struct vk_device_extension_table *exts)
971 {
972    memset(exts, 0, sizeof(*exts));
973 
974    if (physical_dev->instance->renderer->info.has_external_sync &&
975        physical_dev->renderer_sync_fd.fence_exportable)
976       exts->KHR_external_fence_fd = true;
977 
978    if (physical_dev->instance->renderer->info.has_external_sync &&
979        physical_dev->renderer_sync_fd.semaphore_importable &&
980        physical_dev->renderer_sync_fd.semaphore_exportable)
981       exts->KHR_external_semaphore_fd = true;
982 
983    const bool can_external_mem =
984       vn_physical_device_get_external_memory_support(physical_dev);
985    if (can_external_mem) {
986 #if DETECT_OS_ANDROID
987       exts->ANDROID_external_memory_android_hardware_buffer = true;
988 
989       /* For wsi, we require the renderer to support:
990        * - semaphore sync fd import for queue submission to skip scrubbing the
991        *   wsi wait semaphores.
992        * - fence sync fd export for QueueSignalReleaseImageANDROID to export a
993        *   sync fd.
994        *
995        * TODO: relax these requirements by:
996        * - properly scrubbing wsi wait semaphores
997        * - not creating external fence but exporting sync fd directly
998        */
999       if (physical_dev->renderer_sync_fd.semaphore_importable &&
1000           physical_dev->renderer_sync_fd.fence_exportable)
1001          exts->ANDROID_native_buffer = true;
1002 #else  /* DETECT_OS_ANDROID */
1003       exts->KHR_external_memory_fd = true;
1004       exts->EXT_external_memory_dma_buf = true;
1005 #endif /* DETECT_OS_ANDROID */
1006    }
1007 
1008 #ifdef VN_USE_WSI_PLATFORM
1009    if (can_external_mem &&
1010        physical_dev->renderer_sync_fd.semaphore_importable) {
1011       exts->KHR_incremental_present = true;
1012       exts->KHR_swapchain = true;
1013       exts->KHR_swapchain_mutable_format = true;
1014    }
1015 
1016    /* VK_EXT_pci_bus_info is required by common wsi to decide whether native
1017     * image or prime blit is used. Meanwhile, venus must stay on the
1018     * native image path for proper fencing.
1019     * - For virtgpu, VK_EXT_pci_bus_info is natively supported.
1020     * - For vtest, pci bus info must be queried from the renderer-side physical
1021     *   device to be compared against the render node opened by common wsi.
1022     */
1023    exts->EXT_pci_bus_info =
1024       physical_dev->instance->renderer->info.pci.has_bus_info ||
1025       physical_dev->renderer_extensions.EXT_pci_bus_info;
1026 #endif
1027 
1028    exts->EXT_physical_device_drm = true;
1029    /* use common implementation */
1030    exts->EXT_tooling_info = true;
1031    exts->EXT_device_memory_report = true;
1032 }
1033 
1034 static void
1035 vn_physical_device_get_passthrough_extensions(
1036    const struct vn_physical_device *physical_dev,
1037    struct vk_device_extension_table *exts)
1038 {
1039    *exts = (struct vk_device_extension_table){
1040       /* promoted to VK_VERSION_1_1 */
1041       .KHR_16bit_storage = true,
1042       .KHR_bind_memory2 = true,
1043       .KHR_dedicated_allocation = true,
1044       .KHR_descriptor_update_template = true,
1045       .KHR_device_group = true,
1046       .KHR_external_fence = true,
1047       .KHR_external_memory = true,
1048       .KHR_external_semaphore = true,
1049       .KHR_get_memory_requirements2 = true,
1050       .KHR_maintenance1 = true,
1051       .KHR_maintenance2 = true,
1052       .KHR_maintenance3 = true,
1053       .KHR_multiview = true,
1054       .KHR_relaxed_block_layout = true,
1055       .KHR_sampler_ycbcr_conversion = true,
1056       .KHR_shader_draw_parameters = true,
1057       .KHR_storage_buffer_storage_class = true,
1058       .KHR_variable_pointers = true,
1059 
1060       /* promoted to VK_VERSION_1_2 */
1061       .KHR_8bit_storage = true,
1062       .KHR_buffer_device_address = true,
1063       .KHR_create_renderpass2 = true,
1064       .KHR_depth_stencil_resolve = true,
1065       .KHR_draw_indirect_count = true,
1066       .KHR_driver_properties = true,
1067       .KHR_image_format_list = true,
1068       .KHR_imageless_framebuffer = true,
1069       .KHR_sampler_mirror_clamp_to_edge = true,
1070       .KHR_separate_depth_stencil_layouts = true,
1071       .KHR_shader_atomic_int64 = true,
1072       .KHR_shader_float16_int8 = true,
1073       .KHR_shader_float_controls = true,
1074       .KHR_shader_subgroup_extended_types = true,
1075       .KHR_spirv_1_4 = true,
1076       .KHR_timeline_semaphore = true,
1077       .KHR_uniform_buffer_standard_layout = true,
1078       .KHR_vulkan_memory_model = true,
1079       .EXT_descriptor_indexing = true,
1080       .EXT_host_query_reset = true,
1081       .EXT_sampler_filter_minmax = true,
1082       .EXT_scalar_block_layout = true,
1083       .EXT_separate_stencil_usage = true,
1084       .EXT_shader_viewport_index_layer = true,
1085 
1086       /* promoted to VK_VERSION_1_3 */
1087       .KHR_copy_commands2 = true,
1088       .KHR_dynamic_rendering = true,
1089       .KHR_format_feature_flags2 = true,
1090       .KHR_maintenance4 = true,
1091       .KHR_shader_integer_dot_product = true,
1092       .KHR_shader_non_semantic_info = true,
1093       .KHR_shader_terminate_invocation = true,
1094       /* Our implementation requires semaphore sync fd import
1095        * for VK_KHR_synchronization2.
1096        */
1097       .KHR_synchronization2 =
1098          physical_dev->renderer_sync_fd.semaphore_importable,
1099       .KHR_zero_initialize_workgroup_memory = true,
1100       .EXT_4444_formats = true,
1101       .EXT_extended_dynamic_state = true,
1102       .EXT_extended_dynamic_state2 = true,
1103       .EXT_image_robustness = true,
1104       .EXT_inline_uniform_block = true,
1105       .EXT_pipeline_creation_cache_control = true,
1106       /* TODO(VK_EXT_pipeline_creation_feedback): The native implementation
1107        * invalidates all feedback. Teach the venus protocol to receive valid
1108        * feedback from the renderer.
1109        *
1110        * Even though we implement this natively, we still require host driver
1111        * support to avoid invalid usage in the renderer, because we (the guest
1112        * driver) do not scrub the extension bits from the
1113        * VkGraphicsPipelineCreateInfo pNext chain.  The host driver still
1114        * writes feedback into VkPipelineCreationFeedback, which is harmless,
1115        * but the renderer does not send the returned feedback to us due to
1116        * protocol deficiencies.
1117        */
1118       .EXT_pipeline_creation_feedback = true,
1119       .EXT_shader_demote_to_helper_invocation = true,
1120       .EXT_subgroup_size_control = true,
1121       .EXT_texel_buffer_alignment = true,
1122       .EXT_texture_compression_astc_hdr = true,
1123       .EXT_ycbcr_2plane_444_formats = true,
1124 
1125       /* KHR */
1126       .KHR_pipeline_library = true,
1127       .KHR_push_descriptor = true,
1128       .KHR_shader_clock = true,
1129       .KHR_shader_expect_assume = true,
1130 
1131       /* EXT */
1132       .EXT_border_color_swizzle = true,
1133       .EXT_calibrated_timestamps = true,
1134       .EXT_color_write_enable = true,
1135       .EXT_conditional_rendering = true,
1136       .EXT_conservative_rasterization = true,
1137       .EXT_custom_border_color = true,
1138       .EXT_depth_clip_control = true,
1139       .EXT_depth_clip_enable = true,
1140       .EXT_extended_dynamic_state3 = true,
1141       .EXT_dynamic_rendering_unused_attachments = true,
1142       .EXT_fragment_shader_interlock = true,
1143       .EXT_graphics_pipeline_library = !VN_DEBUG(NO_GPL),
1144       .EXT_image_2d_view_of_3d = true,
1145       .EXT_image_drm_format_modifier = true,
1146       .EXT_image_view_min_lod = true,
1147       .EXT_index_type_uint8 = true,
1148       .EXT_line_rasterization = true,
1149       .EXT_load_store_op_none = true,
1150       /* TODO: re-enable after generic app compat issues are resolved */
1151       .EXT_memory_budget = false,
1152       .EXT_multi_draw = true,
1153       .EXT_mutable_descriptor_type = true,
1154       .EXT_non_seamless_cube_map = true,
1155       .EXT_primitive_topology_list_restart = true,
1156       .EXT_primitives_generated_query = true,
1157       /* TODO(VK_EXT_private_data): Support natively.
1158        *
1159        * We support this extension with a hybrid native/passthrough model
1160        * until we teach venus how to do deep surgery on pNext
1161        * chains to (a) remove VkDevicePrivateDataCreateInfo, (b) remove
1162        * VkPhysicalDevicePrivateDataFeatures, and (c) modify its bits in
1163        * VkPhysicalDeviceVulkan13Features.
1164        *
1165        * For now, we implement the extension functions natively by using
1166        * Mesa's common implementation. We pass through
1167        * VkDevicePrivateDataCreateInfo to the renderer, which is harmless.
1168        * We pass through the extension enablement and feature bits to the
1169        * renderer because otherwise VkDevicePrivateDataCreateInfo would
1170        * cause invalid usage in the renderer. Therefore, even though we
1171        * implement the extension natively, we expose the extension only if the
1172        * renderer supports it too.
1173        */
1174       .EXT_private_data = true,
1175       .EXT_provoking_vertex = true,
1176       .EXT_queue_family_foreign = true,
1177       .EXT_rasterization_order_attachment_access = true,
1178       .EXT_robustness2 = true,
1179       .EXT_shader_stencil_export = true,
1180       .EXT_shader_subgroup_ballot = true,
1181       .EXT_transform_feedback = true,
1182       .EXT_vertex_attribute_divisor = true,
1183       .EXT_vertex_input_dynamic_state = true,
1184 
1185       /* vendor */
1186       .VALVE_mutable_descriptor_type = true,
1187    };
1188 }
1189 
1190 static void
1191 vn_physical_device_init_supported_extensions(
1192    struct vn_physical_device *physical_dev)
1193 {
1194    struct vk_device_extension_table native;
1195    struct vk_device_extension_table passthrough;
1196    vn_physical_device_get_native_extensions(physical_dev, &native);
1197    vn_physical_device_get_passthrough_extensions(physical_dev, &passthrough);
1198 
1199    for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
1200       const VkExtensionProperties *props = &vk_device_extensions[i];
1201 
1202 #ifdef ANDROID_STRICT
1203       if (!vk_android_allowed_device_extensions.extensions[i])
1204          continue;
1205 #endif
1206 
1207       if (native.extensions[i]) {
1208          physical_dev->base.base.supported_extensions.extensions[i] = true;
1209          physical_dev->extension_spec_versions[i] = props->specVersion;
1210       } else if (passthrough.extensions[i] &&
1211                  physical_dev->renderer_extensions.extensions[i]) {
1212          physical_dev->base.base.supported_extensions.extensions[i] = true;
1213          physical_dev->extension_spec_versions[i] = MIN2(
1214             physical_dev->extension_spec_versions[i], props->specVersion);
1215       }
1216    }
1217 
1218    /* override VK_ANDROID_native_buffer spec version */
1219    if (native.ANDROID_native_buffer) {
1220       const uint32_t index =
1221          VN_EXTENSION_TABLE_INDEX(native, ANDROID_native_buffer);
1222       physical_dev->extension_spec_versions[index] =
1223          VN_ANDROID_NATIVE_BUFFER_SPEC_VERSION;
1224    }
1225 }
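
/* Worked example with made-up numbers: if the renderer reports an extension
 * at specVersion 4 while the encoder supports 2,
 * vn_physical_device_init_renderer_extensions() below stores MIN2(4, 2) = 2;
 * with guest headers at specVersion 3, the passthrough path above then
 * exposes MIN2(2, 3) = 2.
 */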
1226 
1227 static VkResult
1228 vn_physical_device_init_renderer_extensions(
1229    struct vn_physical_device *physical_dev)
1230 {
1231    struct vn_instance *instance = physical_dev->instance;
1232    struct vn_ring *ring = instance->ring.ring;
1233    const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1234 
1235    /* get renderer extensions */
1236    uint32_t count;
1237    VkResult result = vn_call_vkEnumerateDeviceExtensionProperties(
1238       ring, vn_physical_device_to_handle(physical_dev), NULL, &count, NULL);
1239    if (result != VK_SUCCESS)
1240       return result;
1241 
1242    VkExtensionProperties *exts = NULL;
1243    if (count) {
1244       exts = vk_alloc(alloc, sizeof(*exts) * count, VN_DEFAULT_ALIGN,
1245                       VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
1246       if (!exts)
1247          return VK_ERROR_OUT_OF_HOST_MEMORY;
1248 
1249       result = vn_call_vkEnumerateDeviceExtensionProperties(
1250          ring, vn_physical_device_to_handle(physical_dev), NULL, &count,
1251          exts);
1252       if (result < VK_SUCCESS) {
1253          vk_free(alloc, exts);
1254          return result;
1255       }
1256    }
1257 
1258    physical_dev->extension_spec_versions =
1259       vk_zalloc(alloc,
1260                 sizeof(*physical_dev->extension_spec_versions) *
1261                    VK_DEVICE_EXTENSION_COUNT,
1262                 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1263    if (!physical_dev->extension_spec_versions) {
1264       vk_free(alloc, exts);
1265       return VK_ERROR_OUT_OF_HOST_MEMORY;
1266    }
1267 
1268    for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
1269       const VkExtensionProperties *props = &vk_device_extensions[i];
1270       for (uint32_t j = 0; j < count; j++) {
1271          if (strcmp(props->extensionName, exts[j].extensionName))
1272             continue;
1273 
1274          /* check encoder support */
1275          const uint32_t enc_ext_spec_version =
1276             vn_extension_get_spec_version(props->extensionName);
1277          if (!enc_ext_spec_version)
1278             continue;
1279 
1280          physical_dev->renderer_extensions.extensions[i] = true;
1281          physical_dev->extension_spec_versions[i] =
1282             MIN2(exts[j].specVersion, enc_ext_spec_version);
1283 
1284          break;
1285       }
1286    }
1287 
1288    vk_free(alloc, exts);
1289 
1290    return VK_SUCCESS;
1291 }
1292 
1293 static VkResult
1294 vn_physical_device_init_renderer_version(
1295    struct vn_physical_device *physical_dev)
1296 {
1297    struct vn_instance *instance = physical_dev->instance;
1298    struct vn_ring *ring = instance->ring.ring;
1299 
1300    /*
1301     * We either check and enable VK_KHR_get_physical_device_properties2, or we
1302     * must use vkGetPhysicalDeviceProperties to get the device-level version.
1303     */
1304    VkPhysicalDeviceProperties props;
1305    vn_call_vkGetPhysicalDeviceProperties(
1306       ring, vn_physical_device_to_handle(physical_dev), &props);
1307    if (props.apiVersion < VN_MIN_RENDERER_VERSION) {
1308       if (VN_DEBUG(INIT)) {
1309          vn_log(instance, "%s has unsupported renderer device version %d.%d",
1310                 props.deviceName, VK_VERSION_MAJOR(props.apiVersion),
1311                 VK_VERSION_MINOR(props.apiVersion));
1312       }
1313       return VK_ERROR_INITIALIZATION_FAILED;
1314    }
1315 
1316    /* device version for internal use is capped */
1317    physical_dev->renderer_version =
1318       MIN3(props.apiVersion, instance->renderer_api_version,
1319            instance->renderer->info.vk_xml_version);
1320 
1321    return VK_SUCCESS;
1322 }
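
/* Worked example with made-up versions: a renderer device at apiVersion 1.3
 * behind a renderer instance at 1.2, with the protocol generated from
 * vk.xml 1.2, yields renderer_version = MIN3(1.3, 1.2, 1.2) = 1.2, so later
 * queries never use structs the protocol cannot encode.
 */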
1323 
1324 static void
1325 vn_image_format_cache_debug_dump(
1326    struct vn_image_format_properties_cache *cache)
1327 {
1328    vn_log(NULL, "  hit %u\n", cache->debug.cache_hit_count);
1329    vn_log(NULL, "  miss %u\n", cache->debug.cache_miss_count);
1330    vn_log(NULL, "  skip %u\n", cache->debug.cache_skip_count);
1331 }
1332 
1333 static void
1334 vn_image_format_cache_init(struct vn_physical_device *physical_dev)
1335 {
1336    struct vn_image_format_properties_cache *cache =
1337       &physical_dev->image_format_cache;
1338 
1339    if (VN_PERF(NO_ASYNC_IMAGE_FORMAT))
1340       return;
1341 
1342    cache->ht = _mesa_hash_table_create(NULL, vn_cache_key_hash_function,
1343                                        vn_cache_key_equal_function);
1344    if (!cache->ht)
1345       return;
1346 
1347    simple_mtx_init(&cache->mutex, mtx_plain);
1348    list_inithead(&cache->lru);
1349 }
1350 
1351 static void
1352 vn_image_format_cache_fini(struct vn_physical_device *physical_dev)
1353 {
1354    const VkAllocationCallbacks *alloc =
1355       &physical_dev->base.base.instance->alloc;
1356    struct vn_image_format_properties_cache *cache =
1357       &physical_dev->image_format_cache;
1358 
1359    if (!cache->ht)
1360       return;
1361 
1362    hash_table_foreach(cache->ht, hash_entry) {
1363       struct vn_image_format_cache_entry *cache_entry = hash_entry->data;
1364       list_del(&cache_entry->head);
1365       vk_free(alloc, cache_entry);
1366    }
1367    assert(list_is_empty(&cache->lru));
1368 
1369    _mesa_hash_table_destroy(cache->ht, NULL);
1370 
1371    simple_mtx_destroy(&cache->mutex);
1372 
1373    if (VN_DEBUG(CACHE))
1374       vn_image_format_cache_debug_dump(cache);
1375 }
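
/* Illustrative sketch only (the real insert path lives elsewhere in this
 * file; the key field name is hypothetical): insertion bounds the cache by
 * evicting the least recently used entry once the table is full.
 *
 *    if (_mesa_hash_table_num_entries(cache->ht) >=
 *        IMAGE_FORMAT_CACHE_MAX_ENTRIES) {
 *       struct vn_image_format_cache_entry *lru_entry = list_last_entry(
 *          &cache->lru, struct vn_image_format_cache_entry, head);
 *       _mesa_hash_table_remove_key(cache->ht, lru_entry->key);
 *       list_del(&lru_entry->head);
 *       vk_free(alloc, lru_entry);
 *    }
 */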
1376 
1377 static VkResult
1378 vn_physical_device_init(struct vn_physical_device *physical_dev)
1379 {
1380    struct vn_instance *instance = physical_dev->instance;
1381    const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1382    VkResult result;
1383 
1384    result = vn_physical_device_init_renderer_extensions(physical_dev);
1385    if (result != VK_SUCCESS)
1386       return result;
1387 
1388    vn_physical_device_init_external_memory(physical_dev);
1389    vn_physical_device_init_external_fence_handles(physical_dev);
1390    vn_physical_device_init_external_semaphore_handles(physical_dev);
1391 
1392    vn_physical_device_init_supported_extensions(physical_dev);
1393 
1394    result = vn_physical_device_init_queue_family_properties(physical_dev);
1395    if (result != VK_SUCCESS)
1396       goto fail;
1397 
1398    /* TODO query all caps with minimal round trips */
1399    vn_physical_device_init_features(physical_dev);
1400    vn_physical_device_init_properties(physical_dev);
1401 
1402    vn_physical_device_init_memory_properties(physical_dev);
1403 
1404    result = vn_wsi_init(physical_dev);
1405    if (result != VK_SUCCESS)
1406       goto fail;
1407 
1408    simple_mtx_init(&physical_dev->format_update_mutex, mtx_plain);
1409    util_sparse_array_init(&physical_dev->format_properties,
1410                           sizeof(struct vn_format_properties_entry), 64);
1411 
1412    vn_image_format_cache_init(physical_dev);
1413 
1414    return VK_SUCCESS;
1415 
1416 fail:
1417    vk_free(alloc, physical_dev->extension_spec_versions);
1418    vk_free(alloc, physical_dev->queue_family_properties);
1419    return result;
1420 }
1421 
1422 void
1423 vn_physical_device_fini(struct vn_physical_device *physical_dev)
1424 {
1425    struct vn_instance *instance = physical_dev->instance;
1426    const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1427 
1428    vn_image_format_cache_fini(physical_dev);
1429 
1430    simple_mtx_destroy(&physical_dev->format_update_mutex);
1431    util_sparse_array_finish(&physical_dev->format_properties);
1432 
1433    vn_wsi_fini(physical_dev);
1434    vk_free(alloc, physical_dev->extension_spec_versions);
1435    vk_free(alloc, physical_dev->queue_family_properties);
1436 
1437    vn_physical_device_base_fini(&physical_dev->base);
1438 }
1439 
1440 static struct vn_physical_device *
1441 find_physical_device(struct vn_physical_device *physical_devs,
1442                      uint32_t count,
1443                      vn_object_id id)
1444 {
1445    for (uint32_t i = 0; i < count; i++) {
1446       if (physical_devs[i].base.id == id)
1447          return &physical_devs[i];
1448    }
1449    return NULL;
1450 }
1451 
1452 static VkResult
1453 vn_instance_enumerate_physical_device_groups_locked(
1454    struct vn_instance *instance,
1455    struct vn_physical_device *physical_devs,
1456    uint32_t physical_dev_count)
1457 {
1458    VkInstance instance_handle = vn_instance_to_handle(instance);
1459    struct vn_ring *ring = instance->ring.ring;
1460    const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1461    VkResult result;
1462 
1463    uint32_t count;
1464    result = vn_call_vkEnumeratePhysicalDeviceGroups(ring, instance_handle,
1465                                                     &count, NULL);
1466    if (result != VK_SUCCESS)
1467       return result;
1468 
1469    VkPhysicalDeviceGroupProperties *groups =
1470       vk_alloc(alloc, sizeof(*groups) * count, VN_DEFAULT_ALIGN,
1471                VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1472    if (!groups)
1473       return VK_ERROR_OUT_OF_HOST_MEMORY;
1474 
1475    /* VkPhysicalDeviceGroupProperties::physicalDevices is treated as an input
1476     * by the encoder.  Each VkPhysicalDevice must point to a valid object.
1477     * Each object must also have id 0, which the renderer interprets as a
1478     * query.
1479     */
1480    struct vn_physical_device_base *temp_objs =
1481       vk_zalloc(alloc, sizeof(*temp_objs) * VK_MAX_DEVICE_GROUP_SIZE * count,
1482                 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
1483    if (!temp_objs) {
1484       vk_free(alloc, groups);
1485       return VK_ERROR_OUT_OF_HOST_MEMORY;
1486    }
1487 
1488    for (uint32_t i = 0; i < count; i++) {
1489       VkPhysicalDeviceGroupProperties *group = &groups[i];
1490       group->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
1491       group->pNext = NULL;
1492       for (uint32_t j = 0; j < VK_MAX_DEVICE_GROUP_SIZE; j++) {
1493          struct vn_physical_device_base *temp_obj =
1494             &temp_objs[VK_MAX_DEVICE_GROUP_SIZE * i + j];
1495          temp_obj->base.base.type = VK_OBJECT_TYPE_PHYSICAL_DEVICE;
1496          group->physicalDevices[j] = (VkPhysicalDevice)temp_obj;
1497       }
1498    }
1499 
1500    result = vn_call_vkEnumeratePhysicalDeviceGroups(ring, instance_handle,
1501                                                     &count, groups);
1502    if (result != VK_SUCCESS) {
1503       vk_free(alloc, groups);
1504       vk_free(alloc, temp_objs);
1505       return result;
1506    }
1507 
1508    /* fix VkPhysicalDeviceGroupProperties::physicalDevices to point to
1509     * physical_devs and discard unsupported ones
1510     */
1511    uint32_t supported_count = 0;
1512    for (uint32_t i = 0; i < count; i++) {
1513       VkPhysicalDeviceGroupProperties *group = &groups[i];
1514 
1515       uint32_t group_physical_dev_count = 0;
1516       for (uint32_t j = 0; j < group->physicalDeviceCount; j++) {
1517          struct vn_physical_device_base *temp_obj =
1518             (struct vn_physical_device_base *)group->physicalDevices[j];
1519          struct vn_physical_device *physical_dev = find_physical_device(
1520             physical_devs, physical_dev_count, temp_obj->id);
1521          if (!physical_dev)
1522             continue;
1523 
1524          group->physicalDevices[group_physical_dev_count++] =
1525             vn_physical_device_to_handle(physical_dev);
1526       }
1527 
1528       group->physicalDeviceCount = group_physical_dev_count;
1529       if (!group->physicalDeviceCount)
1530          continue;
1531 
1532       if (supported_count < i)
1533          groups[supported_count] = *group;
1534       supported_count++;
1535    }
1536 
1537    count = supported_count;
1538    assert(count);
1539 
1540    vk_free(alloc, temp_objs);
1541 
1542    instance->physical_device.groups = groups;
1543    instance->physical_device.group_count = count;
1544 
1545    return VK_SUCCESS;
1546 }
1547 
1548 static VkResult
1549 enumerate_physical_devices(struct vn_instance *instance,
1550                            struct vn_physical_device **out_physical_devs,
1551                            uint32_t *out_count)
1552 {
1553    const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1554    struct vn_ring *ring = instance->ring.ring;
1555    struct vn_physical_device *physical_devs = NULL;
1556    VkPhysicalDevice *handles = NULL;
1557    VkResult result;
1558 
1559    uint32_t count = 0;
1560    result = vn_call_vkEnumeratePhysicalDevices(
1561       ring, vn_instance_to_handle(instance), &count, NULL);
1562    if (result != VK_SUCCESS || !count)
1563       return result;
1564 
1565    physical_devs =
1566       vk_zalloc(alloc, sizeof(*physical_devs) * count, VN_DEFAULT_ALIGN,
1567                 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1568    if (!physical_devs)
1569       return VK_ERROR_OUT_OF_HOST_MEMORY;
1570 
1571    handles = vk_alloc(alloc, sizeof(*handles) * count, VN_DEFAULT_ALIGN,
1572                       VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
1573    if (!handles) {
1574       vk_free(alloc, physical_devs);
1575       return VK_ERROR_OUT_OF_HOST_MEMORY;
1576    }
1577 
1578    for (uint32_t i = 0; i < count; i++) {
1579       struct vn_physical_device *physical_dev = &physical_devs[i];
1580 
1581       struct vk_physical_device_dispatch_table dispatch_table;
1582       vk_physical_device_dispatch_table_from_entrypoints(
1583          &dispatch_table, &vn_physical_device_entrypoints, true);
1584       vk_physical_device_dispatch_table_from_entrypoints(
1585          &dispatch_table, &wsi_physical_device_entrypoints, false);
1586       result = vn_physical_device_base_init(
1587          &physical_dev->base, &instance->base, NULL, &dispatch_table);
1588       if (result != VK_SUCCESS) {
1589          count = i;
1590          goto fail;
1591       }
1592 
1593       physical_dev->instance = instance;
1594 
1595       handles[i] = vn_physical_device_to_handle(physical_dev);
1596    }
1597 
1598    result = vn_call_vkEnumeratePhysicalDevices(
1599       ring, vn_instance_to_handle(instance), &count, handles);
1600    if (result != VK_SUCCESS)
1601       goto fail;
1602 
1603    vk_free(alloc, handles);
1604    *out_physical_devs = physical_devs;
1605    *out_count = count;
1606 
1607    return VK_SUCCESS;
1608 
1609 fail:
1610    for (uint32_t i = 0; i < count; i++)
1611       vn_physical_device_base_fini(&physical_devs[i].base);
1612    vk_free(alloc, physical_devs);
1613    vk_free(alloc, handles);
1614    return result;
1615 }
1616 
1617 static uint32_t
1618 filter_physical_devices(struct vn_physical_device *physical_devs,
1619                         uint32_t count)
1620 {
1621    uint32_t supported_count = 0;
1622    for (uint32_t i = 0; i < count; i++) {
1623       struct vn_physical_device *physical_dev = &physical_devs[i];
1624 
1625       /* init renderer version and discard unsupported devices */
1626       VkResult result =
1627          vn_physical_device_init_renderer_version(physical_dev);
1628       if (result != VK_SUCCESS) {
1629          vn_physical_device_base_fini(&physical_dev->base);
1630          continue;
1631       }
1632 
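      /* compact the array in place, keeping only the supported devices */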
1633       if (supported_count < i)
1634          physical_devs[supported_count] = *physical_dev;
1635       supported_count++;
1636    }
1637 
1638    return supported_count;
1639 }
1640 
1641 static VkResult
1642 vn_instance_enumerate_physical_devices_and_groups(struct vn_instance *instance)
1643 {
1644    const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1645    struct vn_physical_device *physical_devs = NULL;
1646    uint32_t count = 0;
1647    VkResult result = VK_SUCCESS;
1648 
1649    mtx_lock(&instance->physical_device.mutex);
1650 
1651    if (instance->physical_device.initialized)
1652       goto unlock;
1653    instance->physical_device.initialized = true;
1654 
1655    result = enumerate_physical_devices(instance, &physical_devs, &count);
1656    if (result != VK_SUCCESS)
1657       goto unlock;
1658 
1659    count = filter_physical_devices(physical_devs, count);
1660    if (!count) {
1661       vk_free(alloc, physical_devs);
1662       goto unlock;
1663    }
1664 
1665    /* fully initialize physical devices */
1666    for (uint32_t i = 0; i < count; i++) {
1667       struct vn_physical_device *physical_dev = &physical_devs[i];
1668 
1669       result = vn_physical_device_init(physical_dev);
1670       if (result != VK_SUCCESS) {
1671          for (uint32_t j = 0; j < i; j++)
1672             vn_physical_device_fini(&physical_devs[j]);
1673          for (uint32_t j = i; j < count; j++)
1674             vn_physical_device_base_fini(&physical_devs[j].base);
1675          vk_free(alloc, physical_devs);
1676          goto unlock;
1677       }
1678    }
1679 
1680    result = vn_instance_enumerate_physical_device_groups_locked(
1681       instance, physical_devs, count);
1682    if (result != VK_SUCCESS) {
1683       for (uint32_t i = 0; i < count; i++)
1684          vn_physical_device_fini(&physical_devs[i]);
1685       vk_free(alloc, physical_devs);
1686       goto unlock;
1687    }
1688 
1689    instance->physical_device.devices = physical_devs;
1690    instance->physical_device.device_count = count;
1691 
1692 unlock:
1693    mtx_unlock(&instance->physical_device.mutex);
1694    return result;
1695 }
1696 
1697 /* physical device commands */
1698 
1699 VkResult
1700 vn_EnumeratePhysicalDevices(VkInstance _instance,
1701                             uint32_t *pPhysicalDeviceCount,
1702                             VkPhysicalDevice *pPhysicalDevices)
1703 {
1704    struct vn_instance *instance = vn_instance_from_handle(_instance);
1705 
1706    VkResult result =
1707       vn_instance_enumerate_physical_devices_and_groups(instance);
1708    if (result != VK_SUCCESS)
1709       return vn_error(instance, result);
1710 
1711    VK_OUTARRAY_MAKE_TYPED(VkPhysicalDevice, out, pPhysicalDevices,
1712                           pPhysicalDeviceCount);
1713    for (uint32_t i = 0; i < instance->physical_device.device_count; i++) {
1714       vk_outarray_append_typed(VkPhysicalDevice, &out, physical_dev) {
1715          *physical_dev = vn_physical_device_to_handle(
1716             &instance->physical_device.devices[i]);
1717       }
1718    }
1719 
1720    return vk_outarray_status(&out);
1721 }
1722 
1723 VkResult
1724 vn_EnumeratePhysicalDeviceGroups(
1725    VkInstance _instance,
1726    uint32_t *pPhysicalDeviceGroupCount,
1727    VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
1728 {
1729    struct vn_instance *instance = vn_instance_from_handle(_instance);
1730 
1731    VkResult result =
1732       vn_instance_enumerate_physical_devices_and_groups(instance);
1733    if (result != VK_SUCCESS)
1734       return vn_error(instance, result);
1735 
1736    VK_OUTARRAY_MAKE_TYPED(VkPhysicalDeviceGroupProperties, out,
1737                           pPhysicalDeviceGroupProperties,
1738                           pPhysicalDeviceGroupCount);
1739    for (uint32_t i = 0; i < instance->physical_device.group_count; i++) {
1740       vk_outarray_append_typed(VkPhysicalDeviceGroupProperties, &out, props) {
1741          *props = instance->physical_device.groups[i];
1742       }
1743    }
1744 
1745    return vk_outarray_status(&out);
1746 }
1747 
1748 VkResult
1749 vn_EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
1750                                       const char *pLayerName,
1751                                       uint32_t *pPropertyCount,
1752                                       VkExtensionProperties *pProperties)
1753 {
1754    struct vn_physical_device *physical_dev =
1755       vn_physical_device_from_handle(physicalDevice);
1756 
1757    if (pLayerName)
1758       return vn_error(physical_dev->instance, VK_ERROR_LAYER_NOT_PRESENT);
1759 
1760    VK_OUTARRAY_MAKE_TYPED(VkExtensionProperties, out, pProperties,
1761                           pPropertyCount);
1762    for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
1763       if (physical_dev->base.base.supported_extensions.extensions[i]) {
1764          vk_outarray_append_typed(VkExtensionProperties, &out, prop) {
1765             *prop = vk_device_extensions[i];
1766             prop->specVersion = physical_dev->extension_spec_versions[i];
1767          }
1768       }
1769    }
1770 
1771    return vk_outarray_status(&out);
1772 }
1773 
1774 VkResult
1775 vn_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
1776                                   uint32_t *pPropertyCount,
1777                                   VkLayerProperties *pProperties)
1778 {
1779    *pPropertyCount = 0;
1780    return VK_SUCCESS;
1781 }
1782 
1783 static struct vn_format_properties_entry *
1784 vn_physical_device_get_format_properties(
1785    struct vn_physical_device *physical_dev, VkFormat format)
1786 {
1787    return util_sparse_array_get(&physical_dev->format_properties, format);
1788 }
1789 
1790 static void
1791 vn_physical_device_add_format_properties(
1792    struct vn_physical_device *physical_dev,
1793    struct vn_format_properties_entry *entry,
1794    const VkFormatProperties *props)
1795 {
1796    simple_mtx_lock(&physical_dev->format_update_mutex);
1797    if (!entry->valid) {
1798       entry->properties = *props;
1799       entry->valid = true;
1800    }
1801    simple_mtx_unlock(&physical_dev->format_update_mutex);
1802 }
1803 
1804 void
1805 vn_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
1806                                 VkPhysicalDeviceProperties2 *pProperties)
1807 {
1808    struct vn_physical_device *physical_dev =
1809       vn_physical_device_from_handle(physicalDevice);
1810    const struct vn_physical_device_properties *in_props =
1811       &physical_dev->properties;
1812 
1813    pProperties->properties = in_props->vulkan_1_0;
1814 
1815    vk_foreach_struct(out, pProperties->pNext) {
1816       if (vk_get_physical_device_core_1_1_property_ext(out,
1817                                                        &in_props->vulkan_1_1))
1818          continue;
1819 
1820       if (vk_get_physical_device_core_1_2_property_ext(out,
1821                                                        &in_props->vulkan_1_2))
1822          continue;
1823 
1824       if (vk_get_physical_device_core_1_3_property_ext(out,
1825                                                        &in_props->vulkan_1_3))
1826          continue;
1827 
1828       /* Cast to avoid warnings for values outside VkStructureType. */
1829       switch ((int32_t)out->sType) {
1830 
1831 #define CASE(stype, member)                                                  \
1832    case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_##stype:                           \
1833       vk_copy_struct_guts(out, (VkBaseInStructure *)&in_props->member,       \
1834                           sizeof(in_props->member));                         \
1835       break
1836 
1837          /* clang-format off */
1838 
1839       /* KHR */
1840       CASE(PUSH_DESCRIPTOR_PROPERTIES_KHR, push_descriptor);
1841 
1842       /* EXT */
1843       CASE(CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, conservative_rasterization);
1844       CASE(CUSTOM_BORDER_COLOR_PROPERTIES_EXT, custom_border_color);
1845       CASE(EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, extended_dynamic_state_3);
1846       CASE(GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, graphics_pipeline_library);
1847       CASE(LINE_RASTERIZATION_PROPERTIES_EXT, line_rasterization);
1848       CASE(MULTI_DRAW_PROPERTIES_EXT, multi_draw);
1849       CASE(PROVOKING_VERTEX_PROPERTIES_EXT, provoking_vertex);
1850       CASE(ROBUSTNESS_2_PROPERTIES_EXT, robustness_2);
1851       CASE(TRANSFORM_FEEDBACK_PROPERTIES_EXT, transform_feedback);
1852       CASE(VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, vertex_attribute_divisor);
1853 
1854          /* clang-format on */
1855 
1856       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT: {
1857          VkPhysicalDeviceDrmPropertiesEXT *out_props = (void *)out;
1858          const struct vn_renderer_info *info =
1859             &physical_dev->instance->renderer->info;
1860 
1861          out_props->hasPrimary = info->drm.has_primary;
1862          out_props->primaryMajor = info->drm.primary_major;
1863          out_props->primaryMinor = info->drm.primary_minor;
1864          out_props->hasRender = info->drm.has_render;
1865          out_props->renderMajor = info->drm.render_major;
1866          out_props->renderMinor = info->drm.render_minor;
1867          break;
1868       }
1869       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
1870          /* this is used by WSI */
1871          if (physical_dev->instance->renderer->info.pci.has_bus_info) {
1872             VkPhysicalDevicePCIBusInfoPropertiesEXT *out_props = (void *)out;
1873             const struct vn_renderer_info *info =
1874                &physical_dev->instance->renderer->info;
1875 
1876             out_props->pciDomain = info->pci.domain;
1877             out_props->pciBus = info->pci.bus;
1878             out_props->pciDevice = info->pci.device;
1879             out_props->pciFunction = info->pci.function;
1880          } else {
1881             assert(VN_DEBUG(VTEST));
1882             vk_copy_struct_guts(out,
1883                                 (VkBaseInStructure *)&in_props->pci_bus_info,
1884                                 sizeof(in_props->pci_bus_info));
1885          }
1886          break;
1887       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID: {
1888          VkPhysicalDevicePresentationPropertiesANDROID *out_props =
1889             (void *)out;
1890          out_props->sharedImage =
1891             vn_android_gralloc_get_shared_present_usage() ? VK_TRUE
1892                                                           : VK_FALSE;
1893          break;
1894       }
1895 
1896       default:
1897          break;
1898 #undef CASE
1899       }
1900    }
1901 }
1902 
1903 void
1904 vn_GetPhysicalDeviceQueueFamilyProperties2(
1905    VkPhysicalDevice physicalDevice,
1906    uint32_t *pQueueFamilyPropertyCount,
1907    VkQueueFamilyProperties2 *pQueueFamilyProperties)
1908 {
1909    struct vn_physical_device *physical_dev =
1910       vn_physical_device_from_handle(physicalDevice);
1911 
1912    VK_OUTARRAY_MAKE_TYPED(VkQueueFamilyProperties2, out,
1913                           pQueueFamilyProperties, pQueueFamilyPropertyCount);
1914    for (uint32_t i = 0; i < physical_dev->queue_family_count; i++) {
1915       vk_outarray_append_typed(VkQueueFamilyProperties2, &out, props) {
1916          *props = physical_dev->queue_family_properties[i];
1917       }
1918    }
1919 }
1920 
1921 void
1922 vn_GetPhysicalDeviceMemoryProperties2(
1923    VkPhysicalDevice physicalDevice,
1924    VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
1925 {
1926    struct vn_physical_device *physical_dev =
1927       vn_physical_device_from_handle(physicalDevice);
1928    struct vn_ring *ring = physical_dev->instance->ring.ring;
1929    VkPhysicalDeviceMemoryBudgetPropertiesEXT *memory_budget = NULL;
1930 
1931    /* Don't waste time searching for unsupported structs. */
1932    if (physical_dev->base.base.supported_extensions.EXT_memory_budget) {
1933       memory_budget =
1934          vk_find_struct(pMemoryProperties->pNext,
1935                         PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT);
1936    }
1937 
1938    /* When the app queries invariant memory properties, we return a cached
1939     * copy. For dynamic properties, we must query the server.
1940     */
1941    if (memory_budget) {
1942       vn_call_vkGetPhysicalDeviceMemoryProperties2(ring, physicalDevice,
1943                                                    pMemoryProperties);
1944    }
1945 
1946    /* Even when we query the server for memory properties, we must still
1947     * overwrite the invariant memory properties returned from the server with
1948     * our cached version.  Our cached version may differ from the server's
1949     * version due to workarounds.
1950     */
1951    pMemoryProperties->memoryProperties = physical_dev->memory_properties;
1952 }
1953 
1954 void
1955 vn_GetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice,
1956                                       VkFormat format,
1957                                       VkFormatProperties2 *pFormatProperties)
1958 {
1959    struct vn_physical_device *physical_dev =
1960       vn_physical_device_from_handle(physicalDevice);
1961    struct vn_ring *ring = physical_dev->instance->ring.ring;
1962 
1963    struct vn_format_properties_entry *entry = NULL;
1964    if (!pFormatProperties->pNext) {
1965       entry = vn_physical_device_get_format_properties(physical_dev, format);
1966       if (entry->valid) {
1967          pFormatProperties->formatProperties = entry->properties;
1968          return;
1969       }
1970    }
1971 
1972    vn_call_vkGetPhysicalDeviceFormatProperties2(ring, physicalDevice, format,
1973                                                 pFormatProperties);
1974 
1975    if (entry) {
1976       vn_physical_device_add_format_properties(
1977          physical_dev, entry, &pFormatProperties->formatProperties);
1978    }
1979 }
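
/* Usage note (illustrative; handle names hypothetical): for a pNext-less
 * query, only the first call per format takes a renderer round trip; later
 * calls are served from the sparse array above.
 *
 *    VkFormatProperties2 props = {
 *       .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
 *    };
 *    vn_GetPhysicalDeviceFormatProperties2(pdev_handle, format, &props);
 *    vn_GetPhysicalDeviceFormatProperties2(pdev_handle, format, &props);
 */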
1980 
1981 struct vn_physical_device_image_format_info {
1982    VkPhysicalDeviceImageFormatInfo2 format;
1983    VkPhysicalDeviceExternalImageFormatInfo external;
1984    VkImageFormatListCreateInfo list;
1985    VkImageStencilUsageCreateInfo stencil_usage;
1986    VkPhysicalDeviceImageDrmFormatModifierInfoEXT modifier;
1987 };
1988 
1989 static const VkPhysicalDeviceImageFormatInfo2 *
1990 vn_physical_device_fix_image_format_info(
1991    const VkPhysicalDeviceImageFormatInfo2 *info,
1992    const VkExternalMemoryHandleTypeFlagBits renderer_handle_type,
1993    struct vn_physical_device_image_format_info *local_info)
1994 {
1995    local_info->format = *info;
1996    VkBaseOutStructure *dst = (void *)&local_info->format;
1997 
1998    bool is_ahb = false;
1999    bool has_format_list = false;
2000    /* we should generate deep copy functions... */
2001    vk_foreach_struct_const(src, info->pNext) {
2002       void *pnext = NULL;
2003       switch (src->sType) {
2004       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
2005          memcpy(&local_info->external, src, sizeof(local_info->external));
2006          is_ahb =
2007             local_info->external.handleType ==
2008             VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2009          local_info->external.handleType = renderer_handle_type;
2010          pnext = &local_info->external;
2011          break;
2012       case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
2013          has_format_list = true;
2014          memcpy(&local_info->list, src, sizeof(local_info->list));
2015          pnext = &local_info->list;
2016          break;
2017       case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
2018          memcpy(&local_info->stencil_usage, src,
2019                 sizeof(local_info->stencil_usage));
2020          pnext = &local_info->stencil_usage;
2021          break;
2022       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
2023          memcpy(&local_info->modifier, src, sizeof(local_info->modifier));
2024          pnext = &local_info->modifier;
2025          break;
2026       default:
2027          break;
2028       }
2029 
2030       if (pnext) {
2031          dst->pNext = pnext;
2032          dst = pnext;
2033       }
2034    }
2035 
2036    if (is_ahb) {
2037       assert(local_info->format.tiling !=
2038              VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
2039       local_info->format.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
2040       if (!vn_android_get_drm_format_modifier_info(&local_info->format,
2041                                                    &local_info->modifier))
2042          return NULL;
2043 
2044       dst->pNext = (void *)&local_info->modifier;
2045       dst = dst->pNext;
2046 
2047       if ((info->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) &&
2048           (!has_format_list || !local_info->list.viewFormatCount)) {
2049          /* 12.3. Images
2050           *
2051           * If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT and flags
2052           * contains VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, then the pNext chain
2053           * must include a VkImageFormatListCreateInfo structure with non-zero
2054           * viewFormatCount.
2055           */
2056          VkImageFormatListCreateInfo *list = &local_info->list;
2057          uint32_t vcount = 0;
2058          const VkFormat *vformats =
2059             vn_android_format_to_view_formats(info->format, &vcount);
2060          if (!vformats) {
2061             /* local_info persists through the image format query call */
2062             vformats = &local_info->format.format;
2063             vcount = 1;
2064          }
2065 
2066          list->sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO;
2067          list->viewFormatCount = vcount;
2068          list->pViewFormats = vformats;
2069 
2070          if (!has_format_list) {
2071             dst->pNext = (void *)list;
2072             dst = dst->pNext;
2073          }
2074       }
2075    }
2076 
2077    dst->pNext = NULL;
2078 
2079    return &local_info->format;
2080 }
2081 
2082 static uint32_t
2083 vn_modifier_plane_count(struct vn_physical_device *physical_dev,
2084                         VkFormat format,
2085                         uint64_t modifier)
2086 {
2087    VkPhysicalDevice physical_dev_handle =
2088       vn_physical_device_to_handle(physical_dev);
2089 
2090    VkDrmFormatModifierPropertiesListEXT modifier_list = {
2091       .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
2092       .pDrmFormatModifierProperties = NULL,
2093    };
2094    VkFormatProperties2 format_props = {
2095       .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
2096       .pNext = &modifier_list,
2097    };
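   /* standard Vulkan two-call idiom: the first query below only retrieves
    * drmFormatModifierCount; the second call fills the array
    */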
2098    vn_GetPhysicalDeviceFormatProperties2(physical_dev_handle, format,
2099                                          &format_props);
2100 
2101    STACK_ARRAY(VkDrmFormatModifierPropertiesEXT, modifier_props,
2102                modifier_list.drmFormatModifierCount);
2103    if (!modifier_props)
2104       return 0;
2105    modifier_list.pDrmFormatModifierProperties = modifier_props;
2106 
2107    vn_GetPhysicalDeviceFormatProperties2(physical_dev_handle, format,
2108                                          &format_props);
2109 
2110    uint32_t plane_count = 0;
2111    for (uint32_t i = 0; i < modifier_list.drmFormatModifierCount; i++) {
2112       const struct VkDrmFormatModifierPropertiesEXT *props =
2113          &modifier_list.pDrmFormatModifierProperties[i];
2114       if (modifier == props->drmFormatModifier) {
2115          plane_count = props->drmFormatModifierPlaneCount;
2116          break;
2117       }
2118    }
2119 
2120    STACK_ARRAY_FINISH(modifier_props);
2121    return plane_count;
2122 }
2123 
2124 static bool
2125 vn_image_get_image_format_key(
2126    struct vn_physical_device *physical_dev,
2127    const VkPhysicalDeviceImageFormatInfo2 *format_info,
2128    const VkImageFormatProperties2 *format_props,
2129    uint8_t *key)
2130 {
2131    struct mesa_sha1 sha1_ctx;
2132 
2133    if (!physical_dev->image_format_cache.ht)
2134       return false;
2135 
2136    _mesa_sha1_init(&sha1_ctx);
2137 
2138    /* VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext
2139     * Each pNext member of any structure (including this one) in the pNext
2140     * chain must be either NULL or a pointer to a valid instance of
2141     * VkImageCompressionControlEXT, VkImageFormatListCreateInfo,
2142     * VkImageStencilUsageCreateInfo, VkOpticalFlowImageFormatInfoNV,
2143     * VkPhysicalDeviceExternalImageFormatInfo,
2144     * VkPhysicalDeviceImageDrmFormatModifierInfoEXT,
2145     * VkPhysicalDeviceImageViewImageFormatInfoEXT, or VkVideoProfileListInfoKHR
2146     *
2147     * Exclude VkOpticalFlowImageFormatInfoNV and VkVideoProfileListInfoKHR
2148     */
2149    if (format_info->pNext) {
2150       vk_foreach_struct_const(src, format_info->pNext) {
2151          switch (src->sType) {
2152          case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
2153             struct VkImageCompressionControlEXT *compression_control =
2154                (struct VkImageCompressionControlEXT *)src;
2155             _mesa_sha1_update(&sha1_ctx, &compression_control->flags,
2156                               sizeof(VkImageCompressionFlagsEXT));
2157             _mesa_sha1_update(
2158                &sha1_ctx, compression_control->pFixedRateFlags,
2159                sizeof(uint32_t) *
2160                   compression_control->compressionControlPlaneCount);
2161             break;
2162          }
2163          case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
2164             struct VkImageFormatListCreateInfo *format_list =
2165                (struct VkImageFormatListCreateInfo *)src;
2166             _mesa_sha1_update(
2167                &sha1_ctx, format_list->pViewFormats,
2168                sizeof(VkFormat) * format_list->viewFormatCount);
2169 
2170             break;
2171          }
2172          case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO: {
2173             struct VkImageStencilUsageCreateInfo *stencil_usage =
2174                (struct VkImageStencilUsageCreateInfo *)src;
2175             _mesa_sha1_update(&sha1_ctx, &stencil_usage->stencilUsage,
2176                               sizeof(VkImageUsageFlags));
2177             break;
2178          }
2179          case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: {
2180             struct VkPhysicalDeviceExternalImageFormatInfo *ext_image =
2181                (struct VkPhysicalDeviceExternalImageFormatInfo *)src;
2182             _mesa_sha1_update(&sha1_ctx, &ext_image->handleType,
2183                               sizeof(VkExternalMemoryHandleTypeFlagBits));
2184             break;
2185          }
2186          case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: {
2187             struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT
2188                *modifier_info =
2189                   (struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT *)src;
2190             _mesa_sha1_update(&sha1_ctx, &modifier_info->drmFormatModifier,
2191                               sizeof(uint64_t));
2192             if (modifier_info->sharingMode == VK_SHARING_MODE_CONCURRENT) {
2193                _mesa_sha1_update(
2194                   &sha1_ctx, modifier_info->pQueueFamilyIndices,
2195                   sizeof(uint32_t) * modifier_info->queueFamilyIndexCount);
2196             }
2197             break;
2198          }
2199          case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: {
2200             struct VkPhysicalDeviceImageViewImageFormatInfoEXT *view_image =
2201                (struct VkPhysicalDeviceImageViewImageFormatInfoEXT *)src;
2202             _mesa_sha1_update(&sha1_ctx, &view_image->imageViewType,
2203                               sizeof(VkImageViewType));
2204             break;
2205          }
2206          default:
2207             physical_dev->image_format_cache.debug.cache_skip_count++;
2208             return false;
2209          }
2210       }
2211    }
2212 
2213    /* Hash the pImageFormatProperties pNext structs as well, since some of
2214     * them are optional: they can be attached without a corresponding pNext
2215     * struct in pImageFormatInfo.
2216     *
2217     * VUID-VkImageFormatProperties2-pNext-pNext
2218     * Each pNext member of any structure (including this one) in the pNext
2219     * chain must be either NULL or a pointer to a valid instance of
2220     * VkAndroidHardwareBufferUsageANDROID, VkExternalImageFormatProperties,
2221     * VkFilterCubicImageViewImageFormatPropertiesEXT,
2222     * VkHostImageCopyDevicePerformanceQueryEXT,
2223     * VkImageCompressionPropertiesEXT,
2224     * VkSamplerYcbcrConversionImageFormatProperties, or
2225     * VkTextureLODGatherFormatPropertiesAMD
2226     *
2227     * VkAndroidHardwareBufferUsageANDROID is handled outside of the cache.
2228     * VkFilterCubicImageViewImageFormatPropertiesEXT,
2229    * VkHostImageCopyDevicePerformanceQueryEXT, and
2230    * VkTextureLODGatherFormatPropertiesAMD are not supported.
2232     */
2233    if (format_props->pNext) {
2234       vk_foreach_struct_const(src, format_props->pNext) {
2235          switch (src->sType) {
2236          case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
2237          case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT:
2238          case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
2239             _mesa_sha1_update(&sha1_ctx, &src->sType,
2240                               sizeof(VkStructureType));
2241             break;
2242          default:
2243             physical_dev->image_format_cache.debug.cache_skip_count++;
2244             return false;
2245          }
2246       }
2247    }
2248 
2249    static const size_t format_info_2_hash_block_size =
2250       sizeof(VkFormat) + sizeof(VkImageType) + sizeof(VkImageTiling) +
2251       sizeof(VkImageUsageFlags) + sizeof(VkImageCreateFlags);
2252 
2253    _mesa_sha1_update(&sha1_ctx, &format_info->format,
2254                      format_info_2_hash_block_size);
2255    _mesa_sha1_final(&sha1_ctx, key);
2256 
2257    return true;
2258 }
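
/* A minimal illustrative sketch (hypothetical name, excluded from the
 * build): with an empty pNext chain on both structs, the key computed above
 * reduces to a single SHA-1 over the five tightly packed core members of
 * VkPhysicalDeviceImageFormatInfo2, starting at `format`.
 */
#if 0
static void
example_image_format_key_no_pnext(
   const VkPhysicalDeviceImageFormatInfo2 *format_info,
   uint8_t key[SHA1_DIGEST_LENGTH])
{
   struct mesa_sha1 sha1_ctx;
   _mesa_sha1_init(&sha1_ctx);
   /* format, type, tiling, usage and flags are adjacent 32-bit members */
   _mesa_sha1_update(&sha1_ctx, &format_info->format,
                     sizeof(VkFormat) + sizeof(VkImageType) +
                        sizeof(VkImageTiling) + sizeof(VkImageUsageFlags) +
                        sizeof(VkImageCreateFlags));
   _mesa_sha1_final(&sha1_ctx, key);
}
#endif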
2259 
2260 static bool
2261 vn_image_init_format_from_cache(
2262    struct vn_physical_device *physical_dev,
2263    struct VkImageFormatProperties2 *pImageFormatProperties,
2264    VkResult *cached_result,
2265    uint8_t *key)
2266 {
2267    struct vn_image_format_properties_cache *cache =
2268       &physical_dev->image_format_cache;
2269 
2270    assert(cache->ht);
2271 
2272    simple_mtx_lock(&cache->mutex);
2273    struct hash_entry *hash_entry = _mesa_hash_table_search(cache->ht, key);
2274    if (hash_entry) {
2275       struct vn_image_format_cache_entry *cache_entry = hash_entry->data;
2276 
2277       /* Copy the properties even if the cached_result is not supported.
2278        * Per spec 1.3.275 "If the combination of parameters to
2279        * vkGetPhysicalDeviceImageFormatProperties2 is not supported by the
2280        * implementation for use in vkCreateImage, then all members of
2281        * imageFormatProperties will be filled with zero."
2282        */
2283       pImageFormatProperties->imageFormatProperties =
2284          cache_entry->properties.format.imageFormatProperties;
2285       *cached_result = cache_entry->properties.cached_result;
2286 
2287       if (pImageFormatProperties->pNext) {
2288          vk_foreach_struct_const(src, pImageFormatProperties->pNext) {
2289             switch (src->sType) {
2290             case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES: {
2291                struct VkExternalImageFormatProperties *ext_image =
2292                   (struct VkExternalImageFormatProperties *)src;
2293                ext_image->externalMemoryProperties =
2294                   cache_entry->properties.ext_image.externalMemoryProperties;
2295                break;
2296             }
2297             case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: {
2298                struct VkImageCompressionPropertiesEXT *compression =
2299                   (struct VkImageCompressionPropertiesEXT *)src;
2300                compression->imageCompressionFlags =
2301                   cache_entry->properties.compression.imageCompressionFlags;
2302                compression->imageCompressionFixedRateFlags =
2303                   cache_entry->properties.compression
2304                      .imageCompressionFixedRateFlags;
2305                break;
2306             }
2307             case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: {
2308                struct VkSamplerYcbcrConversionImageFormatProperties
2309                   *ycbcr_conversion =
2310                      (struct VkSamplerYcbcrConversionImageFormatProperties *)
2311                         src;
2312                ycbcr_conversion->combinedImageSamplerDescriptorCount =
2313                   cache_entry->properties.ycbcr_conversion
2314                      .combinedImageSamplerDescriptorCount;
2315                break;
2316             }
2317             default:
2318                unreachable("unexpected format props pNext");
2319             }
2320          }
2321       }
2322 
2323       list_move_to(&cache_entry->head, &cache->lru);
2324       p_atomic_inc(&cache->debug.cache_hit_count);
2325    } else {
2326       p_atomic_inc(&cache->debug.cache_miss_count);
2327    }
2328    simple_mtx_unlock(&cache->mutex);
2329 
2330    return !!hash_entry;
2331 }
2332 
2333 static void
2334 vn_image_store_format_in_cache(
2335    struct vn_physical_device *physical_dev,
2336    uint8_t *key,
2337    struct VkImageFormatProperties2 *pImageFormatProperties,
2338    VkResult cached_result)
2339 {
2340    const VkAllocationCallbacks *alloc =
2341       &physical_dev->base.base.instance->alloc;
2342    struct vn_image_format_properties_cache *cache =
2343       &physical_dev->image_format_cache;
2344    struct vn_image_format_cache_entry *cache_entry = NULL;
2345 
2346    assert(cache->ht);
2347 
2348    simple_mtx_lock(&cache->mutex);
2349 
2350    /* Check if the entry was added by another thread before we took the lock */
2351    if (_mesa_hash_table_search(cache->ht, key)) {
2352       simple_mtx_unlock(&cache->mutex);
2353       return;
2354    }
2355 
2356    if (_mesa_hash_table_num_entries(cache->ht) ==
2357        IMAGE_FORMAT_CACHE_MAX_ENTRIES) {
2358       /* Evict the least recently used entry and reuse it for this new entry */
2359       cache_entry = list_last_entry(&cache->lru,
2360                                     struct vn_image_format_cache_entry, head);
2361 
2362       _mesa_hash_table_remove_key(cache->ht, cache_entry->key);
2363       list_del(&cache_entry->head);
2364    } else {
2365       cache_entry = vk_zalloc(alloc, sizeof(*cache_entry), VN_DEFAULT_ALIGN,
2366                               VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2367       if (!cache_entry) {
2368          simple_mtx_unlock(&cache->mutex);
2369          return;
2370       }
2371    }
2372 
2373    if (pImageFormatProperties->pNext) {
2374       vk_foreach_struct_const(src, pImageFormatProperties->pNext) {
2375          switch (src->sType) {
2376          case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES: {
2377             cache_entry->properties.ext_image =
2378                *((struct VkExternalImageFormatProperties *)src);
2379             break;
2380          }
2381          case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: {
2382             cache_entry->properties.compression =
2383                *((struct VkImageCompressionPropertiesEXT *)src);
2384             break;
2385          }
2386          case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: {
2387             cache_entry->properties.ycbcr_conversion =
2388                *((struct VkSamplerYcbcrConversionImageFormatProperties *)src);
2389             break;
2390          }
2391          default:
2392             unreachable("unexpected format props pNext");
2393          }
2394       }
2395    }
2396 
2397    cache_entry->properties.format = *pImageFormatProperties;
2398    cache_entry->properties.cached_result = cached_result;
2399 
2400    memcpy(cache_entry->key, key, SHA1_DIGEST_LENGTH);
2401 
2402    _mesa_hash_table_insert(cache->ht, cache_entry->key, cache_entry);
2403    list_add(&cache_entry->head, &cache->lru);
2404 
2405    simple_mtx_unlock(&cache->mutex);
2406 }
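
/* An illustrative sketch (hypothetical names, excluded from the build) of
 * the bounded-cache policy used above: once the hash table reaches
 * IMAGE_FORMAT_CACHE_MAX_ENTRIES, the least recently used entry is unlinked
 * from both the table and the lru list and its allocation is recycled.
 */
#if 0
struct example_cache_entry {
   struct list_head head;
   uint8_t key[SHA1_DIGEST_LENGTH];
};

static struct example_cache_entry *
example_cache_get_slot(struct hash_table *ht,
                       struct list_head *lru,
                       const VkAllocationCallbacks *alloc)
{
   struct example_cache_entry *entry;
   if (_mesa_hash_table_num_entries(ht) == IMAGE_FORMAT_CACHE_MAX_ENTRIES) {
      /* recycle the coldest entry instead of allocating a new one */
      entry = list_last_entry(lru, struct example_cache_entry, head);
      _mesa_hash_table_remove_key(ht, entry->key);
      list_del(&entry->head);
   } else {
      entry = vk_zalloc(alloc, sizeof(*entry), VN_DEFAULT_ALIGN,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   }
   return entry;
}
#endif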
2407 
2408 VkResult
2409 vn_GetPhysicalDeviceImageFormatProperties2(
2410    VkPhysicalDevice physicalDevice,
2411    const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
2412    VkImageFormatProperties2 *pImageFormatProperties)
2413 {
2414    struct vn_physical_device *physical_dev =
2415       vn_physical_device_from_handle(physicalDevice);
2416    struct vn_ring *ring = physical_dev->instance->ring.ring;
2417    const VkExternalMemoryHandleTypeFlagBits renderer_handle_type =
2418       physical_dev->external_memory.renderer_handle_type;
2419    const VkExternalMemoryHandleTypeFlags supported_handle_types =
2420       physical_dev->external_memory.supported_handle_types;
2421 
2422    const struct wsi_image_create_info *wsi_info = vk_find_struct_const(
2423       pImageFormatInfo->pNext, WSI_IMAGE_CREATE_INFO_MESA);
2424    const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *modifier_info =
2425       vk_find_struct_const(
2426          pImageFormatInfo->pNext,
2427          PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT);
2428 
2429    /* Force common wsi into choosing DRM_FORMAT_MOD_LINEAR, or else fall back
2430     * to the legacy path, for which Venus also forces LINEAR for wsi images.
2431     */
2432    if (VN_PERF(NO_TILED_WSI_IMAGE)) {
2433       if (wsi_info && modifier_info &&
2434           modifier_info->drmFormatModifier != DRM_FORMAT_MOD_LINEAR) {
2435          if (VN_DEBUG(WSI)) {
2436             vn_log(physical_dev->instance,
2437                    "rejecting non-linear wsi image format modifier %" PRIu64,
2438                    modifier_info->drmFormatModifier);
2439          }
2440          return vn_error(physical_dev->instance,
2441                          VK_ERROR_FORMAT_NOT_SUPPORTED);
2442       }
2443    }
2444 
2445    /* Integration with Xwayland (using virgl-backed gbm) may only use
2446     * modifiers for which `memory_plane_count == format_plane_count` with the
2447     * distinction defined in the spec for VkDrmFormatModifierPropertiesEXT.
2448     *
2449     * The spec also states that:
2450     *   If an image is non-linear, then the partition of the image’s memory
2451     *   into memory planes is implementation-specific and may be unrelated to
2452     *   the partition of the image’s content into format planes.
2453     *
2454     * A modifier like I915_FORMAT_MOD_Y_TILED_CCS with an extra CCS
2455     * metadata-only _memory_ plane is not supported by virgl. In general,
2456     * since the partition of format planes into memory planes (even when their
2457    * counts match) cannot be reliably known, the safest option is to limit
2458     * both plane counts to 1 while virgl may be involved.
2459     */
2460    if (wsi_info && modifier_info &&
2461        !physical_dev->instance->enable_wsi_multi_plane_modifiers &&
2462        modifier_info->drmFormatModifier != DRM_FORMAT_MOD_LINEAR) {
2463       const uint32_t plane_count =
2464          vn_modifier_plane_count(physical_dev, pImageFormatInfo->format,
2465                                  modifier_info->drmFormatModifier);
2466       if (plane_count != 1) {
2467          if (VN_DEBUG(WSI)) {
2468             vn_log(physical_dev->instance,
2469                    "rejecting multi-plane (%u) modifier %" PRIu64
2470                    " for wsi image with format %u",
2471                    plane_count, modifier_info->drmFormatModifier,
2472                    pImageFormatInfo->format);
2473          }
2474          return vn_error(physical_dev->instance,
2475                          VK_ERROR_FORMAT_NOT_SUPPORTED);
2476       }
2477    }
2478 
2479    const VkPhysicalDeviceExternalImageFormatInfo *external_info =
2480       vk_find_struct_const(pImageFormatInfo->pNext,
2481                            PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO);
2482    if (external_info && !external_info->handleType)
2483       external_info = NULL;
2484 
2485    struct vn_physical_device_image_format_info local_info;
2486    if (external_info) {
2487       if (!(external_info->handleType & supported_handle_types)) {
2488          return vn_error(physical_dev->instance,
2489                          VK_ERROR_FORMAT_NOT_SUPPORTED);
2490       }
2491 
2492       /* Check the image tiling against the renderer handle type:
2493        * - No need to check for AHB since the tiling will either be forwarded
2494        *   or overwritten based on the renderer external memory type.
2495    * - For opaque fd and dma_buf fd handle types, pass the tiling through
2496    *   when the renderer external memory is dma_buf. Then we can avoid
2497    *   reconstructing the structs to support drm format modifier tiling,
2498    *   as we do to support AHB.
2499        */
2500       if (external_info->handleType !=
2501           VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
2502          if (renderer_handle_type ==
2503                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT &&
2504              pImageFormatInfo->tiling !=
2505                 VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
2506             return vn_error(physical_dev->instance,
2507                             VK_ERROR_FORMAT_NOT_SUPPORTED);
2508          }
2509       }
2510 
2511       if (external_info->handleType != renderer_handle_type) {
2512          pImageFormatInfo = vn_physical_device_fix_image_format_info(
2513             pImageFormatInfo, renderer_handle_type, &local_info);
2514          if (!pImageFormatInfo) {
2515             return vn_error(physical_dev->instance,
2516                             VK_ERROR_FORMAT_NOT_SUPPORTED);
2517          }
2518       }
2519    }
2520 
2521    /* Since venus-protocol doesn't pass the wsi_image_create_info struct, we
2522     * must remove the ALIAS_BIT here and in vn_wsi_create_image().
2523     * ANV rejects the bit for external+nonlinear images that don't have WSI
2524     * info chained.
2525     */
2526    if (wsi_info && physical_dev->renderer_driver_id ==
2527                       VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA) {
2528       if (pImageFormatInfo != &local_info.format) {
2529          local_info.format = *pImageFormatInfo;
2530          pImageFormatInfo = &local_info.format;
2531       }
2532       local_info.format.flags &= ~VK_IMAGE_CREATE_ALIAS_BIT;
2533    }
2534 
2535    /* Check if the image format props are already in the cache. */
2536    uint8_t key[SHA1_DIGEST_LENGTH] = { 0 };
2537    const bool cacheable = vn_image_get_image_format_key(
2538       physical_dev, pImageFormatInfo, pImageFormatProperties, key);
2539 
2540    VkResult result = VK_SUCCESS;
2541    if (!(cacheable &&
2542          vn_image_init_format_from_cache(physical_dev, pImageFormatProperties,
2543                                          &result, key))) {
2544       result = vn_call_vkGetPhysicalDeviceImageFormatProperties2(
2545          ring, physicalDevice, pImageFormatInfo, pImageFormatProperties);
2546 
2547       /* If cacheable, cache successful and unsupported results. */
2548       if (cacheable &&
2549           (result == VK_SUCCESS || result == VK_ERROR_FORMAT_NOT_SUPPORTED ||
2550            result == VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR)) {
2551          vn_image_store_format_in_cache(physical_dev, key,
2552                                         pImageFormatProperties, result);
2553       }
2554    }
2555 
2556    if (result != VK_SUCCESS || !external_info)
2557       return vn_result(physical_dev->instance, result);
2558 
2559    if (external_info->handleType ==
2560        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
2561       VkAndroidHardwareBufferUsageANDROID *ahb_usage =
2562          vk_find_struct(pImageFormatProperties->pNext,
2563                         ANDROID_HARDWARE_BUFFER_USAGE_ANDROID);
2564       if (ahb_usage) {
2565          ahb_usage->androidHardwareBufferUsage = vk_image_usage_to_ahb_usage(
2566             pImageFormatInfo->flags, pImageFormatInfo->usage);
2567       }
2568 
2569       /* AHBs with mipmap usage will ignore this property */
2570       pImageFormatProperties->imageFormatProperties.maxMipLevels = 1;
2571    }
2572 
2573    VkExternalImageFormatProperties *img_props = vk_find_struct(
2574       pImageFormatProperties->pNext, EXTERNAL_IMAGE_FORMAT_PROPERTIES);
2575    if (!img_props)
2576       return VK_SUCCESS;
2577 
2578    VkExternalMemoryProperties *mem_props =
2579       &img_props->externalMemoryProperties;
2580 
2581    if (renderer_handle_type ==
2582           VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT &&
2583        !physical_dev->instance->renderer->info.has_dma_buf_import) {
2584       mem_props->externalMemoryFeatures &=
2585          ~VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2586    }
2587 
2588    if (external_info->handleType ==
2589        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
2590       /* An AHB-backed image requires the renderer to support the import bit */
2591       if (!(mem_props->externalMemoryFeatures &
2592             VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT))
2593          return vn_error(physical_dev->instance,
2594                          VK_ERROR_FORMAT_NOT_SUPPORTED);
2595 
2596       mem_props->externalMemoryFeatures =
2597          VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT |
2598          VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT |
2599          VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2600       mem_props->exportFromImportedHandleTypes =
2601          VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2602       mem_props->compatibleHandleTypes =
2603          VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2604    } else {
2605       mem_props->compatibleHandleTypes = supported_handle_types;
2606       mem_props->exportFromImportedHandleTypes =
2607          (mem_props->exportFromImportedHandleTypes & renderer_handle_type)
2608             ? supported_handle_types
2609             : 0;
2610    }
2611 
2612    return VK_SUCCESS;
2613 }
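
/* An application-side sketch (hypothetical name, excluded from the build)
 * of the kind of chained query this entry point services; both pNext
 * structs below are covered by the cache key above.
 */
#if 0
static VkResult
example_query_dma_buf_image_format(VkPhysicalDevice physical_device)
{
   const VkPhysicalDeviceExternalImageFormatInfo external_info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
      .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
   };
   const VkPhysicalDeviceImageFormatInfo2 format_info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
      .pNext = &external_info,
      .format = VK_FORMAT_R8G8B8A8_UNORM,
      .type = VK_IMAGE_TYPE_2D,
      .tiling = VK_IMAGE_TILING_OPTIMAL,
      .usage = VK_IMAGE_USAGE_SAMPLED_BIT,
   };
   VkExternalImageFormatProperties external_props = {
      .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
   };
   VkImageFormatProperties2 format_props = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
      .pNext = &external_props,
   };
   return vkGetPhysicalDeviceImageFormatProperties2(
      physical_device, &format_info, &format_props);
}
#endif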
2614 
2615 void
2616 vn_GetPhysicalDeviceSparseImageFormatProperties2(
2617    VkPhysicalDevice physicalDevice,
2618    const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo,
2619    uint32_t *pPropertyCount,
2620    VkSparseImageFormatProperties2 *pProperties)
2621 {
2623    struct vn_physical_device *physical_dev =
2624       vn_physical_device_from_handle(physicalDevice);
2625    struct vn_ring *ring = physical_dev->instance->ring.ring;
2626    /* If VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT is not supported for the given
2627     * arguments, pPropertyCount will be set to zero upon return, and no data
2628     * will be written to pProperties.
2629     */
2630    if (physical_dev->sparse_binding_disabled) {
2631       *pPropertyCount = 0;
2632       return;
2633    }
2634 
2635    /* TODO per-device cache */
2636    vn_call_vkGetPhysicalDeviceSparseImageFormatProperties2(
2637       ring, physicalDevice, pFormatInfo, pPropertyCount, pProperties);
2638 }
2639 
2640 void
2641 vn_GetPhysicalDeviceExternalBufferProperties(
2642    VkPhysicalDevice physicalDevice,
2643    const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
2644    VkExternalBufferProperties *pExternalBufferProperties)
2645 {
2646    struct vn_physical_device *physical_dev =
2647       vn_physical_device_from_handle(physicalDevice);
2648    struct vn_ring *ring = physical_dev->instance->ring.ring;
2649    const VkExternalMemoryHandleTypeFlagBits renderer_handle_type =
2650       physical_dev->external_memory.renderer_handle_type;
2651    const VkExternalMemoryHandleTypeFlags supported_handle_types =
2652       physical_dev->external_memory.supported_handle_types;
2653    const bool is_ahb =
2654       pExternalBufferInfo->handleType ==
2655       VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2656 
2657    VkExternalMemoryProperties *props =
2658       &pExternalBufferProperties->externalMemoryProperties;
2659    if (!(pExternalBufferInfo->handleType & supported_handle_types)) {
2660       props->compatibleHandleTypes = pExternalBufferInfo->handleType;
2661       props->exportFromImportedHandleTypes = 0;
2662       props->externalMemoryFeatures = 0;
2663       return;
2664    }
2665 
2666    VkPhysicalDeviceExternalBufferInfo local_info;
2667    if (pExternalBufferInfo->handleType != renderer_handle_type) {
2668       local_info = *pExternalBufferInfo;
2669       local_info.handleType = renderer_handle_type;
2670       pExternalBufferInfo = &local_info;
2671    }
2672 
2673    /* TODO per-device cache */
2674    vn_call_vkGetPhysicalDeviceExternalBufferProperties(
2675       ring, physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
2676 
2677    if (renderer_handle_type ==
2678           VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT &&
2679        !physical_dev->instance->renderer->info.has_dma_buf_import) {
2680       props->externalMemoryFeatures &=
2681          ~VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2682    }
2683 
2684    if (is_ahb) {
2685       props->compatibleHandleTypes =
2686          VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2687       /* An AHB-backed buffer requires the renderer to support the import
2688        * bit and to not advertise the dedicated-only bit.
2689        */
2690       if (!(props->externalMemoryFeatures &
2691             VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT) ||
2692           (props->externalMemoryFeatures &
2693            VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT)) {
2694          props->externalMemoryFeatures = 0;
2695          props->exportFromImportedHandleTypes = 0;
2696          return;
2697       }
2698       props->externalMemoryFeatures =
2699          VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT |
2700          VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2701       props->exportFromImportedHandleTypes =
2702          VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2703    } else {
2704       props->compatibleHandleTypes = supported_handle_types;
2705       props->exportFromImportedHandleTypes =
2706          (props->exportFromImportedHandleTypes & renderer_handle_type)
2707             ? supported_handle_types
2708             : 0;
2709    }
2710 }
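
/* An application-side sketch (hypothetical name, excluded from the build):
 * querying whether a buffer can be exported as a dma_buf, mirroring the
 * feature-bit filtering performed above.
 */
#if 0
static bool
example_buffer_is_dma_buf_exportable(VkPhysicalDevice physical_device)
{
   const VkPhysicalDeviceExternalBufferInfo info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
      .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
      .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
   };
   VkExternalBufferProperties props = {
      .sType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
   };
   vkGetPhysicalDeviceExternalBufferProperties(physical_device, &info,
                                               &props);
   return props.externalMemoryProperties.externalMemoryFeatures &
          VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT;
}
#endif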
2711 
2712 void
2713 vn_GetPhysicalDeviceExternalFenceProperties(
2714    VkPhysicalDevice physicalDevice,
2715    const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
2716    VkExternalFenceProperties *pExternalFenceProperties)
2717 {
2718    struct vn_physical_device *physical_dev =
2719       vn_physical_device_from_handle(physicalDevice);
2720 
2721    if (pExternalFenceInfo->handleType &
2722        physical_dev->external_fence_handles) {
2723       pExternalFenceProperties->compatibleHandleTypes =
2724          physical_dev->external_fence_handles;
2725       pExternalFenceProperties->exportFromImportedHandleTypes =
2726          physical_dev->external_fence_handles;
2727       pExternalFenceProperties->externalFenceFeatures =
2728          VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT |
2729          VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT;
2730    } else {
2731       pExternalFenceProperties->compatibleHandleTypes = 0;
2732       pExternalFenceProperties->exportFromImportedHandleTypes = 0;
2733       pExternalFenceProperties->externalFenceFeatures = 0;
2734    }
2735 }
2736 
2737 void
2738 vn_GetPhysicalDeviceExternalSemaphoreProperties(
2739    VkPhysicalDevice physicalDevice,
2740    const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
2741    VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
2742 {
2743    struct vn_physical_device *physical_dev =
2744       vn_physical_device_from_handle(physicalDevice);
2745 
2746    const VkSemaphoreTypeCreateInfo *type_info = vk_find_struct_const(
2747       pExternalSemaphoreInfo->pNext, SEMAPHORE_TYPE_CREATE_INFO);
2748    const VkSemaphoreType sem_type =
2749       type_info ? type_info->semaphoreType : VK_SEMAPHORE_TYPE_BINARY;
2750    const VkExternalSemaphoreHandleTypeFlags valid_handles =
2751       sem_type == VK_SEMAPHORE_TYPE_BINARY
2752          ? physical_dev->external_binary_semaphore_handles
2753          : physical_dev->external_timeline_semaphore_handles;
2754    if (pExternalSemaphoreInfo->handleType & valid_handles) {
2755       pExternalSemaphoreProperties->compatibleHandleTypes = valid_handles;
2756       pExternalSemaphoreProperties->exportFromImportedHandleTypes =
2757          valid_handles;
2758       pExternalSemaphoreProperties->externalSemaphoreFeatures =
2759          VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
2760          VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
2761    } else {
2762       pExternalSemaphoreProperties->compatibleHandleTypes = 0;
2763       pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
2764       pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
2765    }
2766 }
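
/* An application-side sketch (hypothetical name, excluded from the build):
 * chaining VkSemaphoreTypeCreateInfo selects between the binary and
 * timeline handle masks consulted above.
 */
#if 0
static void
example_query_timeline_semaphore_handles(VkPhysicalDevice physical_device)
{
   const VkSemaphoreTypeCreateInfo type_info = {
      .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
      .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
   };
   const VkPhysicalDeviceExternalSemaphoreInfo info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
      .pNext = &type_info,
      .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
   };
   VkExternalSemaphoreProperties props = {
      .sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
   };
   vkGetPhysicalDeviceExternalSemaphoreProperties(physical_device, &info,
                                                  &props);
}
#endif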
2767 
2768 VkResult
2769 vn_GetPhysicalDeviceCalibrateableTimeDomainsEXT(
2770    VkPhysicalDevice physicalDevice,
2771    uint32_t *pTimeDomainCount,
2772    VkTimeDomainEXT *pTimeDomains)
2773 {
2774    struct vn_physical_device *physical_dev =
2775       vn_physical_device_from_handle(physicalDevice);
2776    struct vn_ring *ring = physical_dev->instance->ring.ring;
2777 
2778    return vn_call_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
2779       ring, physicalDevice, pTimeDomainCount, pTimeDomains);
2780 }
2781