/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "lvp_conv.h"

#include "pipe-loader/pipe_loader.h"
#include "git_sha1.h"
#include "vk_cmd_enqueue_entrypoints.h"
#include "vk_sampler.h"
#include "vk_util.h"
#include "util/detect.h"
#include "pipe/p_defines.h"
#include "pipe/p_state.h"
#include "pipe/p_context.h"
#include "frontend/drisw_api.h"

#include "util/u_inlines.h"
#include "util/os_memory.h"
#include "util/os_time.h"
#include "util/u_thread.h"
#include "util/u_atomic.h"
#include "util/timespec.h"
#include "util/ptralloc.h"
#include "nir.h"
#include "nir_builder.h"

#if DETECT_OS_LINUX
#include <sys/mman.h>
#endif

#if defined(VK_USE_PLATFORM_WAYLAND_KHR) || \
    defined(VK_USE_PLATFORM_WIN32_KHR) || \
    defined(VK_USE_PLATFORM_XCB_KHR) || \
    defined(VK_USE_PLATFORM_XLIB_KHR)
#define LVP_USE_WSI_PLATFORM
#endif
#define LVP_API_VERSION VK_MAKE_VERSION(1, 3, VK_HEADER_VERSION)

VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceVersion(uint32_t* pApiVersion)
{
   *pApiVersion = LVP_API_VERSION;
   return VK_SUCCESS;
}
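
/*
 * A minimal, illustrative application-side sketch (not part of this driver)
 * of how the value returned above is observed: vkEnumerateInstanceVersion()
 * reports Vulkan 1.3 with the patch level taken from the Vulkan headers
 * lavapipe was built against.
 *
 *    uint32_t version = 0;
 *    vkEnumerateInstanceVersion(&version);
 *    printf("instance %u.%u.%u\n", VK_VERSION_MAJOR(version),
 *           VK_VERSION_MINOR(version), VK_VERSION_PATCH(version));
 */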

static const struct vk_instance_extension_table lvp_instance_extensions_supported = {
   .KHR_device_group_creation                = true,
   .KHR_external_fence_capabilities          = true,
   .KHR_external_memory_capabilities         = true,
   .KHR_external_semaphore_capabilities      = true,
   .KHR_get_physical_device_properties2      = true,
   .EXT_debug_report                         = true,
   .EXT_debug_utils                          = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_get_surface_capabilities2            = true,
   .KHR_surface                              = true,
   .KHR_surface_protected_capabilities       = true,
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
   .KHR_wayland_surface                      = true,
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
   .KHR_win32_surface                        = true,
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
   .KHR_xcb_surface                          = true,
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
   .KHR_xlib_surface                         = true,
#endif
#ifndef VK_USE_PLATFORM_WIN32_KHR
   .EXT_headless_surface                     = true,
#endif
};

static const struct vk_device_extension_table lvp_device_extensions_supported = {
   .KHR_8bit_storage                      = true,
   .KHR_16bit_storage                     = true,
   .KHR_bind_memory2                      = true,
   .KHR_buffer_device_address             = true,
   .KHR_create_renderpass2                = true,
   .KHR_copy_commands2                    = true,
   .KHR_dedicated_allocation              = true,
   .KHR_depth_stencil_resolve             = true,
   .KHR_descriptor_update_template        = true,
   .KHR_device_group                      = true,
   .KHR_draw_indirect_count               = true,
   .KHR_driver_properties                 = true,
   .KHR_dynamic_rendering                 = true,
   .KHR_dynamic_rendering_local_read      = true,
   .KHR_format_feature_flags2             = true,
   .KHR_external_fence                    = true,
   .KHR_external_memory                   = true,
#ifdef PIPE_MEMORY_FD
   .KHR_external_memory_fd                = true,
#endif
   .KHR_external_semaphore                = true,
   .KHR_shader_float_controls             = true,
   .KHR_get_memory_requirements2          = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_incremental_present               = true,
#endif
   .KHR_image_format_list                 = true,
   .KHR_imageless_framebuffer             = true,
   .KHR_index_type_uint8                  = true,
   .KHR_line_rasterization                = true,
   .KHR_load_store_op_none                = true,
   .KHR_maintenance1                      = true,
   .KHR_maintenance2                      = true,
   .KHR_maintenance3                      = true,
   .KHR_maintenance4                      = true,
   .KHR_maintenance5                      = true,
   .KHR_maintenance6                      = true,
   .KHR_map_memory2                       = true,
   .KHR_multiview                         = true,
   .KHR_push_descriptor                   = true,
   .KHR_pipeline_library                  = true,
   .KHR_relaxed_block_layout              = true,
   .KHR_sampler_mirror_clamp_to_edge      = true,
   .KHR_sampler_ycbcr_conversion          = true,
   .KHR_separate_depth_stencil_layouts    = true,
   .KHR_shader_atomic_int64               = true,
   .KHR_shader_clock                      = true,
   .KHR_shader_draw_parameters            = true,
   .KHR_shader_expect_assume              = true,
   .KHR_shader_float16_int8               = true,
   .KHR_shader_integer_dot_product        = true,
   .KHR_shader_maximal_reconvergence      = true,
   .KHR_shader_non_semantic_info          = true,
   .KHR_shader_subgroup_extended_types    = true,
   .KHR_shader_terminate_invocation       = true,
   .KHR_spirv_1_4                         = true,
   .KHR_storage_buffer_storage_class      = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_swapchain                         = true,
   .KHR_swapchain_mutable_format          = true,
#endif
   .KHR_synchronization2                  = true,
   .KHR_timeline_semaphore                = true,
   .KHR_uniform_buffer_standard_layout    = true,
   .KHR_variable_pointers                 = true,
   .KHR_vertex_attribute_divisor          = true,
   .KHR_vulkan_memory_model               = true,
   .KHR_zero_initialize_workgroup_memory  = true,
   .ARM_rasterization_order_attachment_access = true,
   .EXT_4444_formats                      = true,
   .EXT_attachment_feedback_loop_layout   = true,
   .EXT_attachment_feedback_loop_dynamic_state = true,
   .EXT_border_color_swizzle              = true,
   .EXT_calibrated_timestamps             = true,
   .EXT_color_write_enable                = true,
   .EXT_conditional_rendering             = true,
   .EXT_depth_clip_enable                 = true,
   .EXT_depth_clip_control                = true,
   .EXT_depth_range_unrestricted          = true,
   .EXT_dynamic_rendering_unused_attachments = true,
   .EXT_descriptor_buffer                 = true,
   .EXT_descriptor_indexing               = true,
   .EXT_extended_dynamic_state            = true,
   .EXT_extended_dynamic_state2           = true,
   .EXT_extended_dynamic_state3           = true,
   .EXT_external_memory_host              = true,
   .EXT_graphics_pipeline_library         = true,
   .EXT_host_image_copy                   = true,
   .EXT_host_query_reset                  = true,
   .EXT_image_2d_view_of_3d               = true,
   .EXT_image_sliced_view_of_3d           = true,
   .EXT_image_robustness                  = true,
   .EXT_index_type_uint8                  = true,
   .EXT_inline_uniform_block              = true,
   .EXT_load_store_op_none                = true,
   .EXT_memory_budget                     = true,
#if DETECT_OS_LINUX
   .EXT_memory_priority                   = true,
#endif
   .EXT_mesh_shader                       = true,
   .EXT_multisampled_render_to_single_sampled = true,
   .EXT_multi_draw                        = true,
   .EXT_mutable_descriptor_type           = true,
   .EXT_nested_command_buffer             = true,
   .EXT_non_seamless_cube_map             = true,
#if DETECT_OS_LINUX
   .EXT_pageable_device_local_memory      = true,
#endif
   .EXT_pipeline_creation_feedback        = true,
   .EXT_pipeline_creation_cache_control   = true,
   .EXT_post_depth_coverage               = true,
   .EXT_private_data                      = true,
   .EXT_primitives_generated_query        = true,
   .EXT_primitive_topology_list_restart   = true,
   .EXT_rasterization_order_attachment_access = true,
   .EXT_sampler_filter_minmax             = true,
   .EXT_scalar_block_layout               = true,
   .EXT_separate_stencil_usage            = true,
   .EXT_shader_atomic_float               = true,
   .EXT_shader_atomic_float2              = true,
   .EXT_shader_demote_to_helper_invocation = true,
   .EXT_shader_object                     = true,
   .EXT_shader_stencil_export             = true,
   .EXT_shader_subgroup_ballot            = true,
   .EXT_shader_subgroup_vote              = true,
   .EXT_shader_viewport_index_layer       = true,
   .EXT_subgroup_size_control             = true,
   .EXT_texel_buffer_alignment            = true,
   .EXT_transform_feedback                = true,
   .EXT_vertex_attribute_divisor          = true,
   .EXT_vertex_input_dynamic_state        = true,
   .EXT_ycbcr_image_arrays                = true,
   .EXT_ycbcr_2plane_444_formats          = true,
   .EXT_custom_border_color               = true,
   .EXT_provoking_vertex                  = true,
   .EXT_line_rasterization                = true,
   .EXT_robustness2                       = true,
   .AMDX_shader_enqueue                   = true,
   .GOOGLE_decorate_string                = true,
   .GOOGLE_hlsl_functionality1            = true,
   .NV_device_generated_commands          = true,
};
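
/*
 * The two tables above are not walked directly here; the device table is
 * handed to the common Mesa Vulkan runtime via device->vk.supported_extensions
 * in lvp_physical_device_init() below, which is what answers the extension
 * enumeration entrypoints. Entries guarded by platform or OS macros are only
 * advertised when the corresponding WSI platform or OS support was compiled in.
 */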

static int
min_vertex_pipeline_param(struct pipe_screen *pscreen, enum pipe_shader_cap param)
{
   int val = INT_MAX;
   for (int i = 0; i < MESA_SHADER_COMPUTE; ++i) {
      if (i == MESA_SHADER_FRAGMENT ||
          !pscreen->get_shader_param(pscreen, i,
                                     PIPE_SHADER_CAP_MAX_INSTRUCTIONS))
         continue;

      /* Track the smallest cap across the supported pre-rasterization
       * stages; the INT_MAX seed only survives if no such stage exists.
       */
      val = MIN2(val, pscreen->get_shader_param(pscreen, i, param));
   }
   return val;
}

static int
min_shader_param(struct pipe_screen *pscreen, enum pipe_shader_cap param)
{
   return MIN3(min_vertex_pipeline_param(pscreen, param),
               pscreen->get_shader_param(pscreen, MESA_SHADER_FRAGMENT, param),
               pscreen->get_shader_param(pscreen, MESA_SHADER_COMPUTE, param));
}
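
/*
 * These helpers collapse a per-stage gallium shader cap into a single
 * conservative (minimum) value so it can back a device-wide Vulkan limit or
 * feature bit. For example, maxUniformBufferRange below is derived as
 *
 *    min_shader_param(pscreen, PIPE_SHADER_CAP_MAX_CONST_BUFFER0_SIZE)
 *
 * i.e. the smallest constant-buffer size supported by any shader stage.
 */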

static void
lvp_get_features(const struct lvp_physical_device *pdevice,
                 struct vk_features *features)
{
   bool instance_divisor = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_VERTEX_ELEMENT_INSTANCE_DIVISOR) != 0;

   *features = (struct vk_features){
      /* Vulkan 1.0 */
      .robustBufferAccess                       = true,
      .fullDrawIndexUint32                      = true,
      .imageCubeArray                           = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_CUBE_MAP_ARRAY) != 0),
      .independentBlend                         = true,
      .geometryShader                           = (pdevice->pscreen->get_shader_param(pdevice->pscreen, MESA_SHADER_GEOMETRY, PIPE_SHADER_CAP_MAX_INSTRUCTIONS) != 0),
      .tessellationShader                       = (pdevice->pscreen->get_shader_param(pdevice->pscreen, MESA_SHADER_TESS_EVAL, PIPE_SHADER_CAP_MAX_INSTRUCTIONS) != 0),
      .sampleRateShading                        = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_SAMPLE_SHADING) != 0),
      .dualSrcBlend                             = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_DUAL_SOURCE_RENDER_TARGETS) != 0),
      .logicOp                                  = true,
      .multiDrawIndirect                        = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MULTI_DRAW_INDIRECT) != 0),
      .drawIndirectFirstInstance                = true,
      .depthClamp                               = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_CLIP_DISABLE) != 0),
      .depthBiasClamp                           = true,
      .fillModeNonSolid                         = true,
      .depthBounds                              = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_BOUNDS_TEST) != 0),
      .wideLines                                = true,
      .largePoints                              = true,
      .alphaToOne                               = true,
      .multiViewport                            = true,
      .samplerAnisotropy                        = true,
      .textureCompressionETC2                   = false,
      .textureCompressionASTC_LDR               = false,
      .textureCompressionBC                     = true,
      .occlusionQueryPrecise                    = true,
      .pipelineStatisticsQuery                  = true,
      .vertexPipelineStoresAndAtomics           = (min_vertex_pipeline_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_BUFFERS) != 0),
      .fragmentStoresAndAtomics                 = (pdevice->pscreen->get_shader_param(pdevice->pscreen, MESA_SHADER_FRAGMENT, PIPE_SHADER_CAP_MAX_SHADER_BUFFERS) != 0),
      .shaderTessellationAndGeometryPointSize   = true,
      .shaderImageGatherExtended                = true,
      .shaderStorageImageExtendedFormats        = (min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_IMAGES) != 0),
      .shaderStorageImageMultisample            = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_TEXTURE_MULTISAMPLE) != 0),
      .shaderUniformBufferArrayDynamicIndexing  = true,
      .shaderSampledImageArrayDynamicIndexing   = true,
      .shaderStorageBufferArrayDynamicIndexing  = true,
      .shaderStorageImageArrayDynamicIndexing   = true,
      .shaderStorageImageReadWithoutFormat      = true,
      .shaderStorageImageWriteWithoutFormat     = true,
      .shaderClipDistance                       = true,
      .shaderCullDistance                       = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_CULL_DISTANCE) == 1),
      .shaderFloat64                            = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DOUBLES) == 1),
      .shaderInt64                              = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_INT64) == 1),
      .shaderInt16                              = (min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_INT16) == 1),
      .variableMultisampleRate                  = false,
      .inheritedQueries                         = false,

      /* Vulkan 1.1 */
      .storageBuffer16BitAccess            = true,
      .uniformAndStorageBuffer16BitAccess  = true,
      .storagePushConstant16               = true,
      .storageInputOutput16                = false,
      .multiview                           = true,
      .multiviewGeometryShader             = true,
      .multiviewTessellationShader         = true,
      .variablePointersStorageBuffer       = true,
      .variablePointers                    = true,
      .protectedMemory                     = false,
      .samplerYcbcrConversion              = true,
      .shaderDrawParameters                = true,

      /* Vulkan 1.2 */
      .samplerMirrorClampToEdge = true,
      .drawIndirectCount = true,
      .storageBuffer8BitAccess = true,
      .uniformAndStorageBuffer8BitAccess = true,
      .storagePushConstant8 = true,
      .shaderBufferInt64Atomics = true,
      .shaderSharedInt64Atomics = true,
      .shaderFloat16 = pdevice->pscreen->get_shader_param(pdevice->pscreen, MESA_SHADER_FRAGMENT, PIPE_SHADER_CAP_FP16) != 0,
      .shaderInt8 = true,

      .descriptorIndexing = true,
      .shaderInputAttachmentArrayDynamicIndexing = true,
      .shaderUniformTexelBufferArrayDynamicIndexing = true,
      .shaderStorageTexelBufferArrayDynamicIndexing = true,
      .shaderUniformBufferArrayNonUniformIndexing = true,
      .shaderSampledImageArrayNonUniformIndexing = true,
      .shaderStorageBufferArrayNonUniformIndexing = true,
      .shaderStorageImageArrayNonUniformIndexing = true,
      .shaderInputAttachmentArrayNonUniformIndexing = true,
      .shaderUniformTexelBufferArrayNonUniformIndexing = true,
      .shaderStorageTexelBufferArrayNonUniformIndexing = true,
      .descriptorBindingUniformBufferUpdateAfterBind = true,
      .descriptorBindingSampledImageUpdateAfterBind = true,
      .descriptorBindingStorageImageUpdateAfterBind = true,
      .descriptorBindingStorageBufferUpdateAfterBind = true,
      .descriptorBindingUniformTexelBufferUpdateAfterBind = true,
      .descriptorBindingStorageTexelBufferUpdateAfterBind = true,
      .descriptorBindingUpdateUnusedWhilePending = true,
      .descriptorBindingPartiallyBound = true,
      .descriptorBindingVariableDescriptorCount = true,
      .runtimeDescriptorArray = true,

      .samplerFilterMinmax = true,
      .scalarBlockLayout = true,
      .imagelessFramebuffer = true,
      .uniformBufferStandardLayout = true,
      .shaderSubgroupExtendedTypes = true,
      .separateDepthStencilLayouts = true,
      .hostQueryReset = true,
      .timelineSemaphore = true,
      .bufferDeviceAddress = true,
      .bufferDeviceAddressCaptureReplay = false,
      .bufferDeviceAddressMultiDevice = false,
      .vulkanMemoryModel = true,
      .vulkanMemoryModelDeviceScope = true,
      .vulkanMemoryModelAvailabilityVisibilityChains = true,
      .shaderOutputViewportIndex = true,
      .shaderOutputLayer = true,
      .subgroupBroadcastDynamicId = true,

      /* Vulkan 1.3 */
      .robustImageAccess = true,
      .inlineUniformBlock = true,
      .descriptorBindingInlineUniformBlockUpdateAfterBind = true,
      .pipelineCreationCacheControl = true,
      .privateData = true,
      .shaderDemoteToHelperInvocation = true,
      .shaderTerminateInvocation = true,
      .subgroupSizeControl = true,
      .computeFullSubgroups = true,
      .synchronization2 = true,
      .textureCompressionASTC_HDR = VK_FALSE,
      .shaderZeroInitializeWorkgroupMemory = true,
      .dynamicRendering = true,
      .shaderIntegerDotProduct = true,
      .maintenance4 = true,

      /* VK_EXT_descriptor_buffer */
      .descriptorBuffer = true,
      .descriptorBufferCaptureReplay = false,
      .descriptorBufferPushDescriptors = true,
      .descriptorBufferImageLayoutIgnored = true,

      /* VK_EXT_primitives_generated_query */
      .primitivesGeneratedQuery = true,
      .primitivesGeneratedQueryWithRasterizerDiscard = true,
      .primitivesGeneratedQueryWithNonZeroStreams = true,

      /* VK_EXT_border_color_swizzle */
      .borderColorSwizzle = true,
      .borderColorSwizzleFromImage = true,

      /* VK_EXT_non_seamless_cube_map */
      .nonSeamlessCubeMap = true,

      /* VK_EXT_attachment_feedback_loop_layout */
      .attachmentFeedbackLoopLayout = true,

      /* VK_EXT_rasterization_order_attachment_access */
      .rasterizationOrderColorAttachmentAccess = true,
      .rasterizationOrderDepthAttachmentAccess = true,
      .rasterizationOrderStencilAttachmentAccess = true,

      /* VK_EXT_line_rasterization */
      .rectangularLines = true,
      .bresenhamLines = true,
      .smoothLines = true,
      .stippledRectangularLines = true,
      .stippledBresenhamLines = true,
      .stippledSmoothLines = true,

      /* VK_EXT_vertex_attribute_divisor */
      .vertexAttributeInstanceRateZeroDivisor = instance_divisor,
      .vertexAttributeInstanceRateDivisor = instance_divisor,

      /* VK_EXT_multisampled_render_to_single_sampled */
      .multisampledRenderToSingleSampled = true,

      /* VK_EXT_mutable_descriptor_type */
      .mutableDescriptorType = true,

      /* VK_EXT_index_type_uint8 */
      .indexTypeUint8 = true,

      /* VK_EXT_vertex_input_dynamic_state */
      .vertexInputDynamicState = true,

      /* VK_EXT_image_sliced_view_of_3d */
      .imageSlicedViewOf3D = true,

      /* VK_EXT_depth_clip_control */
      .depthClipControl = true,

      /* VK_EXT_attachment_feedback_loop_layout_dynamic_state */
      .attachmentFeedbackLoopDynamicState = true,

      /* VK_EXT_shader_object */
      .shaderObject = true,

      /* VK_KHR_shader_clock */
      .shaderSubgroupClock = true,
      .shaderDeviceClock = true,

      /* VK_EXT_texel_buffer_alignment */
      .texelBufferAlignment = true,

      /* VK_EXT_transform_feedback */
      .transformFeedback = true,
      .geometryStreams = true,

      /* VK_EXT_conditional_rendering */
      .conditionalRendering = true,
      .inheritedConditionalRendering = false,

      /* VK_EXT_extended_dynamic_state */
      .extendedDynamicState = true,

      /* VK_EXT_4444_formats */
      .formatA4R4G4B4 = true,
      .formatA4B4G4R4 = true,

      /* VK_EXT_custom_border_color */
      .customBorderColors = true,
      .customBorderColorWithoutFormat = true,

      /* VK_EXT_color_write_enable */
      .colorWriteEnable = true,

      /* VK_EXT_image_2d_view_of_3d */
      .image2DViewOf3D = true,
      .sampler2DViewOf3D = true,

      /* VK_EXT_provoking_vertex */
      .provokingVertexLast = true,
      .transformFeedbackPreservesProvokingVertex = true,

      /* VK_EXT_multi_draw */
      .multiDraw = true,

      /* VK_EXT_depth_clip_enable */
      .depthClipEnable = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_CLAMP_ENABLE) != 0),

      /* VK_EXT_extended_dynamic_state2 */
      .extendedDynamicState2 = true,
      .extendedDynamicState2LogicOp = true,
      .extendedDynamicState2PatchControlPoints = true,

      /* VK_EXT_extended_dynamic_state3 */
      .extendedDynamicState3PolygonMode = true,
      .extendedDynamicState3TessellationDomainOrigin = true,
      .extendedDynamicState3DepthClampEnable = true,
      .extendedDynamicState3DepthClipEnable = true,
      .extendedDynamicState3LogicOpEnable = true,
      .extendedDynamicState3SampleMask = true,
      .extendedDynamicState3RasterizationSamples = true,
      .extendedDynamicState3AlphaToCoverageEnable = true,
      .extendedDynamicState3AlphaToOneEnable = true,
      .extendedDynamicState3DepthClipNegativeOneToOne = true,
      .extendedDynamicState3RasterizationStream = false,
      .extendedDynamicState3ConservativeRasterizationMode = false,
      .extendedDynamicState3ExtraPrimitiveOverestimationSize = false,
      .extendedDynamicState3LineRasterizationMode = true,
      .extendedDynamicState3LineStippleEnable = true,
      .extendedDynamicState3ProvokingVertexMode = true,
      .extendedDynamicState3SampleLocationsEnable = false,
      .extendedDynamicState3ColorBlendEnable = true,
      .extendedDynamicState3ColorBlendEquation = true,
      .extendedDynamicState3ColorWriteMask = true,
      .extendedDynamicState3ViewportWScalingEnable = false,
      .extendedDynamicState3ViewportSwizzle = false,
      .extendedDynamicState3ShadingRateImageEnable = false,
      .extendedDynamicState3CoverageToColorEnable = false,
      .extendedDynamicState3CoverageToColorLocation = false,
      .extendedDynamicState3CoverageModulationMode = false,
      .extendedDynamicState3CoverageModulationTableEnable = false,
      .extendedDynamicState3CoverageModulationTable = false,
      .extendedDynamicState3CoverageReductionMode = false,
      .extendedDynamicState3RepresentativeFragmentTestEnable = false,
      .extendedDynamicState3ColorBlendAdvanced = false,

      /* VK_EXT_dynamic_rendering_unused_attachments */
      .dynamicRenderingUnusedAttachments = true,

      /* VK_EXT_robustness2 */
      .robustBufferAccess2 = true,
      .robustImageAccess2 = true,
      .nullDescriptor = true,

      /* VK_NV_device_generated_commands */
      .deviceGeneratedCommands = true,

      /* VK_EXT_primitive_topology_list_restart */
      .primitiveTopologyListRestart = true,
      .primitiveTopologyPatchListRestart = true,

      /* VK_EXT_graphics_pipeline_library */
      .graphicsPipelineLibrary = true,

      /* VK_EXT_shader_atomic_float */
      .shaderBufferFloat32Atomics =    true,
      .shaderBufferFloat32AtomicAdd =  true,
      .shaderBufferFloat64Atomics =    false,
      .shaderBufferFloat64AtomicAdd =  false,
      .shaderSharedFloat32Atomics =    true,
      .shaderSharedFloat32AtomicAdd =  true,
      .shaderSharedFloat64Atomics =    false,
      .shaderSharedFloat64AtomicAdd =  false,
      .shaderImageFloat32Atomics =     true,
      .shaderImageFloat32AtomicAdd =   true,
      .sparseImageFloat32Atomics =     false,
      .sparseImageFloat32AtomicAdd =   false,

      /* VK_EXT_shader_atomic_float2 */
      .shaderBufferFloat16Atomics      = false,
      .shaderBufferFloat16AtomicAdd    = false,
      .shaderBufferFloat16AtomicMinMax = false,
      .shaderBufferFloat32AtomicMinMax = LLVM_VERSION_MAJOR >= 15,
      .shaderBufferFloat64AtomicMinMax = false,
      .shaderSharedFloat16Atomics      = false,
      .shaderSharedFloat16AtomicAdd    = false,
      .shaderSharedFloat16AtomicMinMax = false,
      .shaderSharedFloat32AtomicMinMax = LLVM_VERSION_MAJOR >= 15,
      .shaderSharedFloat64AtomicMinMax = false,
      .shaderImageFloat32AtomicMinMax  = LLVM_VERSION_MAJOR >= 15,
      .sparseImageFloat32AtomicMinMax  = false,

      /* VK_EXT_memory_priority */
      .memoryPriority = true,

      /* VK_EXT_pageable_device_local_memory */
      .pageableDeviceLocalMemory = true,

      /* VK_EXT_nested_command_buffer */
      .nestedCommandBuffer = true,
      .nestedCommandBufferRendering = true,
      .nestedCommandBufferSimultaneousUse = true,

      /* VK_KHR_dynamic_rendering_local_read */
      .dynamicRenderingLocalRead = true,

      /* VK_EXT_mesh_shader */
      .taskShader = true,
      .meshShader = true,
      .multiviewMeshShader = false,
      .primitiveFragmentShadingRateMeshShader = false,
      .meshShaderQueries = true,

      /* VK_EXT_host_image_copy */
      .hostImageCopy = true,

      /* VK_KHR_maintenance5 */
      .maintenance5 = true,

      /* VK_EXT_ycbcr_2plane_444_formats */
      .ycbcr2plane444Formats = true,

      /* VK_EXT_ycbcr_image_arrays */
      .ycbcrImageArrays = true,

      /* VK_KHR_maintenance6 */
      .maintenance6 = true,

      /* VK_KHR_shader_expect_assume */
      .shaderExpectAssume = true,

      /* VK_KHR_shader_maximal_reconvergence */
      .shaderMaximalReconvergence = true,

      /* VK_AMDX_shader_enqueue */
#ifdef VK_ENABLE_BETA_EXTENSIONS
      .shaderEnqueue = true,
#endif
   };
}
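
/*
 * The vk_features blob filled in above is stored as
 * device->vk.supported_features during physical-device init; the common Mesa
 * Vulkan runtime then answers vkGetPhysicalDeviceFeatures/Features2 from it,
 * so lavapipe needs no per-feature query code of its own.
 */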

extern unsigned lp_native_vector_width;

static VkImageLayout lvp_host_copy_image_layouts[] = {
   VK_IMAGE_LAYOUT_GENERAL,
   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
   VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
   VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
   VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
   VK_IMAGE_LAYOUT_PREINITIALIZED,
   VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
   VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
   VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
   VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
   VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
   VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
   VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL,
   VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL,
   VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
   VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR,
   VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR,
   VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR,
   VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
   VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
   VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
};
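
/*
 * llvmpipe images are plain host memory whose representation does not depend
 * on the Vulkan image layout, so the same list can be advertised for both the
 * copy-source and copy-destination sides of VK_EXT_host_image_copy
 * (pCopySrcLayouts / pCopyDstLayouts in lvp_get_properties() below).
 */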

static void
lvp_get_properties(const struct lvp_physical_device *device, struct vk_properties *p)
{
   VkSampleCountFlags sample_counts = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;

   uint64_t grid_size[3], block_size[3];
   uint64_t max_threads_per_block, max_local_size;

   device->pscreen->get_compute_param(device->pscreen, PIPE_SHADER_IR_NIR,
                                       PIPE_COMPUTE_CAP_MAX_GRID_SIZE, grid_size);
   device->pscreen->get_compute_param(device->pscreen, PIPE_SHADER_IR_NIR,
                                       PIPE_COMPUTE_CAP_MAX_BLOCK_SIZE, block_size);
   device->pscreen->get_compute_param(device->pscreen, PIPE_SHADER_IR_NIR,
                                       PIPE_COMPUTE_CAP_MAX_THREADS_PER_BLOCK,
                                       &max_threads_per_block);
   device->pscreen->get_compute_param(device->pscreen, PIPE_SHADER_IR_NIR,
                                       PIPE_COMPUTE_CAP_MAX_LOCAL_SIZE,
                                       &max_local_size);

   const uint64_t max_render_targets = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_RENDER_TARGETS);

   int texel_buffer_alignment = device->pscreen->get_param(device->pscreen, PIPE_CAP_TEXTURE_BUFFER_OFFSET_ALIGNMENT);

   STATIC_ASSERT(sizeof(struct lp_descriptor) <= 256);
   *p = (struct vk_properties) {
      /* Vulkan 1.0 */
      .apiVersion = LVP_API_VERSION,
      .driverVersion = 1,
      .vendorID = VK_VENDOR_ID_MESA,
      .deviceID = 0,
      .deviceType = VK_PHYSICAL_DEVICE_TYPE_CPU,
      .maxImageDimension1D                      = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
      .maxImageDimension2D                      = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
      .maxImageDimension3D                      = (1 << device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_3D_LEVELS)),
      .maxImageDimensionCube                    = (1 << device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_CUBE_LEVELS)),
      .maxImageArrayLayers                      = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_ARRAY_LAYERS),
      .maxTexelBufferElements                   = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXEL_BUFFER_ELEMENTS_UINT),
      .maxUniformBufferRange                    = min_shader_param(device->pscreen, PIPE_SHADER_CAP_MAX_CONST_BUFFER0_SIZE),
      .maxStorageBufferRange                    = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_SHADER_BUFFER_SIZE_UINT),
      .maxPushConstantsSize                     = MAX_PUSH_CONSTANTS_SIZE,
      .maxMemoryAllocationCount                 = UINT32_MAX,
      .maxSamplerAllocationCount                = 32 * 1024,
      .bufferImageGranularity                   = 64, /* A cache line */
      .sparseAddressSpaceSize                   = 0,
      .maxBoundDescriptorSets                   = MAX_SETS,
      .maxPerStageDescriptorSamplers            = MAX_DESCRIPTORS,
      .maxPerStageDescriptorUniformBuffers      = MAX_DESCRIPTORS,
      .maxPerStageDescriptorStorageBuffers      = MAX_DESCRIPTORS,
      .maxPerStageDescriptorSampledImages       = MAX_DESCRIPTORS,
      .maxPerStageDescriptorStorageImages       = MAX_DESCRIPTORS,
      .maxPerStageDescriptorInputAttachments    = MAX_DESCRIPTORS,
      .maxPerStageResources                     = MAX_DESCRIPTORS,
      .maxDescriptorSetSamplers                 = MAX_DESCRIPTORS,
      .maxDescriptorSetUniformBuffers           = MAX_DESCRIPTORS,
      .maxDescriptorSetUniformBuffersDynamic    = MAX_DESCRIPTORS,
      .maxDescriptorSetStorageBuffers           = MAX_DESCRIPTORS,
      .maxDescriptorSetStorageBuffersDynamic    = MAX_DESCRIPTORS,
      .maxDescriptorSetSampledImages            = MAX_DESCRIPTORS,
      .maxDescriptorSetStorageImages            = MAX_DESCRIPTORS,
      .maxDescriptorSetInputAttachments         = MAX_DESCRIPTORS,
      .maxVertexInputAttributes                 = 32,
      .maxVertexInputBindings                   = 32,
      .maxVertexInputAttributeOffset            = 2047,
      .maxVertexInputBindingStride              = 2048,
      .maxVertexOutputComponents                = 128,
      .maxTessellationGenerationLevel           = 64,
      .maxTessellationPatchSize                 = 32,
      .maxTessellationControlPerVertexInputComponents = 128,
      .maxTessellationControlPerVertexOutputComponents = 128,
      .maxTessellationControlPerPatchOutputComponents = 128,
      .maxTessellationControlTotalOutputComponents = 4096,
      .maxTessellationEvaluationInputComponents = 128,
      .maxTessellationEvaluationOutputComponents = 128,
      .maxGeometryShaderInvocations             = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_GS_INVOCATIONS),
      .maxGeometryInputComponents               = 64,
      .maxGeometryOutputComponents              = 128,
      .maxGeometryOutputVertices                = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_GEOMETRY_OUTPUT_VERTICES),
      .maxGeometryTotalOutputComponents         = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS),
      .maxFragmentInputComponents               = 128,
      .maxFragmentOutputAttachments             = 8,
      .maxFragmentDualSrcAttachments            = 2,
      .maxFragmentCombinedOutputResources       = max_render_targets +
                                                  device->pscreen->get_shader_param(device->pscreen, MESA_SHADER_FRAGMENT,
                                                     PIPE_SHADER_CAP_MAX_SHADER_BUFFERS) +
                                                  device->pscreen->get_shader_param(device->pscreen, MESA_SHADER_FRAGMENT,
                                                     PIPE_SHADER_CAP_MAX_SHADER_IMAGES),
      .maxComputeSharedMemorySize               = max_local_size,
      .maxComputeWorkGroupCount                 = { grid_size[0], grid_size[1], grid_size[2] },
      .maxComputeWorkGroupInvocations           = max_threads_per_block,
      .maxComputeWorkGroupSize                  = { block_size[0], block_size[1], block_size[2] },
      .subPixelPrecisionBits                    = device->pscreen->get_param(device->pscreen, PIPE_CAP_RASTERIZER_SUBPIXEL_BITS),
      .subTexelPrecisionBits                    = 8,
      .mipmapPrecisionBits                      = 4,
      .maxDrawIndexedIndexValue                 = UINT32_MAX,
      .maxDrawIndirectCount                     = UINT32_MAX,
      .maxSamplerLodBias                        = 16,
      .maxSamplerAnisotropy                     = 16,
      .maxViewports                             = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_VIEWPORTS),
      .maxViewportDimensions                    = { (1 << 14), (1 << 14) },
      .viewportBoundsRange                      = { -32768.0, 32768.0 },
      .viewportSubPixelBits                     = device->pscreen->get_param(device->pscreen, PIPE_CAP_VIEWPORT_SUBPIXEL_BITS),
      .minMemoryMapAlignment                    = device->pscreen->get_param(device->pscreen, PIPE_CAP_MIN_MAP_BUFFER_ALIGNMENT),
      .minTexelBufferOffsetAlignment            = device->pscreen->get_param(device->pscreen, PIPE_CAP_TEXTURE_BUFFER_OFFSET_ALIGNMENT),
      .minUniformBufferOffsetAlignment          = device->pscreen->get_param(device->pscreen, PIPE_CAP_CONSTANT_BUFFER_OFFSET_ALIGNMENT),
      .minStorageBufferOffsetAlignment          = device->pscreen->get_param(device->pscreen, PIPE_CAP_SHADER_BUFFER_OFFSET_ALIGNMENT),
      .minTexelOffset                           = device->pscreen->get_param(device->pscreen, PIPE_CAP_MIN_TEXEL_OFFSET),
      .maxTexelOffset                           = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXEL_OFFSET),
      .minTexelGatherOffset                     = device->pscreen->get_param(device->pscreen, PIPE_CAP_MIN_TEXTURE_GATHER_OFFSET),
      .maxTexelGatherOffset                     = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_GATHER_OFFSET),
      .minInterpolationOffset                   = -2, /* FIXME */
      .maxInterpolationOffset                   = 2, /* FIXME */
      .subPixelInterpolationOffsetBits          = 8, /* FIXME */
      .maxFramebufferWidth                      = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
      .maxFramebufferHeight                     = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
      .maxFramebufferLayers                     = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_ARRAY_LAYERS),
      .framebufferColorSampleCounts             = sample_counts,
      .framebufferDepthSampleCounts             = sample_counts,
      .framebufferStencilSampleCounts           = sample_counts,
      .framebufferNoAttachmentsSampleCounts     = sample_counts,
      .maxColorAttachments                      = max_render_targets,
      .sampledImageColorSampleCounts            = sample_counts,
      .sampledImageIntegerSampleCounts          = sample_counts,
      .sampledImageDepthSampleCounts            = sample_counts,
      .sampledImageStencilSampleCounts          = sample_counts,
      .storageImageSampleCounts                 = sample_counts,
      .maxSampleMaskWords                       = 1,
      .timestampComputeAndGraphics              = true,
      .timestampPeriod                          = 1,
      .maxClipDistances                         = 8,
      .maxCullDistances                         = 8,
      .maxCombinedClipAndCullDistances          = 8,
      .discreteQueuePriorities                  = 2,
      .pointSizeRange                           = { 0.0, device->pscreen->get_paramf(device->pscreen, PIPE_CAPF_MAX_POINT_SIZE) },
      .lineWidthRange                           = { 1.0, device->pscreen->get_paramf(device->pscreen, PIPE_CAPF_MAX_LINE_WIDTH) },
      .pointSizeGranularity                     = (1.0 / 8.0),
      .lineWidthGranularity                     = 1.0 / 128.0,
      .strictLines                              = true,
      .standardSampleLocations                  = true,
      .optimalBufferCopyOffsetAlignment         = 128,
      .optimalBufferCopyRowPitchAlignment       = 128,
      .nonCoherentAtomSize                      = 64,

      /* Vulkan 1.1 */
      /* The LUID is for Windows. */
      .deviceLUIDValid = false,
      .deviceNodeMask = 0,

      .subgroupSize = lp_native_vector_width / 32,
      .subgroupSupportedStages = VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT | VK_SHADER_STAGE_TASK_BIT_EXT | VK_SHADER_STAGE_MESH_BIT_EXT,
      .subgroupSupportedOperations = VK_SUBGROUP_FEATURE_BASIC_BIT | VK_SUBGROUP_FEATURE_VOTE_BIT | VK_SUBGROUP_FEATURE_ARITHMETIC_BIT | VK_SUBGROUP_FEATURE_BALLOT_BIT,
      .subgroupQuadOperationsInAllStages = false,

      .pointClippingBehavior = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
      .maxMultiviewViewCount = 6,
      .maxMultiviewInstanceIndex = INT_MAX,
      .protectedNoFault = false,
      .maxPerSetDescriptors = MAX_DESCRIPTORS,
      .maxMemoryAllocationSize = (1u << 31),

      /* Vulkan 1.2 */
      .driverID = VK_DRIVER_ID_MESA_LLVMPIPE,

      .conformanceVersion = (VkConformanceVersion){
         .major = 1,
         .minor = 3,
         .subminor = 1,
         .patch = 1,
      },

      .denormBehaviorIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
      .roundingModeIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
      .shaderDenormFlushToZeroFloat16 = false,
      .shaderDenormPreserveFloat16 = false,
      .shaderRoundingModeRTEFloat16 = true,
      .shaderRoundingModeRTZFloat16 = false,
      .shaderSignedZeroInfNanPreserveFloat16 = true,

      .shaderDenormFlushToZeroFloat32 = false,
      .shaderDenormPreserveFloat32 = false,
      .shaderRoundingModeRTEFloat32 = true,
      .shaderRoundingModeRTZFloat32 = false,
      .shaderSignedZeroInfNanPreserveFloat32 = true,

      .shaderDenormFlushToZeroFloat64 = false,
      .shaderDenormPreserveFloat64 = false,
      .shaderRoundingModeRTEFloat64 = true,
      .shaderRoundingModeRTZFloat64 = false,
      .shaderSignedZeroInfNanPreserveFloat64 = true,

      .maxUpdateAfterBindDescriptorsInAllPools = UINT32_MAX,
      .shaderUniformBufferArrayNonUniformIndexingNative = true,
      .shaderSampledImageArrayNonUniformIndexingNative = true,
      .shaderStorageBufferArrayNonUniformIndexingNative = true,
      .shaderStorageImageArrayNonUniformIndexingNative = true,
      .shaderInputAttachmentArrayNonUniformIndexingNative = true,
      .robustBufferAccessUpdateAfterBind = true,
      .quadDivergentImplicitLod = true,
      .maxPerStageDescriptorUpdateAfterBindSamplers = MAX_DESCRIPTORS,
      .maxPerStageDescriptorUpdateAfterBindUniformBuffers = MAX_DESCRIPTORS,
      .maxPerStageDescriptorUpdateAfterBindStorageBuffers = MAX_DESCRIPTORS,
      .maxPerStageDescriptorUpdateAfterBindSampledImages = MAX_DESCRIPTORS,
      .maxPerStageDescriptorUpdateAfterBindStorageImages = MAX_DESCRIPTORS,
      .maxPerStageDescriptorUpdateAfterBindInputAttachments = MAX_DESCRIPTORS,
      .maxPerStageUpdateAfterBindResources = MAX_DESCRIPTORS,
      .maxDescriptorSetUpdateAfterBindSamplers = MAX_DESCRIPTORS,
      .maxDescriptorSetUpdateAfterBindUniformBuffers = MAX_DESCRIPTORS,
      .maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = MAX_DESCRIPTORS,
      .maxDescriptorSetUpdateAfterBindStorageBuffers = MAX_DESCRIPTORS,
      .maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = MAX_DESCRIPTORS,
      .maxDescriptorSetUpdateAfterBindSampledImages = MAX_DESCRIPTORS,
      .maxDescriptorSetUpdateAfterBindStorageImages = MAX_DESCRIPTORS,
      .maxDescriptorSetUpdateAfterBindInputAttachments = MAX_DESCRIPTORS,

      .supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT | VK_RESOLVE_MODE_AVERAGE_BIT,
      .supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
      .independentResolveNone = false,
      .independentResolve = false,

      .filterMinmaxImageComponentMapping = true,
      .filterMinmaxSingleComponentFormats = true,

      .maxTimelineSemaphoreValueDifference = UINT64_MAX,
      .framebufferIntegerColorSampleCounts = VK_SAMPLE_COUNT_1_BIT,

      /* Vulkan 1.3 */
      .minSubgroupSize = lp_native_vector_width / 32,
      .maxSubgroupSize = lp_native_vector_width / 32,
      .maxComputeWorkgroupSubgroups = 32,
      .requiredSubgroupSizeStages = VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT,
      .maxInlineUniformTotalSize = MAX_DESCRIPTOR_UNIFORM_BLOCK_SIZE * MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS * MAX_SETS,
      .maxInlineUniformBlockSize = MAX_DESCRIPTOR_UNIFORM_BLOCK_SIZE,
      .maxPerStageDescriptorInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS,
      .maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS,
      .maxDescriptorSetInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS,
      .maxDescriptorSetUpdateAfterBindInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS,
      .storageTexelBufferOffsetAlignmentBytes = texel_buffer_alignment,
      .storageTexelBufferOffsetSingleTexelAlignment = true,
      .uniformTexelBufferOffsetAlignmentBytes = texel_buffer_alignment,
      .uniformTexelBufferOffsetSingleTexelAlignment = true,
      .maxBufferSize = UINT32_MAX,

      /* VK_KHR_push_descriptor */
      .maxPushDescriptors = MAX_PUSH_DESCRIPTORS,

      /* VK_EXT_host_image_copy */
      .pCopySrcLayouts = lvp_host_copy_image_layouts,
      .copySrcLayoutCount = ARRAY_SIZE(lvp_host_copy_image_layouts),
      .pCopyDstLayouts = lvp_host_copy_image_layouts,
      .copyDstLayoutCount = ARRAY_SIZE(lvp_host_copy_image_layouts),
      .identicalMemoryTypeRequirements = VK_FALSE,

      /* VK_EXT_transform_feedback */
      .maxTransformFeedbackStreams = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_VERTEX_STREAMS),
      .maxTransformFeedbackBuffers = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS),
      .maxTransformFeedbackBufferSize = UINT32_MAX,
      .maxTransformFeedbackStreamDataSize = 512,
      .maxTransformFeedbackBufferDataSize = 512,
      .maxTransformFeedbackBufferDataStride = 512,
      .transformFeedbackQueries = true,
      .transformFeedbackStreamsLinesTriangles = false,
      .transformFeedbackRasterizationStreamSelect = false,
      .transformFeedbackDraw = true,

      /* VK_KHR_maintenance5 */
      /* FIXME No idea about most of these ones. */
      .earlyFragmentMultisampleCoverageAfterSampleCounting = true,
      .earlyFragmentSampleMaskTestBeforeSampleCounting = false,
      .depthStencilSwizzleOneSupport = false,
      .polygonModePointSize = true, /* This one is correct. */
      .nonStrictSinglePixelWideLinesUseParallelogram = false,
      .nonStrictWideLinesUseParallelogram = false,

      /* VK_KHR_maintenance6 */
      .maxCombinedImageSamplerDescriptorCount = 3,

      /* VK_EXT_extended_dynamic_state3 */
      .dynamicPrimitiveTopologyUnrestricted = VK_TRUE,

      /* VK_EXT_line_rasterization */
      .lineSubPixelPrecisionBits = device->pscreen->get_param(device->pscreen, PIPE_CAP_RASTERIZER_SUBPIXEL_BITS),

      /* VK_NV_device_generated_commands */
      .maxGraphicsShaderGroupCount = 1<<12,
      .maxIndirectSequenceCount = 1<<20,
      .maxIndirectCommandsTokenCount = MAX_DGC_TOKENS,
      .maxIndirectCommandsStreamCount = MAX_DGC_STREAMS,
      .maxIndirectCommandsTokenOffset = 2047,
      .maxIndirectCommandsStreamStride = 2048,
      .minSequencesCountBufferOffsetAlignment = 4,
      .minSequencesIndexBufferOffsetAlignment = 4,
      .minIndirectCommandsBufferOffsetAlignment = 4,

      /* VK_EXT_external_memory_host */
      .minImportedHostPointerAlignment = 4096,

      /* VK_EXT_custom_border_color */
      .maxCustomBorderColorSamplers = 32 * 1024,

      /* VK_EXT_provoking_vertex */
      .provokingVertexModePerPipeline = true,
      .transformFeedbackPreservesTriangleFanProvokingVertex = true,

      /* VK_EXT_multi_draw */
      .maxMultiDrawCount = 2048,

      /* VK_EXT_descriptor_buffer */
      .combinedImageSamplerDescriptorSingleArray = VK_TRUE,
      .bufferlessPushDescriptors = VK_TRUE,
      .descriptorBufferOffsetAlignment = 4,
      .maxDescriptorBufferBindings = MAX_SETS,
      .maxResourceDescriptorBufferBindings = MAX_SETS,
      .maxSamplerDescriptorBufferBindings = MAX_SETS,
      .maxEmbeddedImmutableSamplerBindings = MAX_SETS,
      .maxEmbeddedImmutableSamplers = 2032,
      .bufferCaptureReplayDescriptorDataSize = 0,
      .imageCaptureReplayDescriptorDataSize = 0,
      .imageViewCaptureReplayDescriptorDataSize = 0,
      .samplerCaptureReplayDescriptorDataSize = 0,
      .accelerationStructureCaptureReplayDescriptorDataSize = 0,
      .samplerDescriptorSize = sizeof(struct lp_descriptor),
      .combinedImageSamplerDescriptorSize = sizeof(struct lp_descriptor),
      .sampledImageDescriptorSize = sizeof(struct lp_descriptor),
      .storageImageDescriptorSize = sizeof(struct lp_descriptor),
      .uniformTexelBufferDescriptorSize = sizeof(struct lp_descriptor),
      .robustUniformTexelBufferDescriptorSize = sizeof(struct lp_descriptor),
      .storageTexelBufferDescriptorSize = sizeof(struct lp_descriptor),
      .robustStorageTexelBufferDescriptorSize = sizeof(struct lp_descriptor),
      .uniformBufferDescriptorSize = sizeof(struct lp_descriptor),
      .robustUniformBufferDescriptorSize = sizeof(struct lp_descriptor),
      .storageBufferDescriptorSize = sizeof(struct lp_descriptor),
      .robustStorageBufferDescriptorSize = sizeof(struct lp_descriptor),
      .inputAttachmentDescriptorSize = sizeof(struct lp_descriptor),
      .accelerationStructureDescriptorSize = 0,
      .maxSamplerDescriptorBufferRange = 1<<27, /* spec minimum */
      .maxResourceDescriptorBufferRange = 1<<28, /* spec minimum */
      .resourceDescriptorBufferAddressSpaceSize = 1<<27, /* spec minimum */
      .samplerDescriptorBufferAddressSpaceSize = 1<<27, /* spec minimum */
      .descriptorBufferAddressSpaceSize = 1<<27, /* spec minimum */

      /* VK_EXT_graphics_pipeline_library */
      .graphicsPipelineLibraryFastLinking = VK_TRUE,
      .graphicsPipelineLibraryIndependentInterpolationDecoration = VK_TRUE,

      /* VK_EXT_robustness2 */
      .robustStorageBufferAccessSizeAlignment = 1,
      .robustUniformBufferAccessSizeAlignment = 1,

      /* VK_EXT_mesh_shader */
      .maxTaskWorkGroupTotalCount = 4194304,
      .maxTaskWorkGroupCount[0] = 65536,
      .maxTaskWorkGroupCount[1] = 65536,
      .maxTaskWorkGroupCount[2] = 65536,
      .maxTaskWorkGroupInvocations = 1024,
      .maxTaskWorkGroupSize[0] = 1024,
      .maxTaskWorkGroupSize[1] = 1024,
      .maxTaskWorkGroupSize[2] = 1024,
      .maxTaskPayloadSize = 16384,
      .maxTaskSharedMemorySize = 32768,
      .maxTaskPayloadAndSharedMemorySize = 32768,

      .maxMeshWorkGroupTotalCount = 4194304,
      .maxMeshWorkGroupCount[0] = 65536,
      .maxMeshWorkGroupCount[1] = 65536,
      .maxMeshWorkGroupCount[2] = 65536,
      .maxMeshWorkGroupInvocations = 1024,
      .maxMeshWorkGroupSize[0] = 1024,
      .maxMeshWorkGroupSize[1] = 1024,
      .maxMeshWorkGroupSize[2] = 1024,
      .maxMeshOutputMemorySize = 32768,   /* 32K min required */
      .maxMeshSharedMemorySize = 28672,   /* 28K min required */
      .maxMeshOutputComponents = 128,     /* 32x vec4 min required */
      .maxMeshOutputVertices = 256,
      .maxMeshOutputPrimitives = 256,
      .maxMeshOutputLayers = 8,
      .meshOutputPerVertexGranularity = 1,
      .meshOutputPerPrimitiveGranularity = 1,
      .maxPreferredTaskWorkGroupInvocations = 64,
      .maxPreferredMeshWorkGroupInvocations = 128,
      .prefersLocalInvocationVertexOutput = true,
      .prefersLocalInvocationPrimitiveOutput = true,
      .prefersCompactVertexOutput = true,
      .prefersCompactPrimitiveOutput = false,

      /* VK_AMDX_shader_enqueue */
#ifdef VK_ENABLE_BETA_EXTENSIONS
      .maxExecutionGraphDepth = 32,
      .maxExecutionGraphShaderOutputNodes = LVP_MAX_EXEC_GRAPH_PAYLOADS,
      .maxExecutionGraphShaderPayloadSize = 0xFFFF,
      .maxExecutionGraphShaderPayloadCount = LVP_MAX_EXEC_GRAPH_PAYLOADS,
      .executionGraphDispatchAddressAlignment = 4,
#endif
   };

   /* Vulkan 1.0 */
   strcpy(p->deviceName, device->pscreen->get_name(device->pscreen));
   lvp_device_get_cache_uuid(p->pipelineCacheUUID);

   /* Vulkan 1.1 */
   device->pscreen->get_device_uuid(device->pscreen, (char*)(p->deviceUUID));
   device->pscreen->get_driver_uuid(device->pscreen, (char*)(p->driverUUID));
   memset(p->deviceLUID, 0, VK_LUID_SIZE);

#if LLVM_VERSION_MAJOR >= 10
   p->subgroupSupportedOperations |= VK_SUBGROUP_FEATURE_SHUFFLE_BIT | VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT | VK_SUBGROUP_FEATURE_QUAD_BIT;
#endif

   /* Vulkan 1.2 */
   snprintf(p->driverName, VK_MAX_DRIVER_NAME_SIZE, "llvmpipe");
   snprintf(p->driverInfo, VK_MAX_DRIVER_INFO_SIZE, "Mesa " PACKAGE_VERSION MESA_GIT_SHA1
#ifdef MESA_LLVM_VERSION_STRING
            " (LLVM " MESA_LLVM_VERSION_STRING ")"
#endif
           );

   /* VK_EXT_nested_command_buffer */
   p->maxCommandBufferNestingLevel = UINT32_MAX;

   /* VK_EXT_host_image_copy */
   lvp_device_get_cache_uuid(p->optimalTilingLayoutUUID);

   /* VK_EXT_vertex_attribute_divisor */
   if (device->pscreen->get_param(device->pscreen, PIPE_CAP_VERTEX_ELEMENT_INSTANCE_DIVISOR) != 0)
      p->maxVertexAttribDivisor = UINT32_MAX;
   else
      p->maxVertexAttribDivisor = 1;

   /* VK_KHR_maintenance6 */
   p->blockTexelViewCompatibleMultipleLayers = true;

   /* VK_EXT_shader_object */
   /* this is basically unsupported */
   lvp_device_get_cache_uuid(p->shaderBinaryUUID);
   p->shaderBinaryVersion = 1;

   /* VK_EXT_mesh_shader */
   p->maxMeshPayloadAndSharedMemorySize = p->maxTaskPayloadSize + p->maxMeshSharedMemorySize; /* 28K min required */
   p->maxMeshPayloadAndOutputMemorySize = p->maxTaskPayloadSize + p->maxMeshOutputMemorySize; /* 47K min required */
}
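
/*
 * The assignments after the big designated initializer in lvp_get_properties()
 * cover properties that need information gathered at runtime (device name,
 * UUIDs, LLVM-dependent subgroup operations) or that are derived from limits
 * computed just above (the mesh payload/memory sums).
 */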
1102 
1103 static VkResult VKAPI_CALL
1104 lvp_physical_device_init(struct lvp_physical_device *device,
1105                          struct lvp_instance *instance,
1106                          struct pipe_loader_device *pld)
1107 {
1108    VkResult result;
1109 
1110    struct vk_physical_device_dispatch_table dispatch_table;
1111    vk_physical_device_dispatch_table_from_entrypoints(
1112       &dispatch_table, &lvp_physical_device_entrypoints, true);
1113    vk_physical_device_dispatch_table_from_entrypoints(
1114       &dispatch_table, &wsi_physical_device_entrypoints, false);
1115    result = vk_physical_device_init(&device->vk, &instance->vk,
1116                                     NULL, NULL, NULL, &dispatch_table);
1117    if (result != VK_SUCCESS) {
1118       vk_error(instance, result);
1119       goto fail;
1120    }
1121    device->pld = pld;
1122 
1123    device->pscreen = pipe_loader_create_screen_vk(device->pld, true);
1124    if (!device->pscreen)
1125       return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1126    for (unsigned i = 0; i < ARRAY_SIZE(device->drv_options); i++)
1127       device->drv_options[i] = device->pscreen->get_compiler_options(device->pscreen, PIPE_SHADER_IR_NIR, i);
1128 
1129    device->sync_timeline_type = vk_sync_timeline_get_type(&lvp_pipe_sync_type);
1130    device->sync_types[0] = &lvp_pipe_sync_type;
1131    device->sync_types[1] = &device->sync_timeline_type.sync;
1132    device->sync_types[2] = NULL;
1133    device->vk.supported_sync_types = device->sync_types;
1134 
1135    device->max_images = device->pscreen->get_shader_param(device->pscreen, MESA_SHADER_FRAGMENT, PIPE_SHADER_CAP_MAX_SHADER_IMAGES);
1136    device->vk.supported_extensions = lvp_device_extensions_supported;
1137    lvp_get_features(device, &device->vk.supported_features);
1138    lvp_get_properties(device, &device->vk.properties);
1139 
1140    result = lvp_init_wsi(device);
1141    if (result != VK_SUCCESS) {
1142       vk_physical_device_finish(&device->vk);
1143       vk_error(instance, result);
1144       goto fail;
1145    }
1146 
1147    return VK_SUCCESS;
1148  fail:
1149    return result;
1150 }
1151 
1152 static void VKAPI_CALL
1153 lvp_physical_device_finish(struct lvp_physical_device *device)
1154 {
1155    lvp_finish_wsi(device);
1156    device->pscreen->destroy(device->pscreen);
1157    vk_physical_device_finish(&device->vk);
1158 }
1159 
1160 static void
1161 lvp_destroy_physical_device(struct vk_physical_device *device)
1162 {
1163    lvp_physical_device_finish((struct lvp_physical_device *)device);
1164    vk_free(&device->instance->alloc, device);
1165 }
1166 
1167 static VkResult
1168 lvp_enumerate_physical_devices(struct vk_instance *vk_instance);
1169 
1170 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateInstance(
1171    const VkInstanceCreateInfo*                 pCreateInfo,
1172    const VkAllocationCallbacks*                pAllocator,
1173    VkInstance*                                 pInstance)
1174 {
1175    struct lvp_instance *instance;
1176    VkResult result;
1177 
1178    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
1179 
1180    if (pAllocator == NULL)
1181       pAllocator = vk_default_allocator();
1182 
1183    instance = vk_zalloc(pAllocator, sizeof(*instance), 8,
1184                         VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1185    if (!instance)
1186       return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
1187 
1188    struct vk_instance_dispatch_table dispatch_table;
1189    vk_instance_dispatch_table_from_entrypoints(
1190       &dispatch_table, &lvp_instance_entrypoints, true);
1191    vk_instance_dispatch_table_from_entrypoints(
1192       &dispatch_table, &wsi_instance_entrypoints, false);
1193 
1194    result = vk_instance_init(&instance->vk,
1195                              &lvp_instance_extensions_supported,
1196                              &dispatch_table,
1197                              pCreateInfo,
1198                              pAllocator);
1199    if (result != VK_SUCCESS) {
1200       vk_free(pAllocator, instance);
1201       return vk_error(NULL, result);
1202    }
1203 
1204    instance->apiVersion = LVP_API_VERSION;
1205 
1206    instance->vk.physical_devices.enumerate = lvp_enumerate_physical_devices;
1207    instance->vk.physical_devices.destroy = lvp_destroy_physical_device;
1208 
1209    //   _mesa_locale_init();
1210    //   VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
1211 
1212    *pInstance = lvp_instance_to_handle(instance);
1213 
1214    return VK_SUCCESS;
1215 }
1216 
1217 VKAPI_ATTR void VKAPI_CALL lvp_DestroyInstance(
1218    VkInstance                                  _instance,
1219    const VkAllocationCallbacks*                pAllocator)
1220 {
1221    LVP_FROM_HANDLE(lvp_instance, instance, _instance);
1222 
1223    if (!instance)
1224       return;
1225    //   _mesa_locale_fini();
1226 
1227    pipe_loader_release(&instance->devs, instance->num_devices);
1228 
1229    vk_instance_finish(&instance->vk);
1230    vk_free(&instance->vk.alloc, instance);
1231 }
1232 
1233 #if defined(HAVE_DRI)
1234 static void lvp_get_image(struct dri_drawable *dri_drawable,
1235                           int x, int y, unsigned width, unsigned height, unsigned stride,
1236                           void *data)
1237 {
1238 
1239 }
1240 
1241 static void lvp_put_image(struct dri_drawable *dri_drawable,
1242                           void *data, unsigned width, unsigned height)
1243 {
1244    fprintf(stderr, "put image %dx%d\n", width, height);
1245 }
1246 
1247 static void lvp_put_image2(struct dri_drawable *dri_drawable,
1248                            void *data, int x, int y, unsigned width, unsigned height,
1249                            unsigned stride)
1250 {
1251    fprintf(stderr, "put image 2 %d,%d %dx%d\n", x, y, width, height);
1252 }
1253 
1254 static struct drisw_loader_funcs lvp_sw_lf = {
1255    .get_image = lvp_get_image,
1256    .put_image = lvp_put_image,
1257    .put_image2 = lvp_put_image2,
1258 };
1259 #endif
1260 
1261 static VkResult
1262 lvp_enumerate_physical_devices(struct vk_instance *vk_instance)
1263 {
1264    struct lvp_instance *instance =
1265       container_of(vk_instance, struct lvp_instance, vk);
1266 
1267    /* sw only for now */
1268    instance->num_devices = pipe_loader_sw_probe(NULL, 0);
1269 
1270    assert(instance->num_devices == 1);
1271 
1272 #if defined(HAVE_DRI)
1273    pipe_loader_sw_probe_dri(&instance->devs, &lvp_sw_lf);
1274 #else
1275    pipe_loader_sw_probe_null(&instance->devs);
1276 #endif
1277 
1278    struct lvp_physical_device *device =
1279       vk_zalloc2(&instance->vk.alloc, NULL, sizeof(*device), 8,
1280                  VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1281    if (!device)
1282       return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1283 
1284    VkResult result = lvp_physical_device_init(device, instance, &instance->devs[0]);
1285    if (result == VK_SUCCESS)
1286       list_addtail(&device->vk.link, &instance->vk.physical_devices.list);
1287    else
1288       vk_free(&vk_instance->alloc, device);
1289 
1290    return result;
1291 }
1292 
1293 void
1294 lvp_device_get_cache_uuid(void *uuid)
1295 {
1296    memset(uuid, 'a', VK_UUID_SIZE);
1297    if (MESA_GIT_SHA1[0])
1298       /* debug build */
1299       memcpy(uuid, &MESA_GIT_SHA1[4], MIN2(strlen(MESA_GIT_SHA1) - 4, VK_UUID_SIZE));
1300    else
1301       /* release build */
1302       memcpy(uuid, PACKAGE_VERSION, MIN2(strlen(PACKAGE_VERSION), VK_UUID_SIZE));
1303 }
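
/*
 * Worked example of the UUID scheme above (hypothetical version string, for
 * illustration only): the buffer is first filled with 'a' bytes and the
 * leading bytes are then overwritten with up to VK_UUID_SIZE characters of
 * either the git SHA suffix (debug builds) or PACKAGE_VERSION (release
 * builds). With a hypothetical PACKAGE_VERSION of "24.1.0" the UUID would
 * read "24.1.0" followed by ten 'a' padding bytes, so it changes whenever
 * the build changes, which is all the cache/layout UUID consumers need.
 */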
1304 
1305 VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceQueueFamilyProperties2(
1306    VkPhysicalDevice                            physicalDevice,
1307    uint32_t*                                   pCount,
1308    VkQueueFamilyProperties2                   *pQueueFamilyProperties)
1309 {
1310    VK_OUTARRAY_MAKE_TYPED(VkQueueFamilyProperties2, out, pQueueFamilyProperties, pCount);
1311 
1312    vk_outarray_append_typed(VkQueueFamilyProperties2, &out, p) {
1313       p->queueFamilyProperties = (VkQueueFamilyProperties) {
1314          .queueFlags = VK_QUEUE_GRAPHICS_BIT |
1315          VK_QUEUE_COMPUTE_BIT |
1316          VK_QUEUE_TRANSFER_BIT,
1317          .queueCount = 1,
1318          .timestampValidBits = 64,
1319          .minImageTransferGranularity = (VkExtent3D) { 1, 1, 1 },
1320       };
1321    }
1322 }
1323 
1324 VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceMemoryProperties(
1325    VkPhysicalDevice                            physicalDevice,
1326    VkPhysicalDeviceMemoryProperties*           pMemoryProperties)
1327 {
1328    pMemoryProperties->memoryTypeCount = 1;
1329    pMemoryProperties->memoryTypes[0] = (VkMemoryType) {
1330       .propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
1331       VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1332       VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
1333       VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
1334       .heapIndex = 0,
1335    };
1336 
1337    VkDeviceSize low_size = 3ULL*1024*1024*1024;
1338    VkDeviceSize total_size;
1339    os_get_total_physical_memory(&total_size);
1340    pMemoryProperties->memoryHeapCount = 1;
1341    pMemoryProperties->memoryHeaps[0] = (VkMemoryHeap) {
1342       .size = low_size,
1343       .flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
1344    };
1345    if (sizeof(void*) > sizeof(uint32_t))
1346       pMemoryProperties->memoryHeaps[0].size = total_size;
1347 }
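
/*
 * Worked example of the heap sizing above (hypothetical host, for
 * illustration only): on a 64-bit build running on a machine with 16 GiB of
 * RAM, os_get_total_physical_memory() makes the single advertised heap
 * roughly 16 GiB; on a 32-bit build (sizeof(void*) == sizeof(uint32_t)) the
 * heap stays at the 3 GiB fallback, which still fits in a 32-bit process
 * address space.
 */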
1348 
1349 VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceMemoryProperties2(
1350    VkPhysicalDevice                            physicalDevice,
1351    VkPhysicalDeviceMemoryProperties2          *pMemoryProperties)
1352 {
1353    lvp_GetPhysicalDeviceMemoryProperties(physicalDevice,
1354                                          &pMemoryProperties->memoryProperties);
1355    VkPhysicalDeviceMemoryBudgetPropertiesEXT *props = vk_find_struct(pMemoryProperties, PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT);
1356    if (props) {
1357       props->heapBudget[0] = pMemoryProperties->memoryProperties.memoryHeaps[0].size;
1358       os_get_available_system_memory(&props->heapUsage[0]);
1359       props->heapUsage[0] = props->heapBudget[0] - props->heapUsage[0];
1360       memset(&props->heapBudget[1], 0, sizeof(props->heapBudget[0]) * (VK_MAX_MEMORY_HEAPS - 1));
1361       memset(&props->heapUsage[1], 0, sizeof(props->heapUsage[0]) * (VK_MAX_MEMORY_HEAPS - 1));
1362    }
1363 }
1364 
1365 VKAPI_ATTR VkResult VKAPI_CALL
1366 lvp_GetMemoryHostPointerPropertiesEXT(
1367    VkDevice _device,
1368    VkExternalMemoryHandleTypeFlagBits handleType,
1369    const void *pHostPointer,
1370    VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1371 {
1372    switch (handleType) {
1373    case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT: {
1374       pMemoryHostPointerProperties->memoryTypeBits = 1;
1375       return VK_SUCCESS;
1376    }
1377    default:
1378       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1379    }
1380 }
1381 
1382 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL lvp_GetInstanceProcAddr(
1383    VkInstance                                  _instance,
1384    const char*                                 pName)
1385 {
1386    VK_FROM_HANDLE(vk_instance, instance, _instance);
1387    return vk_instance_get_proc_addr(instance,
1388                                     &lvp_instance_entrypoints,
1389                                     pName);
1390 }
1391 
1392 /* Windows will use a dll definition file to avoid build errors. */
1393 #ifdef _WIN32
1394 #undef PUBLIC
1395 #define PUBLIC
1396 #endif
1397 
1398 /* The loader wants us to expose a second GetInstanceProcAddr function
1399  * to work around certain LD_PRELOAD issues seen in apps.
1400  */
1401 PUBLIC
1402 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(
1403    VkInstance                                  instance,
1404    const char*                                 pName)
1405 {
1406    return lvp_GetInstanceProcAddr(instance, pName);
1407 }
1408 
1409 static void
1410 destroy_pipelines(struct lvp_queue *queue)
1411 {
1412    simple_mtx_lock(&queue->lock);
1413    while (util_dynarray_contains(&queue->pipeline_destroys, struct lvp_pipeline*)) {
1414       lvp_pipeline_destroy(queue->device, util_dynarray_pop(&queue->pipeline_destroys, struct lvp_pipeline*), true);
1415    }
1416    simple_mtx_unlock(&queue->lock);
1417 }
1418 
1419 static VkResult
1420 lvp_queue_submit(struct vk_queue *vk_queue,
1421                  struct vk_queue_submit *submit)
1422 {
1423    struct lvp_queue *queue = container_of(vk_queue, struct lvp_queue, vk);
1424 
1425    VkResult result = vk_sync_wait_many(&queue->device->vk,
1426                                        submit->wait_count, submit->waits,
1427                                        VK_SYNC_WAIT_COMPLETE, UINT64_MAX);
1428    if (result != VK_SUCCESS)
1429       return result;
1430 
1431    simple_mtx_lock(&queue->lock);
1432 
1433    for (uint32_t i = 0; i < submit->command_buffer_count; i++) {
1434       struct lvp_cmd_buffer *cmd_buffer =
1435          container_of(submit->command_buffers[i], struct lvp_cmd_buffer, vk);
1436 
1437       lvp_execute_cmds(queue->device, queue, cmd_buffer);
1438    }
1439 
1440    simple_mtx_unlock(&queue->lock);
1441 
1442    if (submit->command_buffer_count > 0)
1443       queue->ctx->flush(queue->ctx, &queue->last_fence, 0);
1444 
1445    for (uint32_t i = 0; i < submit->signal_count; i++) {
1446       struct lvp_pipe_sync *sync =
1447          vk_sync_as_lvp_pipe_sync(submit->signals[i].sync);
1448       lvp_pipe_sync_signal_with_fence(queue->device, sync, queue->last_fence);
1449    }
1450    destroy_pipelines(queue);
1451 
1452    return VK_SUCCESS;
1453 }
1454 
1455 static VkResult
1456 lvp_queue_init(struct lvp_device *device, struct lvp_queue *queue,
1457                const VkDeviceQueueCreateInfo *create_info,
1458                uint32_t index_in_family)
1459 {
1460    VkResult result = vk_queue_init(&queue->vk, &device->vk, create_info,
1461                                    index_in_family);
1462    if (result != VK_SUCCESS)
1463       return result;
1464 
1465    result = vk_queue_enable_submit_thread(&queue->vk);
1466    if (result != VK_SUCCESS) {
1467       vk_queue_finish(&queue->vk);
1468       return result;
1469    }
1470 
1471    queue->device = device;
1472 
1473    queue->ctx = device->pscreen->context_create(device->pscreen, NULL, PIPE_CONTEXT_ROBUST_BUFFER_ACCESS);
1474    queue->cso = cso_create_context(queue->ctx, CSO_NO_VBUF);
1475    queue->uploader = u_upload_create(queue->ctx, 1024 * 1024, PIPE_BIND_CONSTANT_BUFFER, PIPE_USAGE_STREAM, 0);
1476 
1477    queue->vk.driver_submit = lvp_queue_submit;
1478 
1479    simple_mtx_init(&queue->lock, mtx_plain);
1480    util_dynarray_init(&queue->pipeline_destroys, NULL);
1481 
1482    return VK_SUCCESS;
1483 }
1484 
1485 static void
1486 lvp_queue_finish(struct lvp_queue *queue)
1487 {
1488    vk_queue_finish(&queue->vk);
1489 
1490    destroy_pipelines(queue);
1491    simple_mtx_destroy(&queue->lock);
1492    util_dynarray_fini(&queue->pipeline_destroys);
1493 
1494    u_upload_destroy(queue->uploader);
1495    cso_destroy_context(queue->cso);
1496    queue->ctx->destroy(queue->ctx);
1497 }
1498 
1499 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDevice(
1500    VkPhysicalDevice                            physicalDevice,
1501    const VkDeviceCreateInfo*                   pCreateInfo,
1502    const VkAllocationCallbacks*                pAllocator,
1503    VkDevice*                                   pDevice)
1504 {
1505    fprintf(stderr, "WARNING: lavapipe is not a conformant vulkan implementation, testing use only.\n");
1506 
1507    LVP_FROM_HANDLE(lvp_physical_device, physical_device, physicalDevice);
1508    struct lvp_device *device;
1509    struct lvp_instance *instance = (struct lvp_instance *)physical_device->vk.instance;
1510 
1511    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
1512 
1513    size_t state_size = lvp_get_rendering_state_size();
1514    device = vk_zalloc2(&physical_device->vk.instance->alloc, pAllocator,
1515                        sizeof(*device) + state_size, 8,
1516                        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
1517    if (!device)
1518       return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1519 
1520    device->queue.state = device + 1;
1521    device->poison_mem = debug_get_bool_option("LVP_POISON_MEMORY", false);
1522    device->print_cmds = debug_get_bool_option("LVP_CMD_DEBUG", false);
1523 
1524    struct vk_device_dispatch_table dispatch_table;
1525    vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1526       &lvp_device_entrypoints, true);
1527    lvp_add_enqueue_cmd_entrypoints(&dispatch_table);
1528    vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1529       &wsi_device_entrypoints, false);
1530    VkResult result = vk_device_init(&device->vk,
1531                                     &physical_device->vk,
1532                                     &dispatch_table, pCreateInfo,
1533                                     pAllocator);
1534    if (result != VK_SUCCESS) {
1535       vk_free(&device->vk.alloc, device);
1536       return result;
1537    }
1538 
1539    vk_device_enable_threaded_submit(&device->vk);
1540    device->vk.command_buffer_ops = &lvp_cmd_buffer_ops;
1541 
1542    device->instance = (struct lvp_instance *)physical_device->vk.instance;
1543    device->physical_device = physical_device;
1544 
1545    device->pscreen = physical_device->pscreen;
1546 
1547    assert(pCreateInfo->queueCreateInfoCount == 1);
1548    assert(pCreateInfo->pQueueCreateInfos[0].queueFamilyIndex == 0);
1549    assert(pCreateInfo->pQueueCreateInfos[0].queueCount == 1);
1550    result = lvp_queue_init(device, &device->queue, pCreateInfo->pQueueCreateInfos, 0);
1551    if (result != VK_SUCCESS) {
1552       vk_free(&device->vk.alloc, device);
1553       return result;
1554    }
1555 
1556    nir_builder b = nir_builder_init_simple_shader(MESA_SHADER_FRAGMENT, NULL, "dummy_frag");
1557    struct pipe_shader_state shstate = {0};
1558    shstate.type = PIPE_SHADER_IR_NIR;
1559    shstate.ir.nir = b.shader;
1560    device->noop_fs = device->queue.ctx->create_fs_state(device->queue.ctx, &shstate);
1561    _mesa_hash_table_init(&device->bda, NULL, _mesa_hash_pointer, _mesa_key_pointer_equal);
1562    simple_mtx_init(&device->bda_lock, mtx_plain);
1563 
1564    uint32_t zero = 0;
1565    device->zero_buffer = pipe_buffer_create_with_data(device->queue.ctx, 0, PIPE_USAGE_IMMUTABLE, sizeof(uint32_t), &zero);
1566 
1567    device->null_texture_handle = (void *)(uintptr_t)device->queue.ctx->create_texture_handle(device->queue.ctx,
1568       &(struct pipe_sampler_view){ 0 }, NULL);
1569    device->null_image_handle = (void *)(uintptr_t)device->queue.ctx->create_image_handle(device->queue.ctx,
1570       &(struct pipe_image_view){ 0 });
1571 
1572    util_dynarray_init(&device->bda_texture_handles, NULL);
1573    util_dynarray_init(&device->bda_image_handles, NULL);
1574 
1575    *pDevice = lvp_device_to_handle(device);
1576 
1577    return VK_SUCCESS;
1578 
1579 }
1580 
1581 VKAPI_ATTR void VKAPI_CALL lvp_DestroyDevice(
1582    VkDevice                                    _device,
1583    const VkAllocationCallbacks*                pAllocator)
1584 {
1585    LVP_FROM_HANDLE(lvp_device, device, _device);
1586 
1587    util_dynarray_foreach(&device->bda_texture_handles, struct lp_texture_handle *, handle)
1588       device->queue.ctx->delete_texture_handle(device->queue.ctx, (uint64_t)(uintptr_t)*handle);
1589 
1590    util_dynarray_fini(&device->bda_texture_handles);
1591 
1592    util_dynarray_foreach(&device->bda_image_handles, struct lp_texture_handle *, handle)
1593       device->queue.ctx->delete_image_handle(device->queue.ctx, (uint64_t)(uintptr_t)*handle);
1594 
1595    util_dynarray_fini(&device->bda_image_handles);
1596 
1597    device->queue.ctx->delete_texture_handle(device->queue.ctx, (uint64_t)(uintptr_t)device->null_texture_handle);
1598    device->queue.ctx->delete_image_handle(device->queue.ctx, (uint64_t)(uintptr_t)device->null_image_handle);
1599 
1600    device->queue.ctx->delete_fs_state(device->queue.ctx, device->noop_fs);
1601 
1602    if (device->queue.last_fence)
1603       device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
1604    ralloc_free(device->bda.table);
1605    simple_mtx_destroy(&device->bda_lock);
1606    pipe_resource_reference(&device->zero_buffer, NULL);
1607 
1608    lvp_queue_finish(&device->queue);
1609    vk_device_finish(&device->vk);
1610    vk_free(&device->vk.alloc, device);
1611 }
1612 
1613 VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceExtensionProperties(
1614    const char*                                 pLayerName,
1615    uint32_t*                                   pPropertyCount,
1616    VkExtensionProperties*                      pProperties)
1617 {
1618    if (pLayerName)
1619       return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1620 
1621    return vk_enumerate_instance_extension_properties(
1622       &lvp_instance_extensions_supported, pPropertyCount, pProperties);
1623 }
1624 
1625 VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceLayerProperties(
1626    uint32_t*                                   pPropertyCount,
1627    VkLayerProperties*                          pProperties)
1628 {
1629    if (pProperties == NULL) {
1630       *pPropertyCount = 0;
1631       return VK_SUCCESS;
1632    }
1633 
1634    /* None supported at this time */
1635    return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1636 }
1637 
1638 VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateDeviceLayerProperties(
1639    VkPhysicalDevice                            physicalDevice,
1640    uint32_t*                                   pPropertyCount,
1641    VkLayerProperties*                          pProperties)
1642 {
1643    if (pProperties == NULL) {
1644       *pPropertyCount = 0;
1645       return VK_SUCCESS;
1646    }
1647 
1648    /* None supported at this time */
1649    return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1650 }
1651 
1652 static void
1653 set_mem_priority(struct lvp_device_memory *mem, int priority)
1654 {
1655 #if DETECT_OS_LINUX
1656    if (priority) {
1657       int advice = 0;
1658 #ifdef MADV_COLD
1659       if (priority < 0)
1660          advice |= MADV_COLD;
1661 #endif
1662       if (priority > 0)
1663          advice |= MADV_WILLNEED;
1664       if (advice)
1665          madvise(mem->pmem, mem->size, advice);
1666    }
1667 #endif
1668 }
1669 
1670 static int
1671 get_mem_priority(float priority)
1672 {
1673    if (priority < 0.3)
1674       return -1;
1675    if (priority < 0.6)
1676       return 0;
1677    return 1;
1678 }
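
/*
 * Worked example of the two helpers above (illustrative values only): a
 * VkMemoryPriorityAllocateInfoEXT priority of 0.2 maps to -1 and, on Linux
 * builds where MADV_COLD is defined, ends up as
 * madvise(mem->pmem, mem->size, MADV_COLD); 0.5 maps to 0, so no madvise()
 * call is made; 0.8 maps to 1 and ends up as
 * madvise(mem->pmem, mem->size, MADV_WILLNEED).
 */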
1679 
1680 VKAPI_ATTR VkResult VKAPI_CALL lvp_AllocateMemory(
1681    VkDevice                                    _device,
1682    const VkMemoryAllocateInfo*                 pAllocateInfo,
1683    const VkAllocationCallbacks*                pAllocator,
1684    VkDeviceMemory*                             pMem)
1685 {
1686    LVP_FROM_HANDLE(lvp_device, device, _device);
1687    struct lvp_device_memory *mem;
1688    ASSERTED const VkExportMemoryAllocateInfo *export_info = NULL;
1689    ASSERTED const VkImportMemoryFdInfoKHR *import_info = NULL;
1690    const VkImportMemoryHostPointerInfoEXT *host_ptr_info = NULL;
1691    VkResult error = VK_ERROR_OUT_OF_DEVICE_MEMORY;
1692    assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
1693    int priority = 0;
1694 
1695    if (pAllocateInfo->allocationSize == 0) {
1696       /* Apparently, this is allowed */
1697       *pMem = VK_NULL_HANDLE;
1698       return VK_SUCCESS;
1699    }
1700 
1701    vk_foreach_struct_const(ext, pAllocateInfo->pNext) {
1702       switch ((unsigned)ext->sType) {
1703       case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
1704          host_ptr_info = (VkImportMemoryHostPointerInfoEXT*)ext;
1705          assert(host_ptr_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT);
1706          break;
1707       case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
1708          export_info = (VkExportMemoryAllocateInfo*)ext;
1709          assert(!export_info->handleTypes || export_info->handleTypes == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
1710          break;
1711       case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
1712          import_info = (VkImportMemoryFdInfoKHR*)ext;
1713          assert(import_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
1714          break;
1715       case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT: {
1716          VkMemoryPriorityAllocateInfoEXT *prio = (VkMemoryPriorityAllocateInfoEXT*)ext;
1717          priority = get_mem_priority(prio->priority);
1718          break;
1719       }
1720       default:
1721          break;
1722       }
1723    }
1724 
1725 #ifdef PIPE_MEMORY_FD
1726    if (import_info != NULL && import_info->fd < 0) {
1727       return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
1728    }
1729 #endif
1730 
1731    mem = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*mem), 8,
1732                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1733    if (mem == NULL)
1734       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1735 
1736    vk_object_base_init(&device->vk, &mem->base,
1737                        VK_OBJECT_TYPE_DEVICE_MEMORY);
1738 
1739    mem->memory_type = LVP_DEVICE_MEMORY_TYPE_DEFAULT;
1740    mem->backed_fd = -1;
1741    mem->size = pAllocateInfo->allocationSize;
1742 
1743    if (host_ptr_info) {
1744       mem->pmem = host_ptr_info->pHostPointer;
1745       mem->memory_type = LVP_DEVICE_MEMORY_TYPE_USER_PTR;
1746    }
1747 #ifdef PIPE_MEMORY_FD
1748    else if(import_info) {
1749       uint64_t size;
1750       if(!device->pscreen->import_memory_fd(device->pscreen, import_info->fd, &mem->pmem, &size)) {
1751          close(import_info->fd);
1752          error = VK_ERROR_INVALID_EXTERNAL_HANDLE;
1753          goto fail;
1754       }
1755       if(size < pAllocateInfo->allocationSize) {
1756          device->pscreen->free_memory_fd(device->pscreen, mem->pmem);
1757          close(import_info->fd);
1758          goto fail;
1759       }
1760       if (export_info && export_info->handleTypes) {
1761          mem->backed_fd = import_info->fd;
1762       }
1763       else {
1764          close(import_info->fd);
1765       }
1766       mem->memory_type = LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD;
1767    }
1768    else if (export_info && export_info->handleTypes) {
1769       mem->pmem = device->pscreen->allocate_memory_fd(device->pscreen, pAllocateInfo->allocationSize, &mem->backed_fd);
1770       if (!mem->pmem || mem->backed_fd < 0) {
1771          goto fail;
1772       }
1773       mem->memory_type = LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD;
1774    }
1775 #endif
1776    else {
1777       mem->pmem = device->pscreen->allocate_memory(device->pscreen, pAllocateInfo->allocationSize);
1778       if (!mem->pmem) {
1779          goto fail;
1780       }
1781       if (device->poison_mem)
1782          /* this is a value that will definitely break things */
1783          memset(mem->pmem, UINT8_MAX / 2 + 1, pAllocateInfo->allocationSize);
1784       set_mem_priority(mem, priority);
1785    }
1786 
1787    mem->type_index = pAllocateInfo->memoryTypeIndex;
1788 
1789    *pMem = lvp_device_memory_to_handle(mem);
1790 
1791    return VK_SUCCESS;
1792 
1793 fail:
1794    vk_free2(&device->vk.alloc, pAllocator, mem);
1795    return vk_error(device, error);
1796 }
1797 
1798 VKAPI_ATTR void VKAPI_CALL lvp_FreeMemory(
1799    VkDevice                                    _device,
1800    VkDeviceMemory                              _mem,
1801    const VkAllocationCallbacks*                pAllocator)
1802 {
1803    LVP_FROM_HANDLE(lvp_device, device, _device);
1804    LVP_FROM_HANDLE(lvp_device_memory, mem, _mem);
1805 
1806    if (mem == NULL)
1807       return;
1808 
1809    switch(mem->memory_type) {
1810    case LVP_DEVICE_MEMORY_TYPE_DEFAULT:
1811       device->pscreen->free_memory(device->pscreen, mem->pmem);
1812       break;
1813 #ifdef PIPE_MEMORY_FD
1814    case LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD:
1815       device->pscreen->free_memory_fd(device->pscreen, mem->pmem);
1816       if(mem->backed_fd >= 0)
1817          close(mem->backed_fd);
1818       break;
1819 #endif
1820    case LVP_DEVICE_MEMORY_TYPE_USER_PTR:
1821    default:
1822       break;
1823    }
1824    vk_object_base_finish(&mem->base);
1825    vk_free2(&device->vk.alloc, pAllocator, mem);
1826 
1827 }
1828 
1829 VKAPI_ATTR VkResult VKAPI_CALL lvp_MapMemory2KHR(
1830     VkDevice                                    _device,
1831     const VkMemoryMapInfoKHR*                   pMemoryMapInfo,
1832     void**                                      ppData)
1833 {
1834    LVP_FROM_HANDLE(lvp_device, device, _device);
1835    LVP_FROM_HANDLE(lvp_device_memory, mem, pMemoryMapInfo->memory);
1836    void *map;
1837    if (mem == NULL) {
1838       *ppData = NULL;
1839       return VK_SUCCESS;
1840    }
1841 
1842    map = device->pscreen->map_memory(device->pscreen, mem->pmem);
1843 
1844    *ppData = (char *)map + pMemoryMapInfo->offset;
1845    return VK_SUCCESS;
1846 }
1847 
1848 VKAPI_ATTR VkResult VKAPI_CALL lvp_UnmapMemory2KHR(
1849     VkDevice                                    _device,
1850     const VkMemoryUnmapInfoKHR*                 pMemoryUnmapInfo)
1851 {
1852    LVP_FROM_HANDLE(lvp_device, device, _device);
1853    LVP_FROM_HANDLE(lvp_device_memory, mem, pMemoryUnmapInfo->memory);
1854 
1855    if (mem == NULL)
1856       return VK_SUCCESS;
1857 
1858    device->pscreen->unmap_memory(device->pscreen, mem->pmem);
1859    return VK_SUCCESS;
1860 }
1861 
1862 VKAPI_ATTR VkResult VKAPI_CALL lvp_FlushMappedMemoryRanges(
1863    VkDevice                                    _device,
1864    uint32_t                                    memoryRangeCount,
1865    const VkMappedMemoryRange*                  pMemoryRanges)
1866 {
1867    return VK_SUCCESS;
1868 }
1869 
1870 VKAPI_ATTR VkResult VKAPI_CALL lvp_InvalidateMappedMemoryRanges(
1871    VkDevice                                    _device,
1872    uint32_t                                    memoryRangeCount,
1873    const VkMappedMemoryRange*                  pMemoryRanges)
1874 {
1875    return VK_SUCCESS;
1876 }
1877 
1878 VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceBufferMemoryRequirements(
1879     VkDevice                                    _device,
1880     const VkDeviceBufferMemoryRequirements*     pInfo,
1881     VkMemoryRequirements2*                      pMemoryRequirements)
1882 {
1883    pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
1884    pMemoryRequirements->memoryRequirements.alignment = 64;
1885    pMemoryRequirements->memoryRequirements.size = 0;
1886 
1887    VkBuffer _buffer;
1888    if (lvp_CreateBuffer(_device, pInfo->pCreateInfo, NULL, &_buffer) != VK_SUCCESS)
1889       return;
1890    LVP_FROM_HANDLE(lvp_buffer, buffer, _buffer);
1891    pMemoryRequirements->memoryRequirements.size = buffer->total_size;
1892    lvp_DestroyBuffer(_device, _buffer, NULL);
1893 }
1894 
1895 VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceImageSparseMemoryRequirements(
1896     VkDevice                                    device,
1897     const VkDeviceImageMemoryRequirements*      pInfo,
1898     uint32_t*                                   pSparseMemoryRequirementCount,
1899     VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements)
1900 {
1901    stub();
1902 }
1903 
1904 VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceImageMemoryRequirements(
1905     VkDevice                                    _device,
1906     const VkDeviceImageMemoryRequirements*     pInfo,
1907     VkMemoryRequirements2*                      pMemoryRequirements)
1908 {
1909    pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
1910    pMemoryRequirements->memoryRequirements.alignment = 0;
1911    pMemoryRequirements->memoryRequirements.size = 0;
1912 
1913    VkImage _image;
1914    if (lvp_CreateImage(_device, pInfo->pCreateInfo, NULL, &_image) != VK_SUCCESS)
1915       return;
1916    LVP_FROM_HANDLE(lvp_image, image, _image);
1917    pMemoryRequirements->memoryRequirements.size = image->size;
1918    pMemoryRequirements->memoryRequirements.alignment = image->alignment;
1919    lvp_DestroyImage(_device, _image, NULL);
1920 }
1921 
1922 VKAPI_ATTR void VKAPI_CALL lvp_GetBufferMemoryRequirements(
1923    VkDevice                                    device,
1924    VkBuffer                                    _buffer,
1925    VkMemoryRequirements*                       pMemoryRequirements)
1926 {
1927    LVP_FROM_HANDLE(lvp_buffer, buffer, _buffer);
1928 
1929    /* The Vulkan spec (git aaed022) says:
1930     *
1931     *    memoryTypeBits is a bitfield and contains one bit set for every
1932     *    supported memory type for the resource. The bit `1<<i` is set if and
1933     *    only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
1934     *    structure for the physical device is supported.
1935     *
1936     * We support exactly one memory type.
1937     */
1938    pMemoryRequirements->memoryTypeBits = 1;
1939 
1940    pMemoryRequirements->size = buffer->total_size;
1941    pMemoryRequirements->alignment = 64;
1942 }
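
/*
 * Sketch of how an application consumes memoryTypeBits (hypothetical
 * app-side helper, not part of this driver): pick the lowest set bit whose
 * memory type also carries the requested property flags. With lavapipe this
 * always resolves to index 0, since exactly one memory type is advertised.
 *
 *    uint32_t pick_memory_type(uint32_t type_bits, VkMemoryPropertyFlags wanted,
 *                              const VkPhysicalDeviceMemoryProperties *props)
 *    {
 *       for (uint32_t i = 0; i < props->memoryTypeCount; i++) {
 *          if ((type_bits & (1u << i)) &&
 *              (props->memoryTypes[i].propertyFlags & wanted) == wanted)
 *             return i;
 *       }
 *       return UINT32_MAX; // no suitable type
 *    }
 */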
1943 
1944 VKAPI_ATTR void VKAPI_CALL lvp_GetBufferMemoryRequirements2(
1945    VkDevice                                     device,
1946    const VkBufferMemoryRequirementsInfo2       *pInfo,
1947    VkMemoryRequirements2                       *pMemoryRequirements)
1948 {
1949    lvp_GetBufferMemoryRequirements(device, pInfo->buffer,
1950                                    &pMemoryRequirements->memoryRequirements);
1951    vk_foreach_struct(ext, pMemoryRequirements->pNext) {
1952       switch (ext->sType) {
1953       case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
1954          VkMemoryDedicatedRequirements *req =
1955             (VkMemoryDedicatedRequirements *) ext;
1956          req->requiresDedicatedAllocation = false;
1957          req->prefersDedicatedAllocation = req->requiresDedicatedAllocation;
1958          break;
1959       }
1960       default:
1961          break;
1962       }
1963    }
1964 }
1965 
1966 VKAPI_ATTR void VKAPI_CALL lvp_GetImageMemoryRequirements(
1967    VkDevice                                    device,
1968    VkImage                                     _image,
1969    VkMemoryRequirements*                       pMemoryRequirements)
1970 {
1971    LVP_FROM_HANDLE(lvp_image, image, _image);
1972    pMemoryRequirements->memoryTypeBits = 1;
1973 
1974    pMemoryRequirements->size = image->size;
1975    pMemoryRequirements->alignment = image->alignment;
1976 }
1977 
1978 VKAPI_ATTR void VKAPI_CALL lvp_GetImageMemoryRequirements2(
1979    VkDevice                                    device,
1980    const VkImageMemoryRequirementsInfo2       *pInfo,
1981    VkMemoryRequirements2                      *pMemoryRequirements)
1982 {
1983    lvp_GetImageMemoryRequirements(device, pInfo->image,
1984                                   &pMemoryRequirements->memoryRequirements);
1985 
1986    vk_foreach_struct(ext, pMemoryRequirements->pNext) {
1987       switch (ext->sType) {
1988       case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
1989          VkMemoryDedicatedRequirements *req =
1990             (VkMemoryDedicatedRequirements *) ext;
1991          req->requiresDedicatedAllocation = false;
1992          req->prefersDedicatedAllocation = req->requiresDedicatedAllocation;
1993          break;
1994       }
1995       default:
1996          break;
1997       }
1998    }
1999 }
2000 
2001 VKAPI_ATTR void VKAPI_CALL lvp_GetImageSparseMemoryRequirements(
2002    VkDevice                                    device,
2003    VkImage                                     image,
2004    uint32_t*                                   pSparseMemoryRequirementCount,
2005    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements)
2006 {
2007    stub();
2008 }
2009 
2010 VKAPI_ATTR void VKAPI_CALL lvp_GetImageSparseMemoryRequirements2(
2011    VkDevice                                    device,
2012    const VkImageSparseMemoryRequirementsInfo2* pInfo,
2013    uint32_t* pSparseMemoryRequirementCount,
2014    VkSparseImageMemoryRequirements2* pSparseMemoryRequirements)
2015 {
2016    stub();
2017 }
2018 
2019 VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceMemoryCommitment(
2020    VkDevice                                    device,
2021    VkDeviceMemory                              memory,
2022    VkDeviceSize*                               pCommittedMemoryInBytes)
2023 {
2024    *pCommittedMemoryInBytes = 0;
2025 }
2026 
2027 VKAPI_ATTR VkResult VKAPI_CALL lvp_BindBufferMemory2(VkDevice _device,
2028                                uint32_t bindInfoCount,
2029                                const VkBindBufferMemoryInfo *pBindInfos)
2030 {
2031    LVP_FROM_HANDLE(lvp_device, device, _device);
2032    for (uint32_t i = 0; i < bindInfoCount; ++i) {
2033       LVP_FROM_HANDLE(lvp_device_memory, mem, pBindInfos[i].memory);
2034       LVP_FROM_HANDLE(lvp_buffer, buffer, pBindInfos[i].buffer);
2035       VkBindMemoryStatusKHR *status = (void*)vk_find_struct_const(&pBindInfos[i], BIND_MEMORY_STATUS_KHR);
2036 
2037       buffer->pmem = mem->pmem;
2038       buffer->offset = pBindInfos[i].memoryOffset;
2039       device->pscreen->resource_bind_backing(device->pscreen,
2040                                              buffer->bo,
2041                                              mem->pmem,
2042                                              pBindInfos[i].memoryOffset);
2043       if (status)
2044          *status->pResult = VK_SUCCESS;
2045    }
2046    return VK_SUCCESS;
2047 }
2048 
2049 static VkResult
2050 lvp_image_plane_bind(struct lvp_device *device,
2051                      struct lvp_image_plane *plane,
2052                      struct lvp_device_memory *mem,
2053                      VkDeviceSize memory_offset,
2054                      VkDeviceSize *plane_offset)
2055 {
2056    if (!device->pscreen->resource_bind_backing(device->pscreen,
2057                                                plane->bo,
2058                                                mem->pmem,
2059                                                memory_offset + *plane_offset)) {
2060       /* This is probably caused by the texture being too large, so let's
2061        * report this as the *closest* allowed error-code. It's not ideal,
2062        * but it's unlikely that anyone will care too much.
2063        */
2064       return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
2065    }
2066    plane->pmem = mem->pmem;
2067    plane->memory_offset = memory_offset;
2068    plane->plane_offset = *plane_offset;
2069    *plane_offset += plane->size;
2070    return VK_SUCCESS;
2071 }
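
/*
 * Worked example of the offset accumulation above (hypothetical two-plane,
 * non-disjoint image, for illustration only): with memory_offset = 0x1000
 * and a plane 0 of size 0x4000, plane 0 is bound at 0x1000 and *plane_offset
 * advances to 0x4000, so plane 1 is bound at 0x1000 + 0x4000 = 0x5000. For
 * a disjoint bind the caller passes a fresh *plane_offset of 0 per
 * VkBindImageMemoryInfo, so each plane is bound directly at its own
 * memoryOffset.
 */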
2072 
2073 
2074 VKAPI_ATTR VkResult VKAPI_CALL lvp_BindImageMemory2(VkDevice _device,
2075                               uint32_t bindInfoCount,
2076                               const VkBindImageMemoryInfo *pBindInfos)
2077 {
2078    LVP_FROM_HANDLE(lvp_device, device, _device);
2079    VkResult res = VK_SUCCESS;
2080    for (uint32_t i = 0; i < bindInfoCount; ++i) {
2081       const VkBindImageMemoryInfo *bind_info = &pBindInfos[i];
2082       LVP_FROM_HANDLE(lvp_device_memory, mem, bind_info->memory);
2083       LVP_FROM_HANDLE(lvp_image, image, bind_info->image);
2084       VkBindMemoryStatusKHR *status = (void*)vk_find_struct_const(&pBindInfos[i], BIND_MEMORY_STATUS_KHR);
2085       bool did_bind = false;
2086 
2087       vk_foreach_struct_const(s, bind_info->pNext) {
2088          switch (s->sType) {
2089          case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: {
2090             const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
2091                (const VkBindImageMemorySwapchainInfoKHR *) s;
2092             struct lvp_image *swapchain_image =
2093                lvp_swapchain_get_image(swapchain_info->swapchain,
2094                                        swapchain_info->imageIndex);
2095 
2096             image->planes[0].pmem = swapchain_image->planes[0].pmem;
2097             image->planes[0].memory_offset = swapchain_image->planes[0].memory_offset;
2098             device->pscreen->resource_bind_backing(device->pscreen,
2099                                                    image->planes[0].bo,
2100                                                    image->planes[0].pmem,
2101                                                    image->planes[0].memory_offset);
2102             did_bind = true;
2103             if (status)
2104                *status->pResult = VK_SUCCESS;
2105             break;
2106          }
2107          default:
2108             break;
2109          }
2110       }
2111 
2112       if (!did_bind) {
2113          uint64_t offset_B = 0;
2114          VkResult result;
2115          if (image->disjoint) {
2116             const VkBindImagePlaneMemoryInfo *plane_info =
2117                vk_find_struct_const(pBindInfos[i].pNext, BIND_IMAGE_PLANE_MEMORY_INFO);
2118             uint8_t plane = lvp_image_aspects_to_plane(image, plane_info->planeAspect);
2119             result = lvp_image_plane_bind(device, &image->planes[plane],
2120                                           mem, bind_info->memoryOffset, &offset_B);
2121             if (status)
2122                *status->pResult = result;
2123             if (result != VK_SUCCESS)
2124                return result;
2125          } else {
2126             VkResult fail = VK_SUCCESS;
2127             for (unsigned plane = 0; plane < image->plane_count; plane++) {
2128                result = lvp_image_plane_bind(device, &image->planes[plane],
2129                                              mem, bind_info->memoryOffset, &offset_B);
2130                if (status)
2131                   *status->pResult = result;
2132                if (result != VK_SUCCESS)
2133                   fail = result;
2134             }
2135             if (fail != VK_SUCCESS)
2136                return fail;
2137          }
2138       }
2139    }
2140    return res;
2141 }
2142 
2143 #ifdef PIPE_MEMORY_FD
2144 
2145 VkResult
2146 lvp_GetMemoryFdKHR(VkDevice _device, const VkMemoryGetFdInfoKHR *pGetFdInfo, int *pFD)
2147 {
2148    LVP_FROM_HANDLE(lvp_device_memory, memory, pGetFdInfo->memory);
2149 
2150    assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
2151    assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
2152 
2153    *pFD = dup(memory->backed_fd);
2154    assert(*pFD >= 0);
2155    return VK_SUCCESS;
2156 }
2157 
2158 VkResult
2159 lvp_GetMemoryFdPropertiesKHR(VkDevice _device,
2160                              VkExternalMemoryHandleTypeFlagBits handleType,
2161                              int fd,
2162                              VkMemoryFdPropertiesKHR *pMemoryFdProperties)
2163 {
2164    LVP_FROM_HANDLE(lvp_device, device, _device);
2165 
2166    assert(pMemoryFdProperties->sType == VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR);
2167 
2168    if(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT) {
2169       // There is only one memoryType so select this one
2170       pMemoryFdProperties->memoryTypeBits = 1;
2171    }
2172    else
2173       return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
2174    return VK_SUCCESS;
2175 }
2176 
2177 #endif
2178 
2179 VKAPI_ATTR VkResult VKAPI_CALL lvp_QueueBindSparse(
2180    VkQueue                                     queue,
2181    uint32_t                                    bindInfoCount,
2182    const VkBindSparseInfo*                     pBindInfo,
2183    VkFence                                     fence)
2184 {
2185    stub_return(VK_ERROR_INCOMPATIBLE_DRIVER);
2186 }
2187 
2188 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateEvent(
2189    VkDevice                                    _device,
2190    const VkEventCreateInfo*                    pCreateInfo,
2191    const VkAllocationCallbacks*                pAllocator,
2192    VkEvent*                                    pEvent)
2193 {
2194    LVP_FROM_HANDLE(lvp_device, device, _device);
2195    struct lvp_event *event = vk_alloc2(&device->vk.alloc, pAllocator,
2196                                        sizeof(*event), 8,
2197                                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2198 
2199    if (!event)
2200       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2201 
2202    vk_object_base_init(&device->vk, &event->base, VK_OBJECT_TYPE_EVENT);
2203    *pEvent = lvp_event_to_handle(event);
2204    event->event_storage = 0;
2205 
2206    return VK_SUCCESS;
2207 }
2208 
2209 VKAPI_ATTR void VKAPI_CALL lvp_DestroyEvent(
2210    VkDevice                                    _device,
2211    VkEvent                                     _event,
2212    const VkAllocationCallbacks*                pAllocator)
2213 {
2214    LVP_FROM_HANDLE(lvp_device, device, _device);
2215    LVP_FROM_HANDLE(lvp_event, event, _event);
2216 
2217    if (!event)
2218       return;
2219 
2220    vk_object_base_finish(&event->base);
2221    vk_free2(&device->vk.alloc, pAllocator, event);
2222 }
2223 
2224 VKAPI_ATTR VkResult VKAPI_CALL lvp_GetEventStatus(
2225    VkDevice                                    _device,
2226    VkEvent                                     _event)
2227 {
2228    LVP_FROM_HANDLE(lvp_event, event, _event);
2229    if (event->event_storage == 1)
2230       return VK_EVENT_SET;
2231    return VK_EVENT_RESET;
2232 }
2233 
2234 VKAPI_ATTR VkResult VKAPI_CALL lvp_SetEvent(
2235    VkDevice                                    _device,
2236    VkEvent                                     _event)
2237 {
2238    LVP_FROM_HANDLE(lvp_event, event, _event);
2239    event->event_storage = 1;
2240 
2241    return VK_SUCCESS;
2242 }
2243 
2244 VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetEvent(
2245    VkDevice                                    _device,
2246    VkEvent                                     _event)
2247 {
2248    LVP_FROM_HANDLE(lvp_event, event, _event);
2249    event->event_storage = 0;
2250 
2251    return VK_SUCCESS;
2252 }
2253 
2254 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateSampler(
2255    VkDevice                                    _device,
2256    const VkSamplerCreateInfo*                  pCreateInfo,
2257    const VkAllocationCallbacks*                pAllocator,
2258    VkSampler*                                  pSampler)
2259 {
2260    LVP_FROM_HANDLE(lvp_device, device, _device);
2261    struct lvp_sampler *sampler;
2262 
2263    sampler = vk_sampler_create(&device->vk, pCreateInfo,
2264                                pAllocator, sizeof(*sampler));
2265    if (!sampler)
2266       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2267 
2268    struct pipe_sampler_state state = {0};
2269    VkClearColorValue border_color =
2270       vk_sampler_border_color_value(pCreateInfo, NULL);
2271    STATIC_ASSERT(sizeof(state.border_color) == sizeof(border_color));
2272 
2273    state.wrap_s = vk_conv_wrap_mode(pCreateInfo->addressModeU);
2274    state.wrap_t = vk_conv_wrap_mode(pCreateInfo->addressModeV);
2275    state.wrap_r = vk_conv_wrap_mode(pCreateInfo->addressModeW);
2276    state.min_img_filter = pCreateInfo->minFilter == VK_FILTER_LINEAR ? PIPE_TEX_FILTER_LINEAR : PIPE_TEX_FILTER_NEAREST;
2277    state.min_mip_filter = pCreateInfo->mipmapMode == VK_SAMPLER_MIPMAP_MODE_LINEAR ? PIPE_TEX_MIPFILTER_LINEAR : PIPE_TEX_MIPFILTER_NEAREST;
2278    state.mag_img_filter = pCreateInfo->magFilter == VK_FILTER_LINEAR ? PIPE_TEX_FILTER_LINEAR : PIPE_TEX_FILTER_NEAREST;
2279    state.min_lod = pCreateInfo->minLod;
2280    state.max_lod = pCreateInfo->maxLod;
2281    state.lod_bias = pCreateInfo->mipLodBias;
2282    if (pCreateInfo->anisotropyEnable)
2283       state.max_anisotropy = pCreateInfo->maxAnisotropy;
2284    else
2285       state.max_anisotropy = 1;
2286    state.unnormalized_coords = pCreateInfo->unnormalizedCoordinates;
2287    state.compare_mode = pCreateInfo->compareEnable ? PIPE_TEX_COMPARE_R_TO_TEXTURE : PIPE_TEX_COMPARE_NONE;
2288    state.compare_func = pCreateInfo->compareOp;
2289    state.seamless_cube_map = !(pCreateInfo->flags & VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT);
2290    STATIC_ASSERT((unsigned)VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE == (unsigned)PIPE_TEX_REDUCTION_WEIGHTED_AVERAGE);
2291    STATIC_ASSERT((unsigned)VK_SAMPLER_REDUCTION_MODE_MIN == (unsigned)PIPE_TEX_REDUCTION_MIN);
2292    STATIC_ASSERT((unsigned)VK_SAMPLER_REDUCTION_MODE_MAX == (unsigned)PIPE_TEX_REDUCTION_MAX);
2293    state.reduction_mode = (enum pipe_tex_reduction_mode)sampler->vk.reduction_mode;
2294    memcpy(&state.border_color, &border_color, sizeof(border_color));
2295 
2296    simple_mtx_lock(&device->queue.lock);
2297    sampler->texture_handle = (void *)(uintptr_t)device->queue.ctx->create_texture_handle(device->queue.ctx, NULL, &state);
2298    simple_mtx_unlock(&device->queue.lock);
2299 
2300    lp_jit_sampler_from_pipe(&sampler->desc.sampler, &state);
2301    sampler->desc.texture.sampler_index = sampler->texture_handle->sampler_index;
2302 
2303    *pSampler = lvp_sampler_to_handle(sampler);
2304 
2305    return VK_SUCCESS;
2306 }
2307 
2308 VKAPI_ATTR void VKAPI_CALL lvp_DestroySampler(
2309    VkDevice                                    _device,
2310    VkSampler                                   _sampler,
2311    const VkAllocationCallbacks*                pAllocator)
2312 {
2313    LVP_FROM_HANDLE(lvp_device, device, _device);
2314    LVP_FROM_HANDLE(lvp_sampler, sampler, _sampler);
2315 
2316    if (!_sampler)
2317       return;
2318 
2319    simple_mtx_lock(&device->queue.lock);
2320    device->queue.ctx->delete_texture_handle(device->queue.ctx, (uint64_t)(uintptr_t)sampler->texture_handle);
2321    simple_mtx_unlock(&device->queue.lock);
2322 
2323    vk_sampler_destroy(&device->vk, pAllocator, &sampler->vk);
2324 }
2325 
2326 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreatePrivateDataSlot(
2327    VkDevice                                    _device,
2328    const VkPrivateDataSlotCreateInfo*          pCreateInfo,
2329    const VkAllocationCallbacks*                pAllocator,
2330    VkPrivateDataSlot*                          pPrivateDataSlot)
2331 {
2332    LVP_FROM_HANDLE(lvp_device, device, _device);
2333    return vk_private_data_slot_create(&device->vk, pCreateInfo, pAllocator,
2334                                       pPrivateDataSlot);
2335 }
2336 
2337 VKAPI_ATTR void VKAPI_CALL lvp_DestroyPrivateDataSlot(
2338    VkDevice                                    _device,
2339    VkPrivateDataSlot                           privateDataSlot,
2340    const VkAllocationCallbacks*                pAllocator)
2341 {
2342    LVP_FROM_HANDLE(lvp_device, device, _device);
2343    vk_private_data_slot_destroy(&device->vk, privateDataSlot, pAllocator);
2344 }
2345 
lvp_SetPrivateData(VkDevice _device,VkObjectType objectType,uint64_t objectHandle,VkPrivateDataSlot privateDataSlot,uint64_t data)2346 VKAPI_ATTR VkResult VKAPI_CALL lvp_SetPrivateData(
2347    VkDevice                                    _device,
2348    VkObjectType                                objectType,
2349    uint64_t                                    objectHandle,
2350    VkPrivateDataSlot                           privateDataSlot,
2351    uint64_t                                    data)
2352 {
2353    LVP_FROM_HANDLE(lvp_device, device, _device);
2354    return vk_object_base_set_private_data(&device->vk, objectType,
2355                                           objectHandle, privateDataSlot,
2356                                           data);
2357 }
2358 
lvp_GetPrivateData(VkDevice _device,VkObjectType objectType,uint64_t objectHandle,VkPrivateDataSlot privateDataSlot,uint64_t * pData)2359 VKAPI_ATTR void VKAPI_CALL lvp_GetPrivateData(
2360    VkDevice                                    _device,
2361    VkObjectType                                objectType,
2362    uint64_t                                    objectHandle,
2363    VkPrivateDataSlot                           privateDataSlot,
2364    uint64_t*                                   pData)
2365 {
2366    LVP_FROM_HANDLE(lvp_device, device, _device);
2367    vk_object_base_get_private_data(&device->vk, objectType, objectHandle,
2368                                    privateDataSlot, pData);
2369 }
2370 
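/* VK_NV_device_generated_commands: the layout object is a single allocation
 * holding the header plus a trailing array of tokenCount tokens, copied
 * verbatim from the create info along with the per-stream strides.
 */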
VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateIndirectCommandsLayoutNV(
    VkDevice                                    _device,
    const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkIndirectCommandsLayoutNV*                 pIndirectCommandsLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_indirect_command_layout *dlayout;

   size_t size = sizeof(*dlayout) + pCreateInfo->tokenCount * sizeof(VkIndirectCommandsLayoutTokenNV);

   dlayout =
      vk_zalloc2(&device->vk.alloc, pAllocator, size, alignof(struct lvp_indirect_command_layout),
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!dlayout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &dlayout->base, VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV);

   dlayout->stream_count = pCreateInfo->streamCount;
   dlayout->token_count = pCreateInfo->tokenCount;
   for (unsigned i = 0; i < pCreateInfo->streamCount; i++)
      dlayout->stream_strides[i] = pCreateInfo->pStreamStrides[i];
   typed_memcpy(dlayout->tokens, pCreateInfo->pTokens, pCreateInfo->tokenCount);

   *pIndirectCommandsLayout = lvp_indirect_command_layout_to_handle(dlayout);
   return VK_SUCCESS;
}

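/* Destroying an indirect commands layout just finishes the base object and
 * frees the single allocation made above.
 */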
VKAPI_ATTR void VKAPI_CALL lvp_DestroyIndirectCommandsLayoutNV(
    VkDevice                                    _device,
    VkIndirectCommandsLayoutNV                  indirectCommandsLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   VK_FROM_HANDLE(lvp_indirect_command_layout, layout, indirectCommandsLayout);

   if (!layout)
      return;

   vk_object_base_finish(&layout->base);
   vk_free2(&device->vk.alloc, pAllocator, layout);
}

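/* Map an NV DGC token to the vk_cmd_enqueue command type that gets recorded
 * when the generated commands are executed.  Token types lavapipe does not
 * support (e.g. NV_mesh_shader task draws) hit unreachable().
 */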
enum vk_cmd_type
lvp_nv_dgc_token_to_cmd_type(const VkIndirectCommandsLayoutTokenNV *token)
{
   switch (token->tokenType) {
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV:
         return VK_CMD_BIND_PIPELINE_SHADER_GROUP_NV;
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV:
         if (token->indirectStateFlags & VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV) {
            return VK_CMD_SET_FRONT_FACE;
         }
         assert(!"unknown state flag!");
         break;
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV:
         return VK_CMD_PUSH_CONSTANTS2_KHR;
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV:
         return VK_CMD_BIND_INDEX_BUFFER;
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV:
         return VK_CMD_BIND_VERTEX_BUFFERS2;
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV:
         return VK_CMD_DRAW_INDEXED_INDIRECT;
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV:
         return VK_CMD_DRAW_INDIRECT;
      // only available if VK_EXT_mesh_shader is supported
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV:
         return VK_CMD_DRAW_MESH_TASKS_INDIRECT_EXT;
      // only available if VK_NV_mesh_shader is supported
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV:
         unreachable("NV_mesh_shader unsupported!");
      default:
         unreachable("unknown token type");
   }
   return UINT32_MAX;
}

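/* Report the preprocess buffer size for generated commands.  Lavapipe
 * replays DGC on the CPU through vk_cmd_enqueue, so the worst case for one
 * sequence is:
 *
 *    sizeof(struct list_head)                        (list of enqueued cmds)
 *    + sum over tokens of vk_cmd_queue_type_sizes[]  (one cmd struct each)
 *    + per-token side allocations (vertex buffer arrays, push constant data)
 *
 * multiplied by maxSequencesCount.
 */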
VKAPI_ATTR void VKAPI_CALL lvp_GetGeneratedCommandsMemoryRequirementsNV(
    VkDevice                                    device,
    const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo,
    VkMemoryRequirements2*                      pMemoryRequirements)
{
   VK_FROM_HANDLE(lvp_indirect_command_layout, dlayout, pInfo->indirectCommandsLayout);

   size_t size = sizeof(struct list_head);

   for (unsigned i = 0; i < dlayout->token_count; i++) {
      const VkIndirectCommandsLayoutTokenNV *token = &dlayout->tokens[i];
      UNUSED struct vk_cmd_queue_entry *cmd;
      enum vk_cmd_type type = lvp_nv_dgc_token_to_cmd_type(token);
      size += vk_cmd_queue_type_sizes[type];

      switch (token->tokenType) {
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV:
         size += sizeof(*cmd->u.bind_vertex_buffers.buffers);
         size += sizeof(*cmd->u.bind_vertex_buffers.offsets);
         size += sizeof(*cmd->u.bind_vertex_buffers2.sizes) + sizeof(*cmd->u.bind_vertex_buffers2.strides);
         break;
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV:
         size += token->pushconstantSize + sizeof(VkPushConstantsInfoKHR);
         break;
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV:
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV:
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV:
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV:
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV:
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV:
      case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV:
         break;
      default:
         unreachable("unknown type!");
      }
   }

   size *= pInfo->maxSequencesCount;

   pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
   pMemoryRequirements->memoryRequirements.alignment = 4;
   pMemoryRequirements->memoryRequirements.size = align(size, pMemoryRequirements->memoryRequirements.alignment);
}

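/* Lavapipe does not support exporting or importing external fences or
 * semaphores, so both queries report empty handle type masks and no
 * features.
 */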
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceExternalFenceProperties(
   VkPhysicalDevice                           physicalDevice,
   const VkPhysicalDeviceExternalFenceInfo    *pExternalFenceInfo,
   VkExternalFenceProperties                  *pExternalFenceProperties)
{
   pExternalFenceProperties->exportFromImportedHandleTypes = 0;
   pExternalFenceProperties->compatibleHandleTypes = 0;
   pExternalFenceProperties->externalFenceFeatures = 0;
}

VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceExternalSemaphoreProperties(
   VkPhysicalDevice                            physicalDevice,
   const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
   VkExternalSemaphoreProperties               *pExternalSemaphoreProperties)
{
   pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
   pExternalSemaphoreProperties->compatibleHandleTypes = 0;
   pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
}

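/* VK_EXT_calibrated_timestamps: the only calibrateable domains are the
 * device domain and CLOCK_MONOTONIC; both end up being sampled from
 * os_time_get_nano() below.
 */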
static const VkTimeDomainEXT lvp_time_domains[] = {
        VK_TIME_DOMAIN_DEVICE_EXT,
        VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
};

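/* Enumerate the supported time domains using the standard two-call pattern
 * via the vk_outarray helpers.
 */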
VKAPI_ATTR VkResult VKAPI_CALL lvp_GetPhysicalDeviceCalibrateableTimeDomainsEXT(
   VkPhysicalDevice physicalDevice,
   uint32_t *pTimeDomainCount,
   VkTimeDomainEXT *pTimeDomains)
{
   int d;
   VK_OUTARRAY_MAKE_TYPED(VkTimeDomainEXT, out, pTimeDomains,
                          pTimeDomainCount);

   for (d = 0; d < ARRAY_SIZE(lvp_time_domains); d++) {
      vk_outarray_append_typed(VkTimeDomainEXT, &out, i) {
         *i = lvp_time_domains[d];
      }
   }

   return vk_outarray_status(&out);
}

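/* Every requested domain is satisfied with the same os_time_get_nano()
 * sample: on a CPU rasterizer the "device" clock is the host clock, so the
 * device and CLOCK_MONOTONIC timestamps are identical and the maximum
 * deviation is reported as 1 ns.
 */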
VKAPI_ATTR VkResult VKAPI_CALL lvp_GetCalibratedTimestampsEXT(
   VkDevice device,
   uint32_t timestampCount,
   const VkCalibratedTimestampInfoEXT *pTimestampInfos,
   uint64_t *pTimestamps,
   uint64_t *pMaxDeviation)
{
   *pMaxDeviation = 1;

   uint64_t now = os_time_get_nano();
   for (unsigned i = 0; i < timestampCount; i++) {
      pTimestamps[i] = now;
   }
   return VK_SUCCESS;
}

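/* Device groups on lavapipe only ever contain a single physical device, so
 * there are no peer memory features to report.
 */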
VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceGroupPeerMemoryFeatures(
    VkDevice device,
    uint32_t heapIndex,
    uint32_t localDeviceIndex,
    uint32_t remoteDeviceIndex,
    VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
{
   *pPeerMemoryFeatures = 0;
}

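/* VK_EXT_pageable_device_local_memory: forward the requested float priority
 * to the driver's memory priority helpers for this allocation.
 */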
VKAPI_ATTR void VKAPI_CALL lvp_SetDeviceMemoryPriorityEXT(
    VkDevice                                    _device,
    VkDeviceMemory                              _memory,
    float                                       priority)
{
   LVP_FROM_HANDLE(lvp_device_memory, mem, _memory);
   set_mem_priority(mem, get_mem_priority(priority));
}

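/* VK_KHR_maintenance5: report a fixed 64x64 render area granularity, which
 * corresponds to the rasterizer's tile size.
 */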
VKAPI_ATTR void VKAPI_CALL lvp_GetRenderingAreaGranularityKHR(
    VkDevice                                    device,
    const VkRenderingAreaInfoKHR*               pRenderingAreaInfo,
    VkExtent2D*                                 pGranularity)
{
   VkExtent2D tile_size = {64, 64};
   *pGranularity = tile_size;
}