/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_pipeline.h"

#include "venus-protocol/vn_protocol_driver_pipeline.h"
#include "venus-protocol/vn_protocol_driver_pipeline_cache.h"
#include "venus-protocol/vn_protocol_driver_pipeline_layout.h"
#include "venus-protocol/vn_protocol_driver_shader_module.h"

#include "vn_descriptor_set.h"
#include "vn_device.h"
#include "vn_physical_device.h"
#include "vn_render_pass.h"

/**
 * Fields in VkGraphicsPipelineCreateInfo itself (and in its sub-structs)
 * that we must track to determine which fields are valid and which must be
 * erased.
 */
struct vn_graphics_pipeline_info_self {
   union {
      /* Bitmask exists for testing if any field is set. */
      uint32_t mask;

      /* Group the fixes by Vulkan struct. Within each group, sort by struct
       * order.
       */
      struct {
         /** VkGraphicsPipelineCreateInfo::pStages */
         bool shader_stages : 1;
         /** VkGraphicsPipelineCreateInfo::pVertexInputState */
         bool vertex_input_state : 1;
         /** VkGraphicsPipelineCreateInfo::pInputAssemblyState */
         bool input_assembly_state : 1;
         /** VkGraphicsPipelineCreateInfo::pTessellationState */
         bool tessellation_state : 1;
         /** VkGraphicsPipelineCreateInfo::pViewportState */
         bool viewport_state : 1;
         /** VkGraphicsPipelineCreateInfo::pRasterizationState */
         bool rasterization_state : 1;
         /** VkGraphicsPipelineCreateInfo::pMultisampleState */
         bool multisample_state : 1;
         /** VkGraphicsPipelineCreateInfo::pDepthStencilState */
         bool depth_stencil_state : 1;
         /** VkGraphicsPipelineCreateInfo::pColorBlendState */
         bool color_blend_state : 1;
         /** VkGraphicsPipelineCreateInfo::layout */
         bool pipeline_layout : 1;
         /** VkGraphicsPipelineCreateInfo::renderPass */
         bool render_pass : 1;
         /** VkGraphicsPipelineCreateInfo::basePipelineHandle */
         bool base_pipeline_handle : 1;

         /** VkPipelineViewportStateCreateInfo::pViewports */
         bool viewport_state_viewports : 1;
         /** VkPipelineViewportStateCreateInfo::pScissors */
         bool viewport_state_scissors : 1;

         /** VkPipelineMultisampleStateCreateInfo::pSampleMask */
         bool multisample_state_sample_mask : 1;
      };
   };
};

static_assert(sizeof(struct vn_graphics_pipeline_info_self) ==
                 sizeof(((struct vn_graphics_pipeline_info_self){}).mask),
              "vn_graphics_pipeline_info_self::mask is too small");

/**
 * Fields in the VkGraphicsPipelineCreateInfo pNext chain that we must track
 * to determine which fields are valid and which must be erased.
 */
struct vn_graphics_pipeline_info_pnext {
   union {
      /* Bitmask exists for testing if any field is set. */
      uint32_t mask;

      /* Group the fixes by Vulkan struct. Within each group, sort by struct
       * order.
       */
      struct {
         /** VkPipelineRenderingCreateInfo, all format fields */
         bool rendering_info_formats : 1;
      };
   };
};

static_assert(sizeof(struct vn_graphics_pipeline_info_pnext) ==
                 sizeof(((struct vn_graphics_pipeline_info_pnext){}).mask),
              "vn_graphics_pipeline_info_pnext::mask is too small");

/**
 * Description of fixes needed for a single VkGraphicsPipelineCreateInfo
 * pNext chain.
 */
struct vn_graphics_pipeline_fix_desc {
   struct vn_graphics_pipeline_info_self self;
   struct vn_graphics_pipeline_info_pnext pnext;
};
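
/* Illustrative use (a sketch, not a snippet from elsewhere in this driver):
 * the unions above let a caller test whether any fix is needed at all
 * without inspecting each bitfield:
 *
 *    struct vn_graphics_pipeline_fix_desc desc;
 *    vn_graphics_pipeline_state_fill(info, flags2, &state, &desc);
 *    if (desc.self.mask || desc.pnext.mask)
 *       ... allocate a vn_graphics_pipeline_fix_tmp and apply the fixes ...
 */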

/**
 * Typesafe bitmask for VkGraphicsPipelineLibraryFlagsEXT. Named members
 * reduce long lines.
 *
 * From the Vulkan 1.3.215 spec:
 *
 *    The state required for a graphics pipeline is divided into vertex input
 *    state, pre-rasterization shader state, fragment shader state, and
 *    fragment output state.
 */
struct vn_graphics_pipeline_library_state {
   union {
      VkGraphicsPipelineLibraryFlagsEXT mask;

      struct {
         /** VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT */
         bool vertex_input : 1;
         /** VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT */
         bool pre_raster_shaders : 1;
         /** VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT */
         bool fragment_shader : 1;
         /** VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT */
         bool fragment_output : 1;
      };
   };
};
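
/* The bitfield members above are declared in ascending flag-bit order so
 * that each one aliases the matching bit of `mask`; the code both or-s
 * Vulkan flag values into `mask` and reads the named members. For reference,
 * the correspondence assumed here is:
 *
 *    VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT    = 0x1
 *    VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x2
 *    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT           = 0x4
 *    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x8
 */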

/**
 * Compact bitmask for the subset of graphics VkDynamicState that
 * venus needs to track. Named members reduce long lines.
 *
 * We want a *compact* bitmask because enum VkDynamicState has large gaps due
 * to extensions.
 */
struct vn_graphics_dynamic_state {
   union {
      uint32_t mask;

      struct {
         /** VK_DYNAMIC_STATE_VERTEX_INPUT_EXT */
         bool vertex_input : 1;
         /** VK_DYNAMIC_STATE_VIEWPORT */
         bool viewport : 1;
         /** VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT */
         bool viewport_with_count : 1;
         /** VK_DYNAMIC_STATE_SAMPLE_MASK_EXT */
         bool sample_mask : 1;
         /** VK_DYNAMIC_STATE_SCISSOR */
         bool scissor : 1;
         /** VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT */
         bool scissor_with_count : 1;
         /** VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE */
         bool rasterizer_discard_enable : 1;
      };
   };
};
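
/* For example, VK_DYNAMIC_STATE_VERTEX_INPUT_EXT has the extension-assigned
 * value 1000352000, so VkDynamicState values cannot serve directly as bit
 * indices; the hand-packed bitfield above avoids that.
 */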

/**
 * Graphics pipeline state that Venus tracks to determine which fixes are
 * required in the VkGraphicsPipelineCreateInfo pNext chain.
 *
 * This is the pipeline's fully linked state. That is, it includes the state
 * provided directly in VkGraphicsPipelineCreateInfo and the state provided
 * indirectly in VkPipelineLibraryCreateInfoKHR.
 */
struct vn_graphics_pipeline_state {
   /** The GPL state subsets that the pipeline provides. */
   struct vn_graphics_pipeline_library_state gpl;

   struct vn_graphics_dynamic_state dynamic;
   VkShaderStageFlags shader_stages;

   struct vn_render_pass_state {
      /**
       * The attachment aspects accessed by the pipeline.
       *
       * Valid if and only if VK_IMAGE_ASPECT_METADATA_BIT is unset.
       *
       * In a complete pipeline, this must be valid (and may be empty). In
       * a pipeline library, this may be invalid. We initialize this to be
       * invalid, and it remains invalid until we read the attachment info in
       * the VkGraphicsPipelineCreateInfo chain.
       *
       * The app provides the attachment info in
       * VkGraphicsPipelineCreateInfo::renderPass or
       * VkPipelineRenderingCreateInfo, but the validity of that info depends
       * on VkGraphicsPipelineLibraryFlagsEXT.
       */
      VkImageAspectFlags attachment_aspects;
   } render_pass;

   /** VkPipelineRasterizationStateCreateInfo::rasterizerDiscardEnable
    *
    * Valid if and only if gpl.pre_raster_shaders is set.
    */
   bool rasterizer_discard_enable;
};

struct vn_graphics_pipeline {
   struct vn_pipeline base;
   struct vn_graphics_pipeline_state state;
};

/**
 * Temporary storage for fixes in vkCreateGraphicsPipelines.
 *
 * Length of each array is vkCreateGraphicsPipelines::createInfoCount.
 */
struct vn_graphics_pipeline_fix_tmp {
   VkGraphicsPipelineCreateInfo *infos;
   VkPipelineMultisampleStateCreateInfo *multisample_state_infos;
   VkPipelineViewportStateCreateInfo *viewport_state_infos;

   /* Fixing the pNext chain
    *
    * TODO: extend when the extensions below, or additional ones, are
    * supported:
    * - VK_EXT_pipeline_robustness
    */
   VkGraphicsPipelineLibraryCreateInfoEXT *gpl_infos;
   VkPipelineCreateFlags2CreateInfo *flags2_infos;
   VkPipelineCreationFeedbackCreateInfo *feedback_infos;
   VkPipelineFragmentShadingRateStateCreateInfoKHR *fsr_infos;
   VkPipelineLibraryCreateInfoKHR *library_infos;
   VkPipelineRenderingCreateInfo *rendering_infos;
};

/* shader module commands */

VkResult
vn_CreateShaderModule(VkDevice device,
                      const VkShaderModuleCreateInfo *pCreateInfo,
                      const VkAllocationCallbacks *pAllocator,
                      VkShaderModule *pShaderModule)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_shader_module *mod =
      vk_zalloc(alloc, sizeof(*mod), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!mod)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&mod->base, VK_OBJECT_TYPE_SHADER_MODULE, &dev->base);

   VkShaderModule mod_handle = vn_shader_module_to_handle(mod);
   vn_async_vkCreateShaderModule(dev->primary_ring, device, pCreateInfo, NULL,
                                 &mod_handle);

   *pShaderModule = mod_handle;

   return VK_SUCCESS;
}

void
vn_DestroyShaderModule(VkDevice device,
                       VkShaderModule shaderModule,
                       const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_shader_module *mod = vn_shader_module_from_handle(shaderModule);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!mod)
      return;

   vn_async_vkDestroyShaderModule(dev->primary_ring, device, shaderModule,
                                  NULL);

   vn_object_base_fini(&mod->base);
   vk_free(alloc, mod);
}

/* pipeline layout commands */

static void
vn_pipeline_layout_destroy(struct vn_device *dev,
                           struct vn_pipeline_layout *pipeline_layout)
{
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   if (pipeline_layout->push_descriptor_set_layout) {
      vn_descriptor_set_layout_unref(
         dev, pipeline_layout->push_descriptor_set_layout);
   }
   vn_async_vkDestroyPipelineLayout(
      dev->primary_ring, vn_device_to_handle(dev),
      vn_pipeline_layout_to_handle(pipeline_layout), NULL);

   vn_object_base_fini(&pipeline_layout->base);
   vk_free(alloc, pipeline_layout);
}

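/* The pipeline layout is reference-counted: pipelines retain a reference to
 * the layout they were created against (released in
 * vn_destroy_pipeline_handles_internal below), so destruction is deferred
 * until the last reference is dropped via the helpers here.
 */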
static inline struct vn_pipeline_layout *
vn_pipeline_layout_ref(struct vn_device *dev,
                       struct vn_pipeline_layout *pipeline_layout)
{
   vn_refcount_inc(&pipeline_layout->refcount);
   return pipeline_layout;
}

static inline void
vn_pipeline_layout_unref(struct vn_device *dev,
                         struct vn_pipeline_layout *pipeline_layout)
{
   if (vn_refcount_dec(&pipeline_layout->refcount))
      vn_pipeline_layout_destroy(dev, pipeline_layout);
}

VkResult
vn_CreatePipelineLayout(VkDevice device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);
   /* ignore pAllocator as the pipeline layout is reference-counted */
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   struct vn_pipeline_layout *layout =
      vk_zalloc(alloc, sizeof(*layout), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!layout)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&layout->base, VK_OBJECT_TYPE_PIPELINE_LAYOUT,
                       &dev->base);
   layout->refcount = VN_REFCOUNT_INIT(1);

   for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; i++) {
      struct vn_descriptor_set_layout *descriptor_set_layout =
         vn_descriptor_set_layout_from_handle(pCreateInfo->pSetLayouts[i]);

      /* Avoid null derefs. pSetLayouts may contain VK_NULL_HANDLE.
       *
       * From the Vulkan 1.3.254 spec:
       *    VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter
       *
       *    If setLayoutCount is not 0, pSetLayouts must be a valid pointer to
       *    an array of setLayoutCount valid or VK_NULL_HANDLE
       *    VkDescriptorSetLayout handles
       */
      if (descriptor_set_layout &&
          descriptor_set_layout->is_push_descriptor) {
         layout->push_descriptor_set_layout =
            vn_descriptor_set_layout_ref(dev, descriptor_set_layout);
         break;
      }
   }

   layout->has_push_constant_ranges = pCreateInfo->pushConstantRangeCount > 0;

   VkPipelineLayout layout_handle = vn_pipeline_layout_to_handle(layout);
   vn_async_vkCreatePipelineLayout(dev->primary_ring, device, pCreateInfo,
                                   NULL, &layout_handle);

   *pPipelineLayout = layout_handle;

   return VK_SUCCESS;
}

void
vn_DestroyPipelineLayout(VkDevice device,
                         VkPipelineLayout pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_pipeline_layout *layout =
      vn_pipeline_layout_from_handle(pipelineLayout);

   if (!layout)
      return;

   vn_pipeline_layout_unref(dev, layout);
}

/* pipeline cache commands */

VkResult
vn_CreatePipelineCache(VkDevice device,
                       const VkPipelineCacheCreateInfo *pCreateInfo,
                       const VkAllocationCallbacks *pAllocator,
                       VkPipelineCache *pPipelineCache)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_pipeline_cache *cache =
      vk_zalloc(alloc, sizeof(*cache), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!cache)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&cache->base, VK_OBJECT_TYPE_PIPELINE_CACHE,
                       &dev->base);

   VkPipelineCacheCreateInfo local_create_info;
   if (pCreateInfo->initialDataSize) {
      const struct vk_pipeline_cache_header *header =
         pCreateInfo->pInitialData;

      local_create_info = *pCreateInfo;
      local_create_info.initialDataSize -= header->header_size;
      local_create_info.pInitialData += header->header_size;
      pCreateInfo = &local_create_info;
   }

   VkPipelineCache cache_handle = vn_pipeline_cache_to_handle(cache);
   vn_async_vkCreatePipelineCache(dev->primary_ring, device, pCreateInfo,
                                  NULL, &cache_handle);

   *pPipelineCache = cache_handle;

   return VK_SUCCESS;
}

void
vn_DestroyPipelineCache(VkDevice device,
                        VkPipelineCache pipelineCache,
                        const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_pipeline_cache *cache =
      vn_pipeline_cache_from_handle(pipelineCache);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!cache)
      return;

   vn_async_vkDestroyPipelineCache(dev->primary_ring, device, pipelineCache,
                                   NULL);

   vn_object_base_fini(&cache->base);
   vk_free(alloc, cache);
}

static struct vn_ring *
vn_get_target_ring(struct vn_device *dev)
{
   if (vn_tls_get_async_pipeline_create())
      return dev->primary_ring;

   struct vn_ring *ring = vn_tls_get_ring(dev->instance);
   if (!ring)
      return NULL;

   if (ring != dev->primary_ring) {
      /* Ensure pipeline create and pipeline cache retrieval dependencies are
       * ready on the renderer side.
       *
       * TODO:
       * - For pipeline create, track ring seqnos of layout and renderpass
       *   objects it depends on, and only wait for those seqnos once.
       * - For pipeline cache retrieval, track the ring seqno of the pipeline
       *   cache object it depends on. Treat different sync modes separately.
       */
      vn_ring_wait_all(dev->primary_ring);
   }
   return ring;
}

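/**
 * Prepend a guest-side vk_pipeline_cache_header, filled from the virtual
 * physical device's properties, to the cache blob returned by the renderer.
 * This mirrors the header stripping done in vn_CreatePipelineCache, so the
 * guest always sees a header whose vendorID, deviceID, and pipelineCacheUUID
 * match the device it queried.
 */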
VkResult
vn_GetPipelineCacheData(VkDevice device,
                        VkPipelineCache pipelineCache,
                        size_t *pDataSize,
                        void *pData)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_physical_device *physical_dev = dev->physical_device;
   struct vn_ring *target_ring = vn_get_target_ring(dev);

   struct vk_pipeline_cache_header *header = pData;
   VkResult result;
   if (!pData) {
      result = vn_call_vkGetPipelineCacheData(target_ring, device,
                                              pipelineCache, pDataSize, NULL);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);

      *pDataSize += sizeof(*header);
      return VK_SUCCESS;
   }

   if (*pDataSize <= sizeof(*header)) {
      *pDataSize = 0;
      return VK_INCOMPLETE;
   }

   const struct vk_properties *props = &physical_dev->base.base.properties;
   header->header_size = sizeof(*header);
   header->header_version = VK_PIPELINE_CACHE_HEADER_VERSION_ONE;
   header->vendor_id = props->vendorID;
   header->device_id = props->deviceID;
   memcpy(header->uuid, props->pipelineCacheUUID, VK_UUID_SIZE);

   *pDataSize -= header->header_size;
   result =
      vn_call_vkGetPipelineCacheData(target_ring, device, pipelineCache,
                                     pDataSize, pData + header->header_size);
   if (result < VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pDataSize += header->header_size;

   return result;
}

VkResult
vn_MergePipelineCaches(VkDevice device,
                       VkPipelineCache dstCache,
                       uint32_t srcCacheCount,
                       const VkPipelineCache *pSrcCaches)
{
   struct vn_device *dev = vn_device_from_handle(device);

   vn_async_vkMergePipelineCaches(dev->primary_ring, device, dstCache,
                                  srcCacheCount, pSrcCaches);

   return VK_SUCCESS;
}

/* pipeline commands */

static struct vn_graphics_pipeline *
vn_graphics_pipeline_from_handle(VkPipeline pipeline_h)
{
   struct vn_pipeline *p = vn_pipeline_from_handle(pipeline_h);
   assert(p->type == VN_PIPELINE_TYPE_GRAPHICS);
   return (struct vn_graphics_pipeline *)p;
}

static bool
vn_create_pipeline_handles(struct vn_device *dev,
                           enum vn_pipeline_type type,
                           uint32_t pipeline_count,
                           VkPipeline *pipeline_handles,
                           const VkAllocationCallbacks *alloc)
{
   size_t pipeline_size;

   switch (type) {
   case VN_PIPELINE_TYPE_GRAPHICS:
      pipeline_size = sizeof(struct vn_graphics_pipeline);
      break;
   case VN_PIPELINE_TYPE_COMPUTE:
      pipeline_size = sizeof(struct vn_pipeline);
      break;
   }

   for (uint32_t i = 0; i < pipeline_count; i++) {
      struct vn_pipeline *pipeline =
         vk_zalloc(alloc, pipeline_size, VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!pipeline) {
         for (uint32_t j = 0; j < i; j++) {
            pipeline = vn_pipeline_from_handle(pipeline_handles[j]);
            vn_object_base_fini(&pipeline->base);
            vk_free(alloc, pipeline);
         }

         memset(pipeline_handles, 0,
                pipeline_count * sizeof(pipeline_handles[0]));
         return false;
      }

      vn_object_base_init(&pipeline->base, VK_OBJECT_TYPE_PIPELINE,
                          &dev->base);
      pipeline->type = type;
      pipeline_handles[i] = vn_pipeline_to_handle(pipeline);
   }

   return true;
}

static void
vn_destroy_pipeline_handles_internal(struct vn_device *dev,
                                     uint32_t pipeline_count,
                                     VkPipeline *pipeline_handles,
                                     const VkAllocationCallbacks *alloc,
                                     bool failed_only)
{
   for (uint32_t i = 0; i < pipeline_count; i++) {
      struct vn_pipeline *pipeline =
         vn_pipeline_from_handle(pipeline_handles[i]);

      if (!failed_only || pipeline->base.id == 0) {
         if (pipeline->layout) {
            vn_pipeline_layout_unref(dev, pipeline->layout);
         }
         vn_object_base_fini(&pipeline->base);
         vk_free(alloc, pipeline);
         pipeline_handles[i] = VK_NULL_HANDLE;
      }
   }
}

static inline void
vn_destroy_pipeline_handles(struct vn_device *dev,
                            uint32_t pipeline_count,
                            VkPipeline *pipeline_handles,
                            const VkAllocationCallbacks *alloc)
{
   vn_destroy_pipeline_handles_internal(dev, pipeline_count, pipeline_handles,
                                        alloc, false);
}

static inline void
vn_destroy_failed_pipeline_handles(struct vn_device *dev,
                                   uint32_t pipeline_count,
                                   VkPipeline *pipeline_handles,
                                   const VkAllocationCallbacks *alloc)
{
   vn_destroy_pipeline_handles_internal(dev, pipeline_count, pipeline_handles,
                                        alloc, true);
}

#define VN_PIPELINE_CREATE_SYNC_MASK                                         \
   (VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT |             \
    VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT)
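
/* Both flags make the renderer's compile result observable to the app, so
 * creation with either flag set presumably must wait for the actual result
 * rather than being encoded asynchronously with an assumed VK_SUCCESS.
 */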

static struct vn_graphics_pipeline_fix_tmp *
vn_graphics_pipeline_fix_tmp_alloc(const VkAllocationCallbacks *alloc,
                                   uint32_t info_count,
                                   bool alloc_pnext)
{
   struct vn_graphics_pipeline_fix_tmp *tmp;
   VkGraphicsPipelineCreateInfo *infos;
   VkPipelineMultisampleStateCreateInfo *multisample_state_infos;
   VkPipelineViewportStateCreateInfo *viewport_state_infos;

   /* for pNext */
   VkGraphicsPipelineLibraryCreateInfoEXT *gpl_infos;
   VkPipelineCreateFlags2CreateInfo *flags2_infos;
   VkPipelineCreationFeedbackCreateInfo *feedback_infos;
   VkPipelineFragmentShadingRateStateCreateInfoKHR *fsr_infos;
   VkPipelineLibraryCreateInfoKHR *library_infos;
   VkPipelineRenderingCreateInfo *rendering_infos;

   VK_MULTIALLOC(ma);
   vk_multialloc_add(&ma, &tmp, __typeof__(*tmp), 1);
   vk_multialloc_add(&ma, &infos, __typeof__(*infos), info_count);
   vk_multialloc_add(&ma, &multisample_state_infos,
                     __typeof__(*multisample_state_infos), info_count);
   vk_multialloc_add(&ma, &viewport_state_infos,
                     __typeof__(*viewport_state_infos), info_count);

   if (alloc_pnext) {
      vk_multialloc_add(&ma, &gpl_infos, __typeof__(*gpl_infos), info_count);
      vk_multialloc_add(&ma, &flags2_infos, __typeof__(*flags2_infos),
                        info_count);
      vk_multialloc_add(&ma, &feedback_infos, __typeof__(*feedback_infos),
                        info_count);
      vk_multialloc_add(&ma, &fsr_infos, __typeof__(*fsr_infos), info_count);
      vk_multialloc_add(&ma, &library_infos, __typeof__(*library_infos),
                        info_count);
      vk_multialloc_add(&ma, &rendering_infos, __typeof__(*rendering_infos),
                        info_count);
   }

   if (!vk_multialloc_zalloc(&ma, alloc, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND))
      return NULL;

   tmp->infos = infos;
   tmp->multisample_state_infos = multisample_state_infos;
   tmp->viewport_state_infos = viewport_state_infos;

   if (alloc_pnext) {
      tmp->gpl_infos = gpl_infos;
      tmp->flags2_infos = flags2_infos;
      tmp->feedback_infos = feedback_infos;
      tmp->fsr_infos = fsr_infos;
      tmp->library_infos = library_infos;
      tmp->rendering_infos = rendering_infos;
   }

   return tmp;
}

/**
 * Update \a gpl with the VkGraphicsPipelineLibraryFlagsEXT that the pipeline
 * provides directly (without linking). The spec says that the pipeline always
 * provides flags, but may do it implicitly.
 *
 * From the Vulkan 1.3.251 spec:
 *
 *    If this structure [VkGraphicsPipelineLibraryCreateInfoEXT] is
 *    omitted, and either VkGraphicsPipelineCreateInfo::flags includes
 *    VK_PIPELINE_CREATE_LIBRARY_BIT_KHR or the
 *    VkGraphicsPipelineCreateInfo::pNext chain includes
 *    a VkPipelineLibraryCreateInfoKHR structure with a libraryCount
 *    greater than 0, it is as if flags is 0. Otherwise if this
 *    structure is omitted, it is as if flags includes all possible subsets
 *    of the graphics pipeline (i.e. a complete graphics pipeline).
 */
static void
vn_graphics_pipeline_library_state_update(
   const VkGraphicsPipelineCreateInfo *info,
   VkPipelineCreateFlags2 flags2,
   struct vn_graphics_pipeline_library_state *restrict gpl)
{
   const VkGraphicsPipelineLibraryCreateInfoEXT *gpl_info =
      vk_find_struct_const(info->pNext,
                           GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT);
   const VkPipelineLibraryCreateInfoKHR *lib_info =
      vk_find_struct_const(info->pNext, PIPELINE_LIBRARY_CREATE_INFO_KHR);
   const uint32_t lib_count = lib_info ? lib_info->libraryCount : 0;

   if (gpl_info) {
      gpl->mask |= gpl_info->flags;
   } else if ((flags2 & VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR) ||
              lib_count > 0) {
      gpl->mask |= 0;
   } else {
      gpl->mask |=
         VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT |
         VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT |
         VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT |
         VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT;
   }
}

/**
 * Update \a dynamic with the VkDynamicState that the pipeline provides
 * directly (without linking).
 *
 * \a direct_gpl The VkGraphicsPipelineLibraryFlagsEXT that the pipeline sets
 *    directly (without linking).
 */
static void
vn_graphics_dynamic_state_update(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_library_state direct_gpl,
   struct vn_graphics_dynamic_state *restrict dynamic)
{
   const VkPipelineDynamicStateCreateInfo *dyn_info = info->pDynamicState;
   if (!dyn_info)
      return;

   struct vn_graphics_dynamic_state raw = { 0 };

   for (uint32_t i = 0; i < dyn_info->dynamicStateCount; i++) {
      switch (dyn_info->pDynamicStates[i]) {
      case VK_DYNAMIC_STATE_VERTEX_INPUT_EXT:
         raw.vertex_input = true;
         break;
      case VK_DYNAMIC_STATE_VIEWPORT:
         raw.viewport = true;
         break;
      case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT:
         raw.viewport_with_count = true;
         break;
      case VK_DYNAMIC_STATE_SAMPLE_MASK_EXT:
         raw.sample_mask = true;
         break;
      case VK_DYNAMIC_STATE_SCISSOR:
         raw.scissor = true;
         break;
      case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT:
         raw.scissor_with_count = true;
         break;
      case VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE:
         raw.rasterizer_discard_enable = true;
         break;
      default:
         break;
      }
   }

   /* We must ignore VkDynamicState unrelated to the
    * VkGraphicsPipelineLibraryFlagsEXT that the pipeline provides directly
    * (without linking).
    *
    *    [Vulkan 1.3.252]
    *    Dynamic state values set via pDynamicState must be ignored if the
    *    state they correspond to is not otherwise statically set by one of
    *    the state subsets used to create the pipeline.
    *
    * In general, we must update dynamic state bits with `|=` rather than `=`
    * because multiple GPL state subsets can enable the same dynamic state.
    *
    *    [Vulkan 1.3.252]
    *    Any linked library that has dynamic state enabled that same dynamic
    *    state must also be enabled in all the other linked libraries to which
    *    that dynamic state applies.
    */
   if (direct_gpl.vertex_input) {
      dynamic->vertex_input |= raw.vertex_input;
   }
   if (direct_gpl.pre_raster_shaders) {
      dynamic->viewport |= raw.viewport;
      dynamic->viewport_with_count |= raw.viewport_with_count;
      dynamic->scissor |= raw.scissor;
      dynamic->scissor_with_count |= raw.scissor_with_count;
      dynamic->rasterizer_discard_enable |= raw.rasterizer_discard_enable;
   }
   if (direct_gpl.fragment_shader) {
      dynamic->sample_mask |= raw.sample_mask;
   }
   if (direct_gpl.fragment_output) {
      dynamic->sample_mask |= raw.sample_mask;
   }
}

/**
 * Update \a shader_stages with the VkShaderStageFlags that the pipeline
 * provides directly (without linking).
 *
 * \a direct_gpl The VkGraphicsPipelineLibraryFlagsEXT that the pipeline sets
 *    directly (without linking).
 */
static void
vn_graphics_shader_stages_update(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_library_state direct_gpl,
   struct vn_graphics_pipeline_fix_desc *restrict valid,
   VkShaderStageFlags *restrict shader_stages)
{
   /* From the Vulkan 1.3.251 spec:
    *
    *    VUID-VkGraphicsPipelineCreateInfo-flags-06640
    *
    *    If VkGraphicsPipelineLibraryCreateInfoEXT::flags includes
    *    VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT or
    *    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, pStages must be
    *    a valid pointer to an array of stageCount valid
    *    VkPipelineShaderStageCreateInfo structures
    */
   if (!direct_gpl.pre_raster_shaders && !direct_gpl.fragment_shader)
      return;

   valid->self.shader_stages = true;

   for (uint32_t i = 0; i < info->stageCount; i++) {
      /* We do not need to ignore the stages irrelevant to the GPL flags.
       * The following VUs require the app to provide only relevant stages.
       *
       * VUID-VkGraphicsPipelineCreateInfo-pStages-06894
       * VUID-VkGraphicsPipelineCreateInfo-pStages-06895
       * VUID-VkGraphicsPipelineCreateInfo-pStages-06896
       */
      *shader_stages |= info->pStages[i].stage;
   }
}

/**
 * Update the render pass state with the state that the pipeline provides
 * directly (without linking).
 *
 * \a direct_gpl The VkGraphicsPipelineLibraryFlagsEXT that the pipeline sets
 *    directly (without linking).
 */
static void
vn_render_pass_state_update(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_library_state direct_gpl,
   struct vn_graphics_pipeline_fix_desc *restrict valid,
   struct vn_render_pass_state *restrict state)
{
   /* We must set validity before early returns, to ensure we don't erase
    * valid info during fixup.  We must not erase valid info because, even if
    * we don't read it, the host driver may read it.
    */

   /* VUID-VkGraphicsPipelineCreateInfo-flags-06643
    *
    * If VkGraphicsPipelineLibraryCreateInfoEXT::flags includes
    * VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT,
    * VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, or
    * VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT, and
    * renderPass is not VK_NULL_HANDLE, renderPass must be a valid
    * VkRenderPass handle
    */
   valid->self.render_pass |= direct_gpl.pre_raster_shaders ||
                              direct_gpl.fragment_shader ||
                              direct_gpl.fragment_output;

   /* VUID-VkGraphicsPipelineCreateInfo-renderPass-06579
    *
    * If the pipeline requires fragment output interface state, and renderPass
    * is VK_NULL_HANDLE, and
    * VkPipelineRenderingCreateInfo::colorAttachmentCount is not 0,
    * VkPipelineRenderingCreateInfo::pColorAttachmentFormats must be a valid
    * pointer to an array of colorAttachmentCount valid VkFormat values
    *
    * VUID-VkGraphicsPipelineCreateInfo-renderPass-06580
    *
    * If the pipeline requires fragment output interface state, and renderPass
    * is VK_NULL_HANDLE, each element of
    * VkPipelineRenderingCreateInfo::pColorAttachmentFormats must be a valid
    * VkFormat value
    */
   valid->pnext.rendering_info_formats |=
      direct_gpl.fragment_output && !info->renderPass;

   if (state->attachment_aspects != VK_IMAGE_ASPECT_METADATA_BIT) {
      /* We have previously collected the pipeline's attachment aspects.  We
       * do not need to inspect the attachment info again because VUs ensure
       * that all valid render pass info used to create the pipeline and its
       * linked pipelines are compatible.  Ignored info is not required to be
       * compatible across linked pipeline libraries. An example of ignored
       * info is VkPipelineRenderingCreateInfo::pColorAttachmentFormats
       * without
       * VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT.
       *
       * VUID-VkGraphicsPipelineCreateInfo-renderpass-06625
       * VUID-VkGraphicsPipelineCreateInfo-pLibraries-06628
       */
      return;
   }

   if (valid->self.render_pass && info->renderPass) {
      struct vn_render_pass *pass =
         vn_render_pass_from_handle(info->renderPass);
      state->attachment_aspects =
         pass->subpasses[info->subpass].attachment_aspects;
      return;
   }

   if (valid->pnext.rendering_info_formats) {
      state->attachment_aspects = 0;

      /* From the Vulkan 1.3.255 spec:
       *
       *    When a pipeline is created without a VkRenderPass, if this
       *    structure [VkPipelineRenderingCreateInfo] is present in the pNext
       *    chain of VkGraphicsPipelineCreateInfo, it specifies the view mask
       *    and format of attachments used for rendering.  If this structure
       *    is not specified, and the pipeline does not include
       *    a VkRenderPass, viewMask and colorAttachmentCount are 0, and
       *    depthAttachmentFormat and stencilAttachmentFormat are
       *    VK_FORMAT_UNDEFINED. If a graphics pipeline is created with
       *    a valid VkRenderPass, parameters of this structure are ignored.
       *
       * However, other spec text clearly states that the format members of
       * VkPipelineRenderingCreateInfo are ignored unless the pipeline
       * provides fragment output interface state directly (without linking).
       */
      const VkPipelineRenderingCreateInfo *r_info =
         vk_find_struct_const(info->pNext, PIPELINE_RENDERING_CREATE_INFO);

      if (r_info) {
         for (uint32_t i = 0; i < r_info->colorAttachmentCount; i++) {
            if (r_info->pColorAttachmentFormats[i]) {
               state->attachment_aspects |= VK_IMAGE_ASPECT_COLOR_BIT;
               break;
            }
         }
         if (r_info->depthAttachmentFormat)
            state->attachment_aspects |= VK_IMAGE_ASPECT_DEPTH_BIT;
         if (r_info->stencilAttachmentFormat)
            state->attachment_aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
      }

      return;
   }

   /* Aspects remain invalid. */
   assert(state->attachment_aspects == VK_IMAGE_ASPECT_METADATA_BIT);
}

static void
vn_graphics_pipeline_state_merge(
   struct vn_graphics_pipeline_state *restrict dst,
   const struct vn_graphics_pipeline_state *restrict src)
{
   /* The Vulkan 1.3.251 spec says:
    *    VUID-VkGraphicsPipelineCreateInfo-pLibraries-06611
    *
    *    Any pipeline libraries included via
    *    VkPipelineLibraryCreateInfoKHR::pLibraries must not include any state
    *    subset already defined by this structure or defined by any other
    *    pipeline library in VkPipelineLibraryCreateInfoKHR::pLibraries
    */
   assert(!(dst->gpl.mask & src->gpl.mask));

   dst->gpl.mask |= src->gpl.mask;
   dst->dynamic.mask |= src->dynamic.mask;
   dst->shader_stages |= src->shader_stages;

   VkImageAspectFlags src_aspects = src->render_pass.attachment_aspects;
   VkImageAspectFlags *dst_aspects = &dst->render_pass.attachment_aspects;

   if (src_aspects != VK_IMAGE_ASPECT_METADATA_BIT) {
      if (*dst_aspects != VK_IMAGE_ASPECT_METADATA_BIT) {
         /* All linked pipelines must have compatible render pass info. */
         assert(*dst_aspects == src_aspects);
      } else {
         *dst_aspects = src_aspects;
      }
   }

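   /* Per the vn_graphics_pipeline_state doc, rasterizer_discard_enable is
    * valid only when gpl.pre_raster_shaders is set, so take src's value only
    * when src provides the pre-rasterization shader state subset.
    */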
   if (src->gpl.pre_raster_shaders)
      dst->rasterizer_discard_enable = src->rasterizer_discard_enable;
}

/**
 * Fill \a state by reading the VkGraphicsPipelineCreateInfo pNext chain,
 * including any linked pipeline libraries. Return in \a out_fix_desc
 * a description of required fixes to the VkGraphicsPipelineCreateInfo chain.
 *
 * \pre state is zero-filled
 *
 * The logic for choosing which struct members to ignore, and which members
 * have valid values, is derived from the Vulkan spec sections for
 * VkGraphicsPipelineCreateInfo, VkGraphicsPipelineLibraryCreateInfoEXT, and
 * VkPipelineLibraryCreateInfoKHR. As of Vulkan 1.3.255, the spec text and VUs
 * still contain inconsistencies regarding the validity of struct members, so
 * read it carefully. Many of the VUs were written before
 * VK_EXT_graphics_pipeline_library and never updated. (Lina's advice: Focus
 * primarily on understanding the non-VU text, and use VUs to verify your
 * comprehension).
 */
static void
vn_graphics_pipeline_state_fill(
   const VkGraphicsPipelineCreateInfo *info,
   VkPipelineCreateFlags2 flags2,
   struct vn_graphics_pipeline_state *restrict state,
   struct vn_graphics_pipeline_fix_desc *out_fix_desc)
{
   /* Assume that state is already zero-filled.
    *
    * Invalidate attachment_aspects.
    */
   state->render_pass.attachment_aspects = VK_IMAGE_ASPECT_METADATA_BIT;

   const VkPipelineRenderingCreateInfo *rendering_info =
      vk_find_struct_const(info->pNext, PIPELINE_RENDERING_CREATE_INFO);
   const VkPipelineLibraryCreateInfoKHR *lib_info =
      vk_find_struct_const(info->pNext, PIPELINE_LIBRARY_CREATE_INFO_KHR);
   const uint32_t lib_count = lib_info ? lib_info->libraryCount : 0;

   /* This tracks which fields have valid values in the
    * VkGraphicsPipelineCreateInfo pNext chain.
    *
    * We initially assume that all fields are invalid. We flip fields from
    * invalid to valid as we dig through the pNext chain.
    *
    * A single field may be updated at multiple locations, therefore we update
    * with `|=` instead of `=`.
    *
    * If `valid.foo` is set, then foo has a valid value if foo exists in the
    * pNext chain. Even though NULL is not a valid pointer, NULL is considered
    * a valid *value* for a pointer-typed variable. Same for VK_NULL_HANDLE
    * and Vulkan handle-typed variables.
    *
    * Conversely, if `valid.foo` remains false at the end of this function,
    * then the Vulkan spec permits foo to have any value. If foo has a pointer
    * type, it may be an invalid pointer. If foo has a Vulkan handle type, it
    * may be an invalid handle.
    */
   struct vn_graphics_pipeline_fix_desc valid = { 0 };

   /* Merge the linked pipeline libraries. */
   for (uint32_t i = 0; i < lib_count; i++) {
      struct vn_graphics_pipeline *p =
         vn_graphics_pipeline_from_handle(lib_info->pLibraries[i]);
      vn_graphics_pipeline_state_merge(state, &p->state);
   }

   /* The VkGraphicsPipelineLibraryFlagsEXT that this pipeline provides
    * directly (without linking).
    */
   struct vn_graphics_pipeline_library_state direct_gpl = { 0 };
   vn_graphics_pipeline_library_state_update(info, flags2, &direct_gpl);

   /* From the Vulkan 1.3.251 spec:
    *    VUID-VkGraphicsPipelineCreateInfo-pLibraries-06611
    *
    *    Any pipeline libraries included via
    *    VkPipelineLibraryCreateInfoKHR::pLibraries must not include any state
    *    subset already defined by this structure or defined by any other
    *    pipeline library in VkPipelineLibraryCreateInfoKHR::pLibraries
    */
   assert(!(direct_gpl.mask & state->gpl.mask));

   /* Collect orthogonal state that is common to multiple GPL state subsets. */
   vn_graphics_dynamic_state_update(info, direct_gpl, &state->dynamic);
   vn_graphics_shader_stages_update(info, direct_gpl, &valid,
                                    &state->shader_stages);
   vn_render_pass_state_update(info, direct_gpl, &valid, &state->render_pass);

   /* Collect remaining pre-raster shaders state.
    *
    * Of the remaining state, we must first collect the pre-raster shaders
    * state because it influences how the other state is collected.
    */
   if (direct_gpl.pre_raster_shaders) {
      valid.self.tessellation_state |=
         (bool)(state->shader_stages &
                (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
                 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
      valid.self.rasterization_state = true;
      valid.self.pipeline_layout = true;

      if (info->pRasterizationState) {
         state->rasterizer_discard_enable =
            info->pRasterizationState->rasterizerDiscardEnable;
      }

      const bool is_raster_statically_disabled =
         !state->dynamic.rasterizer_discard_enable &&
         state->rasterizer_discard_enable;

      if (!is_raster_statically_disabled) {
         valid.self.viewport_state = true;

         valid.self.viewport_state_viewports =
            !state->dynamic.viewport && !state->dynamic.viewport_with_count;

         valid.self.viewport_state_scissors =
            !state->dynamic.scissor && !state->dynamic.scissor_with_count;
      }

      /* Defer setting the flag until all its state is filled. */
      state->gpl.pre_raster_shaders = true;
   }

   /* Collect remaining vertex input interface state.
    *
    * TODO(VK_EXT_mesh_shader): Update.
    */
   if (direct_gpl.vertex_input) {
      const bool may_have_vertex_shader =
         !state->gpl.pre_raster_shaders ||
         (state->shader_stages & VK_SHADER_STAGE_VERTEX_BIT);

      valid.self.vertex_input_state |=
         may_have_vertex_shader && !state->dynamic.vertex_input;

      valid.self.input_assembly_state |= may_have_vertex_shader;

      /* Defer setting the flag until all its state is filled. */
      state->gpl.vertex_input = true;
   }

   /* Does this pipeline have rasterization statically disabled? If disabled,
    * then this pipeline does not directly provide fragment shader state nor
    * fragment output state.
    *
    * About fragment shader state, the Vulkan 1.3.254 spec says:
    *
    *    If a pipeline specifies pre-rasterization state either directly or by
    *    including it as a pipeline library and rasterizerDiscardEnable is set
    *    to VK_FALSE or VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE is used,
    *    this state must be specified to create a complete graphics pipeline.
    *
    *    If a pipeline includes
    *    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT in
    *    VkGraphicsPipelineLibraryCreateInfoEXT::flags either explicitly or as
    *    a default, and either the conditions requiring this state for
    *    a complete graphics pipeline are met or this pipeline does not
    *    specify pre-rasterization state in any way, that pipeline must
    *    specify this state directly.
    *
    * About fragment output state, the Vulkan 1.3.254 spec says the same, but
    * with VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT.
    */
   const bool is_raster_statically_disabled =
      state->gpl.pre_raster_shaders &&
      !state->dynamic.rasterizer_discard_enable &&
      state->rasterizer_discard_enable;

   /* Collect remaining fragment shader state. */
   if (direct_gpl.fragment_shader) {
      if (!is_raster_statically_disabled) {
         /* Validity of pMultisampleState is easy here.
          *
          *    VUID-VkGraphicsPipelineCreateInfo-pMultisampleState-06629
          *
          *    If the pipeline requires fragment shader state
          *    pMultisampleState must be NULL or a valid pointer to a valid
          *    VkPipelineMultisampleStateCreateInfo structure
          */
         valid.self.multisample_state = true;

         valid.self.multisample_state_sample_mask =
            !state->dynamic.sample_mask;

         if ((state->render_pass.attachment_aspects &
              (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
            valid.self.depth_stencil_state = true;
         } else if (state->render_pass.attachment_aspects ==
                       VK_IMAGE_ASPECT_METADATA_BIT &&
                    (flags2 & VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR)) {
            /* The app has not yet provided render pass info, neither directly
             * in this VkGraphicsPipelineCreateInfo nor in any linked pipeline
             * libraries. Therefore we do not know if the final complete
             * pipeline will have any depth or stencil attachments. If the
             * final complete pipeline does have depth or stencil attachments,
             * then the pipeline will use
             * VkPipelineDepthStencilStateCreateInfo. Therefore, we must not
             * ignore it.
             */
            valid.self.depth_stencil_state = true;
         }

         valid.self.pipeline_layout = true;
      }

      /* Defer setting the flag until all its state is filled. */
      state->gpl.fragment_shader = true;
   }

   /* Collect remaining fragment output interface state. */
   if (direct_gpl.fragment_output) {
      if (!is_raster_statically_disabled) {
         /* Validity of pMultisampleState is easy here.
          *
          *    VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751
          *
          *    If the pipeline requires fragment output interface state,
          *    pMultisampleState must be a valid pointer to a valid
          *    VkPipelineMultisampleStateCreateInfo structure
          */
         valid.self.multisample_state = true;

         valid.self.multisample_state_sample_mask =
            !state->dynamic.sample_mask;

         valid.self.color_blend_state |=
            (bool)(state->render_pass.attachment_aspects &
                   VK_IMAGE_ASPECT_COLOR_BIT);
         valid.self.depth_stencil_state |=
            (bool)(state->render_pass.attachment_aspects &
                   (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT));
      }

      /* Defer setting the flag until all its state is filled. */
      state->gpl.fragment_output = true;
   }

   /* After collecting the direct_gpl state, check the final state to decide
    * whether VkPipelineLayout must be valid, e.g. when this create info
    * provides the final layout of a linked pipeline.
    *
    * From the Vulkan 1.3.275 spec:
    *    VUID-VkGraphicsPipelineCreateInfo-layout-06602
    *
    *    If the pipeline requires fragment shader state or pre-rasterization
    *    shader state, layout must be a valid VkPipelineLayout handle
    */
   if ((state->gpl.fragment_shader && !is_raster_statically_disabled) ||
       state->gpl.pre_raster_shaders)
      valid.self.pipeline_layout = true;

   /* Pipeline Derivatives
    *
    *    VUID-VkGraphicsPipelineCreateInfo-flags-07984
    *
    *    If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and
    *    basePipelineIndex is -1, basePipelineHandle must be a valid graphics
    *    VkPipeline handle
    */
   if ((flags2 & VK_PIPELINE_CREATE_2_DERIVATIVE_BIT) &&
       info->basePipelineIndex == -1)
      valid.self.base_pipeline_handle = true;

   *out_fix_desc = (struct vn_graphics_pipeline_fix_desc) {
      .self = {
         /* clang-format off */
         .shader_stages =
            !valid.self.shader_stages &&
            info->pStages,
         .vertex_input_state =
            !valid.self.vertex_input_state &&
            info->pVertexInputState,
         .input_assembly_state =
            !valid.self.input_assembly_state &&
            info->pInputAssemblyState,
         .tessellation_state =
            !valid.self.tessellation_state &&
            info->pTessellationState,
         .viewport_state =
            !valid.self.viewport_state &&
            info->pViewportState,
         .viewport_state_viewports =
            !valid.self.viewport_state_viewports &&
            valid.self.viewport_state &&
            info->pViewportState &&
            info->pViewportState->pViewports &&
            info->pViewportState->viewportCount,
         .viewport_state_scissors =
            !valid.self.viewport_state_scissors &&
            valid.self.viewport_state &&
            info->pViewportState &&
            info->pViewportState->pScissors &&
            info->pViewportState->scissorCount,
         .rasterization_state =
            !valid.self.rasterization_state &&
            info->pRasterizationState,
         .multisample_state =
            !valid.self.multisample_state &&
            info->pMultisampleState,
         .multisample_state_sample_mask =
            !valid.self.multisample_state_sample_mask &&
            valid.self.multisample_state &&
            info->pMultisampleState &&
            info->pMultisampleState->pSampleMask,
         .depth_stencil_state =
            !valid.self.depth_stencil_state &&
            info->pDepthStencilState,
         .color_blend_state =
            !valid.self.color_blend_state &&
            info->pColorBlendState,
         .pipeline_layout =
            !valid.self.pipeline_layout &&
            info->layout,
         .render_pass =
            !valid.self.render_pass &&
            info->renderPass,
         .base_pipeline_handle =
            !valid.self.base_pipeline_handle &&
            info->basePipelineHandle,
         /* clang-format on */
      },
      .pnext = {
         /* clang-format off */
         .rendering_info_formats =
            !valid.pnext.rendering_info_formats &&
            rendering_info &&
            rendering_info->pColorAttachmentFormats &&
            rendering_info->colorAttachmentCount,
         /* clang-format on */
      },
   };
}
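
/*
 * Illustrative sketch, not part of the driver: every .self bit computed
 * above follows the same "provided by the app but not valid for the
 * effective state" pattern, so each erasure condition can be read as a
 * single predicate. The helper name is hypothetical.
 */
static inline bool
vn_example_must_erase(bool valid, const void *app_ptr)
{
   /* Erase only fields the app set that the effective GPL state ignores. */
   return !valid && app_ptr != NULL;
}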

static void
vn_fix_graphics_pipeline_create_info_self(
   const struct vn_graphics_pipeline_info_self *ignore,
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_fix_tmp *fix_tmp,
   uint32_t index)
{
   /* VkGraphicsPipelineCreateInfo */
   if (ignore->shader_stages) {
      fix_tmp->infos[index].stageCount = 0;
      fix_tmp->infos[index].pStages = NULL;
   }
   if (ignore->vertex_input_state)
      fix_tmp->infos[index].pVertexInputState = NULL;
   if (ignore->input_assembly_state)
      fix_tmp->infos[index].pInputAssemblyState = NULL;
   if (ignore->tessellation_state)
      fix_tmp->infos[index].pTessellationState = NULL;
   if (ignore->viewport_state)
      fix_tmp->infos[index].pViewportState = NULL;
   if (ignore->rasterization_state)
      fix_tmp->infos[index].pRasterizationState = NULL;
   if (ignore->multisample_state)
      fix_tmp->infos[index].pMultisampleState = NULL;
   if (ignore->depth_stencil_state)
      fix_tmp->infos[index].pDepthStencilState = NULL;
   if (ignore->color_blend_state)
      fix_tmp->infos[index].pColorBlendState = NULL;
   if (ignore->pipeline_layout)
      fix_tmp->infos[index].layout = VK_NULL_HANDLE;
   if (ignore->base_pipeline_handle)
      fix_tmp->infos[index].basePipelineHandle = VK_NULL_HANDLE;

   /* VkPipelineMultisampleStateCreateInfo */
   if (ignore->multisample_state_sample_mask) {
      /* Swap original pMultisampleState with temporary state. */
      fix_tmp->multisample_state_infos[index] = *info->pMultisampleState;
      fix_tmp->infos[index].pMultisampleState =
         &fix_tmp->multisample_state_infos[index];

      fix_tmp->multisample_state_infos[index].pSampleMask = NULL;
   }

   /* VkPipelineViewportStateCreateInfo */
   if (ignore->viewport_state_viewports || ignore->viewport_state_scissors) {
      /* Swap original pViewportState with temporary state. */
      fix_tmp->viewport_state_infos[index] = *info->pViewportState;
      fix_tmp->infos[index].pViewportState =
         &fix_tmp->viewport_state_infos[index];

      if (ignore->viewport_state_viewports)
         fix_tmp->viewport_state_infos[index].pViewports = NULL;
      if (ignore->viewport_state_scissors)
         fix_tmp->viewport_state_infos[index].pScissors = NULL;
   }
}
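
/*
 * Illustrative sketch, not part of the driver: the app's nested state
 * structs are const, so a field cannot be cleared in place. The pattern
 * above shallow-copies the struct into preallocated scratch storage,
 * repoints the (already copied) top-level info at the copy, and edits only
 * the copy. A generic rendition with a hypothetical helper name:
 */
static inline void
vn_example_cow_multisample_state(
   const VkGraphicsPipelineCreateInfo *app_info,
   VkGraphicsPipelineCreateInfo *mutable_info,
   VkPipelineMultisampleStateCreateInfo *scratch)
{
   *scratch = *app_info->pMultisampleState; /* copy into mutable storage */
   scratch->pSampleMask = NULL;             /* edit the copy, not the app's */
   mutable_info->pMultisampleState = scratch;
}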

static void
vn_graphics_pipeline_create_info_pnext_init(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_fix_tmp *fix_tmp,
   uint32_t index)
{
   VkGraphicsPipelineLibraryCreateInfoEXT *gpl = &fix_tmp->gpl_infos[index];
   VkPipelineCreateFlags2CreateInfo *flags2 = &fix_tmp->flags2_infos[index];
   VkPipelineCreationFeedbackCreateInfo *feedback =
      &fix_tmp->feedback_infos[index];
   VkPipelineFragmentShadingRateStateCreateInfoKHR *fsr =
      &fix_tmp->fsr_infos[index];
   VkPipelineLibraryCreateInfoKHR *library = &fix_tmp->library_infos[index];
   VkPipelineRenderingCreateInfo *rendering =
      &fix_tmp->rendering_infos[index];

   VkBaseOutStructure *cur = (void *)&fix_tmp->infos[index];

   vk_foreach_struct_const(src, info->pNext) {
      void *next = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT:
         memcpy(gpl, src, sizeof(*gpl));
         next = gpl;
         break;
      case VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO:
         memcpy(flags2, src, sizeof(*flags2));
         next = flags2;
         break;
      case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO:
         memcpy(feedback, src, sizeof(*feedback));
         next = feedback;
         break;
      case VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR:
         memcpy(fsr, src, sizeof(*fsr));
         next = fsr;
         break;
      case VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR:
         memcpy(library, src, sizeof(*library));
         next = library;
         break;
      case VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO:
         memcpy(rendering, src, sizeof(*rendering));
         next = rendering;
         break;
      default:
         break;
      }

      if (next) {
         cur->pNext = next;
         cur = next;
      }
   }

   cur->pNext = NULL;
}
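
/*
 * Illustrative sketch, not part of the driver: the loop above deep-copies
 * only the pNext structs venus knows how to encode and relinks the copies
 * into a fresh chain, silently dropping unknown structs. The relink step in
 * isolation (hypothetical helper name):
 */
static inline VkBaseOutStructure *
vn_example_chain_append(VkBaseOutStructure *tail, void *copied)
{
   tail->pNext = copied;                /* link the copy after the tail */
   return (VkBaseOutStructure *)copied; /* the copy becomes the new tail */
}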

static void
vn_fix_graphics_pipeline_create_info_pnext(
   const struct vn_graphics_pipeline_info_pnext *ignore,
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_fix_tmp *fix_tmp,
   uint32_t index)
{
   /* initialize pNext chain with allocated tmp storage */
   vn_graphics_pipeline_create_info_pnext_init(info, fix_tmp, index);

   /* VkPipelineRenderingCreateInfo */
   if (ignore->rendering_info_formats) {
      fix_tmp->rendering_infos[index].colorAttachmentCount = 0;
      fix_tmp->rendering_infos[index].pColorAttachmentFormats = NULL;
   }
}

static const VkGraphicsPipelineCreateInfo *
vn_fix_graphics_pipeline_create_infos(
   struct vn_device *dev,
   uint32_t info_count,
   const VkGraphicsPipelineCreateInfo *infos,
   const struct vn_graphics_pipeline_fix_desc fix_descs[info_count],
   struct vn_graphics_pipeline_fix_tmp **out_fix_tmp,
   const VkAllocationCallbacks *alloc)
{
   uint32_t self_mask = 0;
   uint32_t pnext_mask = 0;
   for (uint32_t i = 0; i < info_count; i++) {
      self_mask |= fix_descs[i].self.mask;
      pnext_mask |= fix_descs[i].pnext.mask;
   }

   if (!self_mask && !pnext_mask) {
      /* No fix is needed. */
      *out_fix_tmp = NULL;
      return infos;
   }

   /* Make it visible in traces whether any fixes were applied. */
   VN_TRACE_SCOPE("sanitize pipeline");

   struct vn_graphics_pipeline_fix_tmp *fix_tmp =
      vn_graphics_pipeline_fix_tmp_alloc(alloc, info_count, pnext_mask);
   if (!fix_tmp)
      return NULL;

   memcpy(fix_tmp->infos, infos, info_count * sizeof(infos[0]));

   for (uint32_t i = 0; i < info_count; i++) {
      if (fix_descs[i].self.mask) {
         vn_fix_graphics_pipeline_create_info_self(&fix_descs[i].self,
                                                   &infos[i], fix_tmp, i);
      }
      if (fix_descs[i].pnext.mask) {
         vn_fix_graphics_pipeline_create_info_pnext(&fix_descs[i].pnext,
                                                    &infos[i], fix_tmp, i);
      }
   }

   *out_fix_tmp = fix_tmp;
   return fix_tmp->infos;
}
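
/*
 * Illustrative sketch, not part of the driver: because each fix desc is a
 * union over a bitmask, one OR-reduction over all infos decides whether any
 * scratch allocation is needed at all. The reduction on its own
 * (hypothetical helper name):
 */
static inline bool
vn_example_any_fix_needed(
   uint32_t info_count, const struct vn_graphics_pipeline_fix_desc *descs)
{
   uint32_t mask = 0;
   for (uint32_t i = 0; i < info_count; i++)
      mask |= descs[i].self.mask | descs[i].pnext.mask;
   return mask != 0; /* zero: pass the app's infos through untouched */
}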

/**
 * We invalidate each VkPipelineCreationFeedback. This is a legal but useless
 * implementation.
 *
 * We invalidate because the venus protocol (as of 2022-08-25) does not know
 * that the VkPipelineCreationFeedback structs in the
 * VkGraphicsPipelineCreateInfo pNext are output parameters. Before
 * VK_EXT_pipeline_creation_feedback, the pNext chain was input-only.
 */
static void
vn_invalidate_pipeline_creation_feedback(const VkBaseInStructure *chain)
{
   const VkPipelineCreationFeedbackCreateInfo *feedback_info =
      vk_find_struct_const(chain, PIPELINE_CREATION_FEEDBACK_CREATE_INFO);

   if (!feedback_info)
      return;

   feedback_info->pPipelineCreationFeedback->flags = 0;

   for (uint32_t i = 0; i < feedback_info->pipelineStageCreationFeedbackCount;
        i++)
      feedback_info->pPipelineStageCreationFeedbacks[i].flags = 0;
}
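
/*
 * Illustrative sketch, not part of the driver: with flags zeroed,
 * VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT is clear, which per spec tells
 * the app to disregard the rest of the feedback. A well-behaved app checks
 * exactly that bit before trusting the data (hypothetical app-side code):
 */
static inline bool
vn_example_feedback_usable(const VkPipelineCreationFeedback *feedback)
{
   return (feedback->flags & VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT) != 0;
}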

static inline VkPipelineCreateFlags2
vn_pipeline_create_flags2(const void *pnext, VkPipelineCreateFlags flags)
{
   const VkPipelineCreateFlags2CreateInfo *flags2 =
      vk_find_struct_const(pnext, PIPELINE_CREATE_FLAGS_2_CREATE_INFO);
   return flags2 ? flags2->flags : flags;
}
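
/*
 * Illustrative sketch, not part of the driver: the helper above prefers the
 * 64-bit flags chained via VkPipelineCreateFlags2CreateInfo and falls back
 * to the legacy 32-bit flags field. A hypothetical caller exercising the
 * chained path:
 */
static VkPipelineCreateFlags2
vn_example_flags2_lookup(void)
{
   const VkPipelineCreateFlags2CreateInfo flags2_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO,
      .flags = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT,
   };
   const VkGraphicsPipelineCreateInfo info = {
      .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
      .pNext = &flags2_info,
      /* legacy .flags left zero; the chained struct takes precedence */
   };
   return vn_pipeline_create_flags2(info.pNext, info.flags);
}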

VkResult
vn_CreateGraphicsPipelines(VkDevice device,
                           VkPipelineCache pipelineCache,
                           uint32_t createInfoCount,
                           const VkGraphicsPipelineCreateInfo *pCreateInfos,
                           const VkAllocationCallbacks *pAllocator,
                           VkPipeline *pPipelines)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   bool want_sync = false;
   VkResult result;

   /* silence -Wmaybe-uninitialized false alarm on release build with gcc */
   if (!createInfoCount)
      return VK_SUCCESS;

   memset(pPipelines, 0, sizeof(*pPipelines) * createInfoCount);

   if (!vn_create_pipeline_handles(dev, VN_PIPELINE_TYPE_GRAPHICS,
                                   createInfoCount, pPipelines, alloc)) {
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   STACK_ARRAY(struct vn_graphics_pipeline_fix_desc, fix_descs,
               createInfoCount);
   for (uint32_t i = 0; i < createInfoCount; i++) {
      struct vn_graphics_pipeline *pipeline =
         vn_graphics_pipeline_from_handle(pPipelines[i]);

      const VkPipelineCreateFlags2 flags2 = vn_pipeline_create_flags2(
         pCreateInfos[i].pNext, pCreateInfos[i].flags);
      if (flags2 & VN_PIPELINE_CREATE_SYNC_MASK)
         want_sync = true;

      vn_graphics_pipeline_state_fill(&pCreateInfos[i], flags2,
                                      &pipeline->state, &fix_descs[i]);
   }

   struct vn_graphics_pipeline_fix_tmp *fix_tmp = NULL;
   pCreateInfos = vn_fix_graphics_pipeline_create_infos(
      dev, createInfoCount, pCreateInfos, fix_descs, &fix_tmp, alloc);
   if (!pCreateInfos) {
      vn_destroy_pipeline_handles(dev, createInfoCount, pPipelines, alloc);
      STACK_ARRAY_FINISH(fix_descs);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   for (uint32_t i = 0; i < createInfoCount; i++) {
      struct vn_pipeline *pipeline = vn_pipeline_from_handle(pPipelines[i]);
      struct vn_pipeline_layout *layout =
         vn_pipeline_layout_from_handle(pCreateInfos[i].layout);
      if (layout && (layout->push_descriptor_set_layout ||
                     layout->has_push_constant_ranges)) {
         pipeline->layout = vn_pipeline_layout_ref(dev, layout);
      }

      vn_invalidate_pipeline_creation_feedback(
         (const VkBaseInStructure *)pCreateInfos[i].pNext);
   }

   struct vn_ring *target_ring = vn_get_target_ring(dev);
   if (!target_ring) {
      vk_free(alloc, fix_tmp);
      vn_destroy_pipeline_handles(dev, createInfoCount, pPipelines, alloc);
      STACK_ARRAY_FINISH(fix_descs);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   if (want_sync || target_ring != dev->primary_ring) {
      if (target_ring == dev->primary_ring) {
         VN_TRACE_SCOPE("want sync");
      }

      result = vn_call_vkCreateGraphicsPipelines(
         target_ring, device, pipelineCache, createInfoCount, pCreateInfos,
         NULL, pPipelines);
      if (result != VK_SUCCESS)
         vn_destroy_failed_pipeline_handles(dev, createInfoCount, pPipelines,
                                            alloc);
   } else {
      vn_async_vkCreateGraphicsPipelines(target_ring, device, pipelineCache,
                                         createInfoCount, pCreateInfos, NULL,
                                         pPipelines);
      result = VK_SUCCESS;
   }

   vk_free(alloc, fix_tmp);
   STACK_ARRAY_FINISH(fix_descs);
   return vn_result(dev->instance, result);
}
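
/*
 * Illustrative sketch, not part of the driver: creation is forwarded
 * asynchronously unless a flag in VN_PIPELINE_CREATE_SYNC_MASK means the
 * app will branch on the host's VkResult, e.g.
 * VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT. Hypothetical
 * app-side code that forces the synchronous path:
 */
static bool
vn_example_try_cached_pipeline(VkDevice dev,
                               VkPipelineCache cache,
                               const VkGraphicsPipelineCreateInfo *info,
                               VkPipeline *out_pipeline)
{
   VkGraphicsPipelineCreateInfo local = *info;
   local.flags |= VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT;

   /* The result is meaningful, so it cannot be fired and forgotten. */
   return vkCreateGraphicsPipelines(dev, cache, 1, &local, NULL,
                                    out_pipeline) == VK_SUCCESS;
}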

VkResult
vn_CreateComputePipelines(VkDevice device,
                          VkPipelineCache pipelineCache,
                          uint32_t createInfoCount,
                          const VkComputePipelineCreateInfo *pCreateInfos,
                          const VkAllocationCallbacks *pAllocator,
                          VkPipeline *pPipelines)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   bool want_sync = false;
   VkResult result;

   memset(pPipelines, 0, sizeof(*pPipelines) * createInfoCount);

   if (!vn_create_pipeline_handles(dev, VN_PIPELINE_TYPE_COMPUTE,
                                   createInfoCount, pPipelines, alloc))
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   for (uint32_t i = 0; i < createInfoCount; i++) {
      struct vn_pipeline *pipeline = vn_pipeline_from_handle(pPipelines[i]);
      struct vn_pipeline_layout *layout =
         vn_pipeline_layout_from_handle(pCreateInfos[i].layout);
      if (layout->push_descriptor_set_layout ||
          layout->has_push_constant_ranges) {
         pipeline->layout = vn_pipeline_layout_ref(dev, layout);
      }

      if (vn_pipeline_create_flags2(pCreateInfos[i].pNext,
                                    pCreateInfos[i].flags) &
          VN_PIPELINE_CREATE_SYNC_MASK)
         want_sync = true;

      vn_invalidate_pipeline_creation_feedback(
         (const VkBaseInStructure *)pCreateInfos[i].pNext);
   }

   struct vn_ring *target_ring = vn_get_target_ring(dev);
   if (!target_ring) {
      vn_destroy_pipeline_handles(dev, createInfoCount, pPipelines, alloc);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   if (want_sync || target_ring != dev->primary_ring) {
      result = vn_call_vkCreateComputePipelines(
         target_ring, device, pipelineCache, createInfoCount, pCreateInfos,
         NULL, pPipelines);
      if (result != VK_SUCCESS)
         vn_destroy_failed_pipeline_handles(dev, createInfoCount, pPipelines,
                                            alloc);
   } else {
      vn_async_vkCreateComputePipelines(target_ring, device, pipelineCache,
                                        createInfoCount, pCreateInfos, NULL,
                                        pPipelines);
      result = VK_SUCCESS;
   }

   return vn_result(dev->instance, result);
}
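
/*
 * Illustrative sketch, not part of the driver: the spec permits destroying
 * a VkPipelineLayout right after pipeline creation, yet venus may still
 * need the layout at command-encode time for push constants and push
 * descriptors, hence the refcount taken above. A hypothetical app pattern
 * the driver must survive:
 */
static void
vn_example_destroy_layout_early(VkDevice dev,
                                VkPipelineLayout layout,
                                VkPipeline pipeline)
{
   /* Legal: the driver keeps whatever layout state it still needs. */
   vkDestroyPipelineLayout(dev, layout, NULL);
   (void)pipeline; /* the pipeline remains bindable afterwards */
}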

void
vn_DestroyPipeline(VkDevice device,
                   VkPipeline _pipeline,
                   const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_pipeline *pipeline = vn_pipeline_from_handle(_pipeline);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!pipeline)
      return;

   if (pipeline->layout) {
      vn_pipeline_layout_unref(dev, pipeline->layout);
   }

   vn_async_vkDestroyPipeline(dev->primary_ring, device, _pipeline, NULL);

   vn_object_base_fini(&pipeline->base);
   vk_free(alloc, pipeline);
}