/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_pipeline.h"

#include "venus-protocol/vn_protocol_driver_pipeline.h"
#include "venus-protocol/vn_protocol_driver_pipeline_cache.h"
#include "venus-protocol/vn_protocol_driver_pipeline_layout.h"
#include "venus-protocol/vn_protocol_driver_shader_module.h"

#include "vn_descriptor_set.h"
#include "vn_device.h"
#include "vn_physical_device.h"
#include "vn_render_pass.h"

/**
 * Fields in VkGraphicsPipelineCreateInfo and its sub-structs that we must
 * track to determine which fields are valid and which must be erased.
 */
struct vn_graphics_pipeline_info_self {
   union {
      /* Bitmask exists for testing if any field is set. */
      uint32_t mask;

      /* Group the fixes by Vulkan struct. Within each group, sort by struct
       * order.
       */
      struct {
         /** VkGraphicsPipelineCreateInfo::pStages */
         bool shader_stages : 1;
         /** VkGraphicsPipelineCreateInfo::pVertexInputState */
         bool vertex_input_state : 1;
         /** VkGraphicsPipelineCreateInfo::pInputAssemblyState */
         bool input_assembly_state : 1;
         /** VkGraphicsPipelineCreateInfo::pTessellationState */
         bool tessellation_state : 1;
         /** VkGraphicsPipelineCreateInfo::pViewportState */
         bool viewport_state : 1;
         /** VkGraphicsPipelineCreateInfo::pRasterizationState */
         bool rasterization_state : 1;
         /** VkGraphicsPipelineCreateInfo::pMultisampleState */
         bool multisample_state : 1;
         /** VkGraphicsPipelineCreateInfo::pDepthStencilState */
         bool depth_stencil_state : 1;
         /** VkGraphicsPipelineCreateInfo::pColorBlendState */
         bool color_blend_state : 1;
         /** VkGraphicsPipelineCreateInfo::layout */
         bool pipeline_layout : 1;
         /** VkGraphicsPipelineCreateInfo::renderPass */
         bool render_pass : 1;
         /** VkGraphicsPipelineCreateInfo::basePipelineHandle */
         bool base_pipeline_handle : 1;

         /** VkPipelineViewportStateCreateInfo::pViewports */
         bool viewport_state_viewports : 1;
         /** VkPipelineViewportStateCreateInfo::pScissors */
         bool viewport_state_scissors : 1;

         /** VkPipelineMultisampleStateCreateInfo::pSampleMask */
         bool multisample_state_sample_mask : 1;
      };
   };
};

static_assert(sizeof(struct vn_graphics_pipeline_info_self) ==
                 sizeof(((struct vn_graphics_pipeline_info_self){}).mask),
              "vn_graphics_pipeline_info_self::mask is too small");
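
/* Illustrative usage sketch (not part of the driver): the union lets the
 * fixup code name individual fields while still testing "does anything need
 * fixing?" in a single comparison:
 *
 *    struct vn_graphics_pipeline_info_self ignore = { 0 };
 *    ignore.tessellation_state = true;
 *    if (ignore.mask != 0) {
 *       // at least one field must be erased
 *    }
 *
 * The static_assert above guards the aliasing: if the bitfield ever grows
 * past 32 bits, mask would no longer cover every flag.
 */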

/**
 * Structs in the VkGraphicsPipelineCreateInfo pNext chain that we must
 * track to determine which fields are valid and which must be erased.
 */
struct vn_graphics_pipeline_info_pnext {
   union {
      /* Bitmask exists for testing if any field is set. */
      uint32_t mask;

      /* Group the fixes by Vulkan struct. Within each group, sort by struct
       * order.
       */
      struct {
         /** VkPipelineRenderingCreateInfo, all format fields */
         bool rendering_info_formats : 1;
      };
   };
};

static_assert(sizeof(struct vn_graphics_pipeline_info_pnext) ==
                 sizeof(((struct vn_graphics_pipeline_info_pnext){}).mask),
              "vn_graphics_pipeline_info_pnext::mask is too small");

/**
 * Description of fixes needed for a single VkGraphicsPipelineCreateInfo and
 * its pNext chain.
 */
struct vn_graphics_pipeline_fix_desc {
   struct vn_graphics_pipeline_info_self self;
   struct vn_graphics_pipeline_info_pnext pnext;
};

/**
 * Typesafe bitmask for VkGraphicsPipelineLibraryFlagsEXT. Named members
 * reduce long lines.
 *
 * From the Vulkan 1.3.215 spec:
 *
 *    The state required for a graphics pipeline is divided into vertex input
 *    state, pre-rasterization shader state, fragment shader state, and
 *    fragment output state.
 */
struct vn_graphics_pipeline_library_state {
   union {
      VkGraphicsPipelineLibraryFlagsEXT mask;

      struct {
         /** VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT */
         bool vertex_input : 1;
         /** VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT */
         bool pre_raster_shaders : 1;
         /** VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT */
         bool fragment_shader : 1;
         /** VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT */
         bool fragment_output : 1;
      };
   };
};
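
/* Illustrative sketch (not part of the driver): the bitfield declaration
 * order mirrors the Vulkan flag bits (0x1, 0x2, 0x4, 0x8), so on the usual
 * ABIs, where the first declared bitfield occupies the least significant
 * bit, assigning the raw flags to .mask makes the named members directly
 * readable, as vn_graphics_pipeline_library_state_update() below relies on:
 *
 *    struct vn_graphics_pipeline_library_state gpl = {
 *       .mask = VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT,
 *    };
 *    assert(gpl.pre_raster_shaders);
 */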

/**
 * Compact bitmask for the subset of graphics VkDynamicState that
 * Venus needs to track. Named members reduce long lines.
 *
 * We want a *compact* bitmask because enum VkDynamicState has large gaps due
 * to extensions.
 */
struct vn_graphics_dynamic_state {
   union {
      uint32_t mask;

      struct {
         /** VK_DYNAMIC_STATE_VERTEX_INPUT_EXT */
         bool vertex_input : 1;
         /** VK_DYNAMIC_STATE_VIEWPORT */
         bool viewport : 1;
         /** VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT */
         bool viewport_with_count : 1;
         /** VK_DYNAMIC_STATE_SAMPLE_MASK_EXT */
         bool sample_mask : 1;
         /** VK_DYNAMIC_STATE_SCISSOR */
         bool scissor : 1;
         /** VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT */
         bool scissor_with_count : 1;
         /** VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE */
         bool rasterizer_discard_enable : 1;
      };
   };
};
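
/* Illustrative note (not part of the driver): a direct `1 << state` bitmask
 * is impossible because extension-provided enum values are huge, e.g.
 * VK_DYNAMIC_STATE_VIEWPORT is 0 while VK_DYNAMIC_STATE_VERTEX_INPUT_EXT is
 * 1000352000. Hence the explicit switch in
 * vn_graphics_dynamic_state_update() below that maps each tracked
 * VkDynamicState onto one compact bit.
 */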

/**
 * Graphics pipeline state that Venus tracks to determine which fixes are
 * required in the VkGraphicsPipelineCreateInfo pNext chain.
 *
 * This is the pipeline's fully linked state. That is, it includes the state
 * provided directly in VkGraphicsPipelineCreateInfo and the state provided
 * indirectly in VkPipelineLibraryCreateInfoKHR.
 */
struct vn_graphics_pipeline_state {
   /** The GPL state subsets that the pipeline provides. */
   struct vn_graphics_pipeline_library_state gpl;

   struct vn_graphics_dynamic_state dynamic;
   VkShaderStageFlags shader_stages;

   struct vn_render_pass_state {
      /**
       * The attachment aspects accessed by the pipeline.
       *
       * Valid if and only if VK_IMAGE_ASPECT_METADATA_BIT is unset.
       *
       * In a complete pipeline, this must be valid (and may be empty). In
       * a pipeline library, this may be invalid. We initialize this to be
       * invalid, and it remains invalid until we read the attachment info in
       * the VkGraphicsPipelineCreateInfo chain.
       *
       * The app provides the attachment info in
       * VkGraphicsPipelineCreateInfo::renderPass or
       * VkPipelineRenderingCreateInfo, but the validity of that info depends
       * on VkGraphicsPipelineLibraryFlagsEXT.
       */
      VkImageAspectFlags attachment_aspects;
   } render_pass;

   /** VkPipelineRasterizationStateCreateInfo::rasterizerDiscardEnable
    *
    * Valid if and only if gpl.pre_raster_shaders is set.
    */
   bool rasterizer_discard_enable;
};

struct vn_graphics_pipeline {
   struct vn_pipeline base;
   struct vn_graphics_pipeline_state state;
};

/**
 * Temporary storage for fixes in vkCreateGraphicsPipelines.
 *
 * Length of each array is vkCreateGraphicsPipelines::createInfoCount.
 */
struct vn_graphics_pipeline_fix_tmp {
   VkGraphicsPipelineCreateInfo *infos;
   VkPipelineMultisampleStateCreateInfo *multisample_state_infos;
   VkPipelineViewportStateCreateInfo *viewport_state_infos;

   /* Fixing the pNext chain
    *
    * TODO: extend this when the extensions below, or others, are supported:
    * - VK_KHR_maintenance5
    * - VK_KHR_fragment_shading_rate
    * - VK_EXT_pipeline_robustness
    */
   VkGraphicsPipelineLibraryCreateInfoEXT *gpl_infos;
   VkPipelineCreationFeedbackCreateInfo *feedback_infos;
   VkPipelineLibraryCreateInfoKHR *library_infos;
   VkPipelineRenderingCreateInfo *rendering_infos;
};

/* shader module commands */

VkResult
vn_CreateShaderModule(VkDevice device,
                      const VkShaderModuleCreateInfo *pCreateInfo,
                      const VkAllocationCallbacks *pAllocator,
                      VkShaderModule *pShaderModule)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_shader_module *mod =
      vk_zalloc(alloc, sizeof(*mod), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!mod)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&mod->base, VK_OBJECT_TYPE_SHADER_MODULE, &dev->base);

   VkShaderModule mod_handle = vn_shader_module_to_handle(mod);
   vn_async_vkCreateShaderModule(dev->primary_ring, device, pCreateInfo, NULL,
                                 &mod_handle);

   *pShaderModule = mod_handle;

   return VK_SUCCESS;
}

void
vn_DestroyShaderModule(VkDevice device,
                       VkShaderModule shaderModule,
                       const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_shader_module *mod = vn_shader_module_from_handle(shaderModule);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!mod)
      return;

   vn_async_vkDestroyShaderModule(dev->primary_ring, device, shaderModule,
                                  NULL);

   vn_object_base_fini(&mod->base);
   vk_free(alloc, mod);
}

/* pipeline layout commands */

static void
vn_pipeline_layout_destroy(struct vn_device *dev,
                           struct vn_pipeline_layout *pipeline_layout)
{
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   if (pipeline_layout->push_descriptor_set_layout) {
      vn_descriptor_set_layout_unref(
         dev, pipeline_layout->push_descriptor_set_layout);
   }
   vn_async_vkDestroyPipelineLayout(
      dev->primary_ring, vn_device_to_handle(dev),
      vn_pipeline_layout_to_handle(pipeline_layout), NULL);

   vn_object_base_fini(&pipeline_layout->base);
   vk_free(alloc, pipeline_layout);
}

static inline struct vn_pipeline_layout *
vn_pipeline_layout_ref(struct vn_device *dev,
                       struct vn_pipeline_layout *pipeline_layout)
{
   vn_refcount_inc(&pipeline_layout->refcount);
   return pipeline_layout;
}

static inline void
vn_pipeline_layout_unref(struct vn_device *dev,
                         struct vn_pipeline_layout *pipeline_layout)
{
   if (vn_refcount_dec(&pipeline_layout->refcount))
      vn_pipeline_layout_destroy(dev, pipeline_layout);
}
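
/* Illustrative sketch (not part of the driver): the layout is
 * reference-counted because objects other than the app's handle can keep it
 * alive. vn_CreatePipelineLayout below initializes the count to 1 for the
 * app; a pipeline that still needs the layout after creation takes its own
 * reference, so the destruction order is safe either way:
 *
 *    layout = vn_pipeline_layout_ref(dev, layout);   // pipeline's ref
 *    ...
 *    vn_pipeline_layout_unref(dev, layout);          // app destroys handle
 *    vn_pipeline_layout_unref(dev, layout);          // pipeline releases;
 *                                                    // last unref destroys
 */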

VkResult
vn_CreatePipelineLayout(VkDevice device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);
   /* ignore pAllocator as the pipeline layout is reference-counted */
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   struct vn_pipeline_layout *layout =
      vk_zalloc(alloc, sizeof(*layout), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!layout)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&layout->base, VK_OBJECT_TYPE_PIPELINE_LAYOUT,
                       &dev->base);
   layout->refcount = VN_REFCOUNT_INIT(1);

   for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; i++) {
      struct vn_descriptor_set_layout *descriptor_set_layout =
         vn_descriptor_set_layout_from_handle(pCreateInfo->pSetLayouts[i]);

      /* Avoid null derefs. pSetLayouts may contain VK_NULL_HANDLE.
       *
       * From the Vulkan 1.3.254 spec:
       *    VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter
       *
       *    If setLayoutCount is not 0, pSetLayouts must be a valid pointer to
       *    an array of setLayoutCount valid or VK_NULL_HANDLE
       *    VkDescriptorSetLayout handles
       */
      if (descriptor_set_layout &&
          descriptor_set_layout->is_push_descriptor) {
         layout->push_descriptor_set_layout =
            vn_descriptor_set_layout_ref(dev, descriptor_set_layout);
         break;
      }
   }

   layout->has_push_constant_ranges = pCreateInfo->pushConstantRangeCount > 0;

   VkPipelineLayout layout_handle = vn_pipeline_layout_to_handle(layout);
   vn_async_vkCreatePipelineLayout(dev->primary_ring, device, pCreateInfo,
                                   NULL, &layout_handle);

   *pPipelineLayout = layout_handle;

   return VK_SUCCESS;
}

void
vn_DestroyPipelineLayout(VkDevice device,
                         VkPipelineLayout pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_pipeline_layout *layout =
      vn_pipeline_layout_from_handle(pipelineLayout);

   if (!layout)
      return;

   vn_pipeline_layout_unref(dev, layout);
}

/* pipeline cache commands */

VkResult
vn_CreatePipelineCache(VkDevice device,
                       const VkPipelineCacheCreateInfo *pCreateInfo,
                       const VkAllocationCallbacks *pAllocator,
                       VkPipelineCache *pPipelineCache)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_pipeline_cache *cache =
      vk_zalloc(alloc, sizeof(*cache), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!cache)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&cache->base, VK_OBJECT_TYPE_PIPELINE_CACHE,
                       &dev->base);

   VkPipelineCacheCreateInfo local_create_info;
   if (pCreateInfo->initialDataSize) {
      const struct vk_pipeline_cache_header *header =
         pCreateInfo->pInitialData;

      local_create_info = *pCreateInfo;
      local_create_info.initialDataSize -= header->header_size;
      local_create_info.pInitialData += header->header_size;
      pCreateInfo = &local_create_info;
   }
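
   /* The initial data the app hands back is the blob that
    * vn_GetPipelineCacheData produced: a vk_pipeline_cache_header written by
    * this driver, followed by the renderer's raw cache data. The header is
    * consumed here so that only the renderer's portion is forwarded. A
    * sketch of the blob layout (illustrative, assuming the blob came from
    * this driver so header_size == sizeof(header)):
    *
    *    | vk_pipeline_cache_header | renderer cache data ... |
    *    ^ pInitialData             ^ pInitialData + header->header_size
    */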

   VkPipelineCache cache_handle = vn_pipeline_cache_to_handle(cache);
   vn_async_vkCreatePipelineCache(dev->primary_ring, device, pCreateInfo,
                                  NULL, &cache_handle);

   *pPipelineCache = cache_handle;

   return VK_SUCCESS;
}

void
vn_DestroyPipelineCache(VkDevice device,
                        VkPipelineCache pipelineCache,
                        const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_pipeline_cache *cache =
      vn_pipeline_cache_from_handle(pipelineCache);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!cache)
      return;

   vn_async_vkDestroyPipelineCache(dev->primary_ring, device, pipelineCache,
                                   NULL);

   vn_object_base_fini(&cache->base);
   vk_free(alloc, cache);
}

static struct vn_ring *
vn_get_target_ring(struct vn_device *dev)
{
   if (vn_tls_get_async_pipeline_create())
      return dev->primary_ring;

   struct vn_ring *ring = vn_tls_get_ring(dev->instance);
   if (!ring)
      return NULL;

   if (ring != dev->primary_ring) {
      /* Ensure pipeline create and pipeline cache retrieval dependencies are
       * ready on the renderer side.
       *
       * TODO:
       * - For pipeline create, track the ring seqnos of the layout and
       *   renderpass objects it depends on, and only wait for those seqnos
       *   once.
       * - For pipeline cache retrieval, track the ring seqno of the pipeline
       *   cache object it depends on. Treat different sync modes separately.
       */
      vn_ring_wait_all(dev->primary_ring);
   }
   return ring;
}

VkResult
vn_GetPipelineCacheData(VkDevice device,
                        VkPipelineCache pipelineCache,
                        size_t *pDataSize,
                        void *pData)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_physical_device *physical_dev = dev->physical_device;
   struct vn_ring *target_ring = vn_get_target_ring(dev);

   struct vk_pipeline_cache_header *header = pData;
   VkResult result;
   if (!pData) {
      result = vn_call_vkGetPipelineCacheData(target_ring, device,
                                              pipelineCache, pDataSize, NULL);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);

      *pDataSize += sizeof(*header);
      return VK_SUCCESS;
   }

   if (*pDataSize <= sizeof(*header)) {
      *pDataSize = 0;
      return VK_INCOMPLETE;
   }

   const VkPhysicalDeviceProperties *props =
      &physical_dev->properties.vulkan_1_0;
   header->header_size = sizeof(*header);
   header->header_version = VK_PIPELINE_CACHE_HEADER_VERSION_ONE;
   header->vendor_id = props->vendorID;
   header->device_id = props->deviceID;
   memcpy(header->uuid, props->pipelineCacheUUID, VK_UUID_SIZE);

   *pDataSize -= header->header_size;
   result =
      vn_call_vkGetPipelineCacheData(target_ring, device, pipelineCache,
                                     pDataSize, pData + header->header_size);
   if (result < VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pDataSize += header->header_size;

   return result;
}
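
/* Illustrative app-side sketch (not part of the driver): like all Vulkan
 * getters, vn_GetPipelineCacheData follows the two-call idiom, and the
 * driver-written header is accounted for in both calls:
 *
 *    size_t size = 0;
 *    vkGetPipelineCacheData(dev, cache, &size, NULL);   // renderer size
 *                                                       // + sizeof(header)
 *    void *data = malloc(size);
 *    vkGetPipelineCacheData(dev, cache, &size, data);   // header + blob
 */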

VkResult
vn_MergePipelineCaches(VkDevice device,
                       VkPipelineCache dstCache,
                       uint32_t srcCacheCount,
                       const VkPipelineCache *pSrcCaches)
{
   struct vn_device *dev = vn_device_from_handle(device);

   vn_async_vkMergePipelineCaches(dev->primary_ring, device, dstCache,
                                  srcCacheCount, pSrcCaches);

   return VK_SUCCESS;
}

/* pipeline commands */

static struct vn_graphics_pipeline *
vn_graphics_pipeline_from_handle(VkPipeline pipeline_h)
{
   struct vn_pipeline *p = vn_pipeline_from_handle(pipeline_h);
   assert(p->type == VN_PIPELINE_TYPE_GRAPHICS);
   return (struct vn_graphics_pipeline *)p;
}

static bool
vn_create_pipeline_handles(struct vn_device *dev,
                           enum vn_pipeline_type type,
                           uint32_t pipeline_count,
                           VkPipeline *pipeline_handles,
                           const VkAllocationCallbacks *alloc)
{
   size_t pipeline_size;

   switch (type) {
   case VN_PIPELINE_TYPE_GRAPHICS:
      pipeline_size = sizeof(struct vn_graphics_pipeline);
      break;
   case VN_PIPELINE_TYPE_COMPUTE:
      pipeline_size = sizeof(struct vn_pipeline);
      break;
   }

   for (uint32_t i = 0; i < pipeline_count; i++) {
      struct vn_pipeline *pipeline =
         vk_zalloc(alloc, pipeline_size, VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!pipeline) {
         for (uint32_t j = 0; j < i; j++) {
            pipeline = vn_pipeline_from_handle(pipeline_handles[j]);
            vn_object_base_fini(&pipeline->base);
            vk_free(alloc, pipeline);
         }

         memset(pipeline_handles, 0,
                pipeline_count * sizeof(pipeline_handles[0]));
         return false;
      }

      vn_object_base_init(&pipeline->base, VK_OBJECT_TYPE_PIPELINE,
                          &dev->base);
      pipeline->type = type;
      pipeline_handles[i] = vn_pipeline_to_handle(pipeline);
   }

   return true;
}

/** For vkCreate*Pipelines.  */
static void
vn_destroy_failed_pipelines(struct vn_device *dev,
                            uint32_t create_info_count,
                            VkPipeline *pipelines,
                            const VkAllocationCallbacks *alloc)
{
   for (uint32_t i = 0; i < create_info_count; i++) {
      struct vn_pipeline *pipeline = vn_pipeline_from_handle(pipelines[i]);

      if (pipeline->base.id == 0) {
         if (pipeline->layout) {
            vn_pipeline_layout_unref(dev, pipeline->layout);
         }
         vn_object_base_fini(&pipeline->base);
         vk_free(alloc, pipeline);
         pipelines[i] = VK_NULL_HANDLE;
      }
   }
}

#define VN_PIPELINE_CREATE_SYNC_MASK                                         \
   (VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT |               \
    VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT)
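
/* Illustrative note (not part of the driver): these are the create flags
 * whose results must be observable immediately, e.g.
 * FAIL_ON_PIPELINE_COMPILE_REQUIRED may legally make vkCreate*Pipelines
 * return VK_PIPELINE_COMPILE_REQUIRED. A creation using either flag
 * presumably cannot be issued asynchronously, since the caller needs the
 * renderer's real VkResult before returning.
 */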

static struct vn_graphics_pipeline_fix_tmp *
vn_graphics_pipeline_fix_tmp_alloc(const VkAllocationCallbacks *alloc,
                                   uint32_t info_count,
                                   bool alloc_pnext)
{
   struct vn_graphics_pipeline_fix_tmp *tmp;
   VkGraphicsPipelineCreateInfo *infos;
   VkPipelineMultisampleStateCreateInfo *multisample_state_infos;
   VkPipelineViewportStateCreateInfo *viewport_state_infos;

   /* for pNext */
   VkGraphicsPipelineLibraryCreateInfoEXT *gpl_infos;
   VkPipelineCreationFeedbackCreateInfo *feedback_infos;
   VkPipelineLibraryCreateInfoKHR *library_infos;
   VkPipelineRenderingCreateInfo *rendering_infos;

   VK_MULTIALLOC(ma);
   vk_multialloc_add(&ma, &tmp, __typeof__(*tmp), 1);
   vk_multialloc_add(&ma, &infos, __typeof__(*infos), info_count);
   vk_multialloc_add(&ma, &multisample_state_infos,
                     __typeof__(*multisample_state_infos), info_count);
   vk_multialloc_add(&ma, &viewport_state_infos,
                     __typeof__(*viewport_state_infos), info_count);

   if (alloc_pnext) {
      vk_multialloc_add(&ma, &gpl_infos, __typeof__(*gpl_infos), info_count);
      vk_multialloc_add(&ma, &feedback_infos, __typeof__(*feedback_infos),
                        info_count);
      vk_multialloc_add(&ma, &library_infos, __typeof__(*library_infos),
                        info_count);
      vk_multialloc_add(&ma, &rendering_infos, __typeof__(*rendering_infos),
                        info_count);
   }

   if (!vk_multialloc_zalloc(&ma, alloc, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND))
      return NULL;

   tmp->infos = infos;
   tmp->multisample_state_infos = multisample_state_infos;
   tmp->viewport_state_infos = viewport_state_infos;

   if (alloc_pnext) {
      tmp->gpl_infos = gpl_infos;
      tmp->feedback_infos = feedback_infos;
      tmp->library_infos = library_infos;
      tmp->rendering_infos = rendering_infos;
   }

   return tmp;
}

/**
 * Update \a gpl with the VkGraphicsPipelineLibraryFlagsEXT that the pipeline
 * provides directly (without linking). The spec says that the pipeline always
 * provides flags, but may do it implicitly.
 *
 * From the Vulkan 1.3.251 spec:
 *
 *    If this structure [VkGraphicsPipelineLibraryCreateInfoEXT] is
 *    omitted, and either VkGraphicsPipelineCreateInfo::flags includes
 *    VK_PIPELINE_CREATE_LIBRARY_BIT_KHR or the
 *    VkGraphicsPipelineCreateInfo::pNext chain includes
 *    a VkPipelineLibraryCreateInfoKHR structure with a libraryCount
 *    greater than 0, it is as if flags is 0. Otherwise if this
 *    structure is omitted, it is as if flags includes all possible subsets
 *    of the graphics pipeline (i.e. a complete graphics pipeline).
 */
static void
vn_graphics_pipeline_library_state_update(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_library_state *restrict gpl)
{
   const VkGraphicsPipelineLibraryCreateInfoEXT *gpl_info =
      vk_find_struct_const(info->pNext,
                           GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT);
   const VkPipelineLibraryCreateInfoKHR *lib_info =
      vk_find_struct_const(info->pNext, PIPELINE_LIBRARY_CREATE_INFO_KHR);
   const uint32_t lib_count = lib_info ? lib_info->libraryCount : 0;

   if (gpl_info) {
      gpl->mask |= gpl_info->flags;
   } else if ((info->flags & VK_PIPELINE_CREATE_LIBRARY_BIT_KHR) ||
              lib_count > 0) {
      gpl->mask |= 0;
   } else {
      gpl->mask |=
         VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT |
         VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT |
         VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT |
         VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT;
   }
}

/**
 * Update \a dynamic with the VkDynamicState that the pipeline provides
 * directly (without linking).
 *
 * \a direct_gpl The VkGraphicsPipelineLibraryFlagsEXT that the pipeline sets
 *    directly (without linking).
 */
static void
vn_graphics_dynamic_state_update(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_library_state direct_gpl,
   struct vn_graphics_dynamic_state *restrict dynamic)
{
   const VkPipelineDynamicStateCreateInfo *dyn_info = info->pDynamicState;
   if (!dyn_info)
      return;

   struct vn_graphics_dynamic_state raw = { 0 };

   for (uint32_t i = 0; i < dyn_info->dynamicStateCount; i++) {
      switch (dyn_info->pDynamicStates[i]) {
      case VK_DYNAMIC_STATE_VERTEX_INPUT_EXT:
         raw.vertex_input = true;
         break;
      case VK_DYNAMIC_STATE_VIEWPORT:
         raw.viewport = true;
         break;
      case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT:
         raw.viewport_with_count = true;
         break;
      case VK_DYNAMIC_STATE_SAMPLE_MASK_EXT:
         raw.sample_mask = true;
         break;
      case VK_DYNAMIC_STATE_SCISSOR:
         raw.scissor = true;
         break;
      case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT:
         raw.scissor_with_count = true;
         break;
      case VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE:
         raw.rasterizer_discard_enable = true;
         break;
      default:
         break;
      }
   }

   /* We must ignore VkDynamicState unrelated to the
    * VkGraphicsPipelineLibraryFlagsEXT that the pipeline provides directly
    * (without linking).
    *
    *    [Vulkan 1.3.252]
    *    Dynamic state values set via pDynamicState must be ignored if the
    *    state they correspond to is not otherwise statically set by one of
    *    the state subsets used to create the pipeline.
    *
    * In general, we must update dynamic state bits with `|=` rather than `=`
    * because multiple GPL state subsets can enable the same dynamic state.
    *
    *    [Vulkan 1.3.252]
    *    Any linked library that has dynamic state enabled that same dynamic
    *    state must also be enabled in all the other linked libraries to which
    *    that dynamic state applies.
    */
   if (direct_gpl.vertex_input) {
      dynamic->vertex_input |= raw.vertex_input;
   }
   if (direct_gpl.pre_raster_shaders) {
      dynamic->viewport |= raw.viewport;
      dynamic->viewport_with_count |= raw.viewport_with_count;
      dynamic->scissor |= raw.scissor;
      dynamic->scissor_with_count |= raw.scissor_with_count;
      dynamic->rasterizer_discard_enable |= raw.rasterizer_discard_enable;
   }
   if (direct_gpl.fragment_shader) {
      dynamic->sample_mask |= raw.sample_mask;
   }
   if (direct_gpl.fragment_output) {
      dynamic->sample_mask |= raw.sample_mask;
   }
}

/**
 * Update \a shader_stages with the VkShaderStageFlags that the pipeline
 * provides directly (without linking).
 *
 * \a direct_gpl The VkGraphicsPipelineLibraryFlagsEXT that the pipeline sets
 *    directly (without linking).
 */
static void
vn_graphics_shader_stages_update(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_library_state direct_gpl,
   struct vn_graphics_pipeline_fix_desc *restrict valid,
   VkShaderStageFlags *restrict shader_stages)
{
   /* From the Vulkan 1.3.251 spec:
    *
    *    VUID-VkGraphicsPipelineCreateInfo-flags-06640
    *
    *    If VkGraphicsPipelineLibraryCreateInfoEXT::flags includes
    *    VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT or
    *    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, pStages must be
    *    a valid pointer to an array of stageCount valid
    *    VkPipelineShaderStageCreateInfo structures
    */
   if (!direct_gpl.pre_raster_shaders && !direct_gpl.fragment_shader)
      return;

   valid->self.shader_stages = true;

   for (uint32_t i = 0; i < info->stageCount; i++) {
      /* We do not need to ignore the stages irrelevant to the GPL flags.
       * The following VUs require the app to provide only relevant stages.
       *
       * VUID-VkGraphicsPipelineCreateInfo-pStages-06894
       * VUID-VkGraphicsPipelineCreateInfo-pStages-06895
       * VUID-VkGraphicsPipelineCreateInfo-pStages-06896
       */
      *shader_stages |= info->pStages[i].stage;
   }
}

/**
 * Update the render pass state with the state that the pipeline provides
 * directly (without linking).
 *
 * \a direct_gpl The VkGraphicsPipelineLibraryFlagsEXT that the pipeline sets
 *    directly (without linking).
 */
static void
vn_render_pass_state_update(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_library_state direct_gpl,
   struct vn_graphics_pipeline_fix_desc *restrict valid,
   struct vn_render_pass_state *restrict state)
{
   /* We must set validity before early returns, to ensure we don't erase
    * valid info during fixup.  We must not erase valid info because, even if
    * we don't read it, the host driver may read it.
    */

   /* VUID-VkGraphicsPipelineCreateInfo-flags-06643
    *
    * If VkGraphicsPipelineLibraryCreateInfoEXT::flags includes
    * VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT,
    * VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, or
    * VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT, and
    * renderPass is not VK_NULL_HANDLE, renderPass must be a valid
    * VkRenderPass handle
    */
   valid->self.render_pass |= direct_gpl.pre_raster_shaders ||
                              direct_gpl.fragment_shader ||
                              direct_gpl.fragment_output;

   /* VUID-VkGraphicsPipelineCreateInfo-renderPass-06579
    *
    * If the pipeline requires fragment output interface state, and renderPass
    * is VK_NULL_HANDLE, and
    * VkPipelineRenderingCreateInfo::colorAttachmentCount is not 0,
    * VkPipelineRenderingCreateInfo::pColorAttachmentFormats must be a valid
    * pointer to an array of colorAttachmentCount valid VkFormat values
    *
    * VUID-VkGraphicsPipelineCreateInfo-renderPass-06580
    *
    * If the pipeline requires fragment output interface state, and renderPass
    * is VK_NULL_HANDLE, each element of
    * VkPipelineRenderingCreateInfo::pColorAttachmentFormats must be a valid
    * VkFormat value
    */
   valid->pnext.rendering_info_formats |=
      direct_gpl.fragment_output && !info->renderPass;

   if (state->attachment_aspects != VK_IMAGE_ASPECT_METADATA_BIT) {
      /* We have previously collected the pipeline's attachment aspects.  We
       * do not need to inspect the attachment info again because VUs ensure
       * that all valid render pass info used to create the pipeline and its
       * linked pipelines are compatible.  Ignored info is not required to be
       * compatible across linked pipeline libraries. An example of ignored
       * info is VkPipelineRenderingCreateInfo::pColorAttachmentFormats
       * without
       * VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT.
       *
       * VUID-VkGraphicsPipelineCreateInfo-renderpass-06625
       * VUID-VkGraphicsPipelineCreateInfo-pLibraries-06628
       */
      return;
   }

   if (valid->self.render_pass && info->renderPass) {
      struct vn_render_pass *pass =
         vn_render_pass_from_handle(info->renderPass);
      state->attachment_aspects =
         pass->subpasses[info->subpass].attachment_aspects;
      return;
   }

   if (valid->pnext.rendering_info_formats) {
      state->attachment_aspects = 0;

      /* From the Vulkan 1.3.255 spec:
       *
       *    When a pipeline is created without a VkRenderPass, if this
       *    structure [VkPipelineRenderingCreateInfo] is present in the pNext
       *    chain of VkGraphicsPipelineCreateInfo, it specifies the view mask
       *    and format of attachments used for rendering.  If this structure
       *    is not specified, and the pipeline does not include
       *    a VkRenderPass, viewMask and colorAttachmentCount are 0, and
       *    depthAttachmentFormat and stencilAttachmentFormat are
       *    VK_FORMAT_UNDEFINED. If a graphics pipeline is created with
       *    a valid VkRenderPass, parameters of this structure are ignored.
       *
       * However, other spec text clearly states that the format members of
       * VkPipelineRenderingCreateInfo are ignored unless the pipeline
       * provides fragment output interface state directly (without linking).
       */
      const VkPipelineRenderingCreateInfo *r_info =
         vk_find_struct_const(info->pNext, PIPELINE_RENDERING_CREATE_INFO);

      if (r_info) {
         for (uint32_t i = 0; i < r_info->colorAttachmentCount; i++) {
            if (r_info->pColorAttachmentFormats[i]) {
               state->attachment_aspects |= VK_IMAGE_ASPECT_COLOR_BIT;
               break;
            }
         }
         if (r_info->depthAttachmentFormat)
            state->attachment_aspects |= VK_IMAGE_ASPECT_DEPTH_BIT;
         if (r_info->stencilAttachmentFormat)
            state->attachment_aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
      }

      return;
   }

   /* Aspects remain invalid. */
   assert(state->attachment_aspects == VK_IMAGE_ASPECT_METADATA_BIT);
}

static void
vn_graphics_pipeline_state_merge(
   struct vn_graphics_pipeline_state *restrict dst,
   const struct vn_graphics_pipeline_state *restrict src)
{
   /* The Vulkan 1.3.251 spec says:
    *    VUID-VkGraphicsPipelineCreateInfo-pLibraries-06611
    *
    *    Any pipeline libraries included via
    *    VkPipelineLibraryCreateInfoKHR::pLibraries must not include any state
    *    subset already defined by this structure or defined by any other
    *    pipeline library in VkPipelineLibraryCreateInfoKHR::pLibraries
    */
   assert(!(dst->gpl.mask & src->gpl.mask));

   dst->gpl.mask |= src->gpl.mask;
   dst->dynamic.mask |= src->dynamic.mask;
   dst->shader_stages |= src->shader_stages;

   VkImageAspectFlags src_aspects = src->render_pass.attachment_aspects;
   VkImageAspectFlags *dst_aspects = &dst->render_pass.attachment_aspects;

   if (src_aspects != VK_IMAGE_ASPECT_METADATA_BIT) {
      if (*dst_aspects != VK_IMAGE_ASPECT_METADATA_BIT) {
         /* All linked pipelines must have compatible render pass info. */
         assert(*dst_aspects == src_aspects);
      } else {
         *dst_aspects = src_aspects;
      }
   }

   if (src->gpl.pre_raster_shaders)
      dst->rasterizer_discard_enable = src->rasterizer_discard_enable;
}
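
/* Illustrative sketch (not part of the driver): linking a complete pipeline
 * from two libraries merges their disjoint GPL subsets, e.g.
 *
 *    lib A: gpl.mask = VERTEX_INPUT_INTERFACE | PRE_RASTERIZATION_SHADERS
 *    lib B: gpl.mask = FRAGMENT_SHADER | FRAGMENT_OUTPUT_INTERFACE
 *
 * After merging both into dst, dst->gpl.mask covers all four subsets (a
 * complete pipeline), and the per-subset state (dynamic state, shader
 * stages, attachment aspects) is the union of the libraries' state.
 */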

/**
 * Fill \a state by reading the VkGraphicsPipelineCreateInfo pNext chain,
 * including any linked pipeline libraries. Return in \a out_fix_desc
 * a description of required fixes to the VkGraphicsPipelineCreateInfo chain.
 *
 * \pre state is zero-filled
 *
 * The logic for choosing which struct members to ignore, and which members
 * have valid values, is derived from the Vulkan spec sections for
 * VkGraphicsPipelineCreateInfo, VkGraphicsPipelineLibraryCreateInfoEXT, and
 * VkPipelineLibraryCreateInfoKHR. As of Vulkan 1.3.255, the spec text and VUs
 * still contain inconsistencies regarding the validity of struct members, so
 * read it carefully. Many of the VUs were written before
 * VK_EXT_graphics_pipeline_library and never updated. (Lina's advice: Focus
 * primarily on understanding the non-VU text, and use VUs to verify your
 * comprehension).
 */
static void
vn_graphics_pipeline_state_fill(
   const VkGraphicsPipelineCreateInfo *info,
   struct vn_graphics_pipeline_state *restrict state,
   struct vn_graphics_pipeline_fix_desc *out_fix_desc)
{
   /* Assume that state is already zero-filled.
    *
    * Invalidate attachment_aspects.
    */
   state->render_pass.attachment_aspects = VK_IMAGE_ASPECT_METADATA_BIT;

   const VkPipelineRenderingCreateInfo *rendering_info =
      vk_find_struct_const(info->pNext, PIPELINE_RENDERING_CREATE_INFO);
   const VkPipelineLibraryCreateInfoKHR *lib_info =
      vk_find_struct_const(info->pNext, PIPELINE_LIBRARY_CREATE_INFO_KHR);
   const uint32_t lib_count = lib_info ? lib_info->libraryCount : 0;

   /* This tracks which fields have valid values in the
    * VkGraphicsPipelineCreateInfo pNext chain.
    *
    * We initially assume that all fields are invalid. We flip fields from
    * invalid to valid as we dig through the pNext chain.
    *
    * A single field may be updated at multiple locations, therefore we update
    * with `|=` instead of `=`.
    *
    * If `valid.foo` is set, then foo has a valid value if foo exists in the
    * pNext chain. Even though NULL is not a valid pointer, NULL is considered
    * a valid *value* for a pointer-typed variable. Same for VK_NULL_HANDLE
    * and Vulkan handle-typed variables.
    *
    * Conversely, if `valid.foo` remains false at the end of this function,
    * then the Vulkan spec permits foo to have any value. If foo has a pointer
    * type, it may be an invalid pointer. If foo has a Vulkan handle type, it
    * may be an invalid handle.
    */
   struct vn_graphics_pipeline_fix_desc valid = { 0 };

   /* Merge the linked pipeline libraries. */
   for (uint32_t i = 0; i < lib_count; i++) {
      struct vn_graphics_pipeline *p =
         vn_graphics_pipeline_from_handle(lib_info->pLibraries[i]);
      vn_graphics_pipeline_state_merge(state, &p->state);
   }

   /* The VkGraphicsPipelineLibraryFlagsEXT that this pipeline provides
    * directly (without linking).
    */
   struct vn_graphics_pipeline_library_state direct_gpl = { 0 };
   vn_graphics_pipeline_library_state_update(info, &direct_gpl);

   /* From the Vulkan 1.3.251 spec:
    *    VUID-VkGraphicsPipelineCreateInfo-pLibraries-06611
    *
    *    Any pipeline libraries included via
    *    VkPipelineLibraryCreateInfoKHR::pLibraries must not include any state
    *    subset already defined by this structure or defined by any other
    *    pipeline library in VkPipelineLibraryCreateInfoKHR::pLibraries
    */
   assert(!(direct_gpl.mask & state->gpl.mask));

   /* Collect orthogonal state that is common to multiple GPL state subsets. */
   vn_graphics_dynamic_state_update(info, direct_gpl, &state->dynamic);
   vn_graphics_shader_stages_update(info, direct_gpl, &valid,
                                    &state->shader_stages);
   vn_render_pass_state_update(info, direct_gpl, &valid, &state->render_pass);

   /* Collect remaining pre-raster shaders state.
    *
    * Of the remaining state, we must first collect the pre-raster shaders
    * state because it influences how the other state is collected.
    */
   if (direct_gpl.pre_raster_shaders) {
      valid.self.tessellation_state |=
         (bool)(state->shader_stages &
                (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
                 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
      valid.self.rasterization_state = true;
      valid.self.pipeline_layout = true;

      if (info->pRasterizationState) {
         state->rasterizer_discard_enable =
            info->pRasterizationState->rasterizerDiscardEnable;
      }

      const bool is_raster_statically_disabled =
         !state->dynamic.rasterizer_discard_enable &&
         state->rasterizer_discard_enable;

      if (!is_raster_statically_disabled) {
         valid.self.viewport_state = true;

         valid.self.viewport_state_viewports =
            !state->dynamic.viewport && !state->dynamic.viewport_with_count;

         valid.self.viewport_state_scissors =
            !state->dynamic.scissor && !state->dynamic.scissor_with_count;
      }

      /* Defer setting the flag until all its state is filled. */
      state->gpl.pre_raster_shaders = true;
   }

   /* Collect remaining vertex input interface state.
    *
    * TODO(VK_EXT_mesh_shader): Update.
    */
   if (direct_gpl.vertex_input) {
      const bool may_have_vertex_shader =
         !state->gpl.pre_raster_shaders ||
         (state->shader_stages & VK_SHADER_STAGE_VERTEX_BIT);

      valid.self.vertex_input_state |=
         may_have_vertex_shader && !state->dynamic.vertex_input;

      valid.self.input_assembly_state |= may_have_vertex_shader;

      /* Defer setting the flag until all its state is filled. */
      state->gpl.vertex_input = true;
   }

   /* Does this pipeline have rasterization statically disabled? If disabled,
    * then this pipeline does not directly provide fragment shader state nor
    * fragment output state.
    *
    * About fragment shader state, the Vulkan 1.3.254 spec says:
    *
    *    If a pipeline specifies pre-rasterization state either directly or by
    *    including it as a pipeline library and rasterizerDiscardEnable is set
    *    to VK_FALSE or VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE is used,
    *    this state must be specified to create a complete graphics pipeline.
    *
    *    If a pipeline includes
    *    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT in
    *    VkGraphicsPipelineLibraryCreateInfoEXT::flags either explicitly or as
    *    a default, and either the conditions requiring this state for
    *    a complete graphics pipeline are met or this pipeline does not
    *    specify pre-rasterization state in any way, that pipeline must
    *    specify this state directly.
    *
    * About fragment output state, the Vulkan 1.3.254 spec says the same, but
    * with VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT.
    */
   const bool is_raster_statically_disabled =
      state->gpl.pre_raster_shaders &&
      !state->dynamic.rasterizer_discard_enable &&
      state->rasterizer_discard_enable;

   /* Collect remaining fragment shader state. */
   if (direct_gpl.fragment_shader) {
      if (!is_raster_statically_disabled) {
         /* Validity of pMultisampleState is easy here.
          *
          *    VUID-VkGraphicsPipelineCreateInfo-pMultisampleState-06629
          *
          *    If the pipeline requires fragment shader state
          *    pMultisampleState must be NULL or a valid pointer to a valid
          *    VkPipelineMultisampleStateCreateInfo structure
          */
         valid.self.multisample_state = true;

         valid.self.multisample_state_sample_mask =
            !state->dynamic.sample_mask;

         if ((state->render_pass.attachment_aspects &
              (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
            valid.self.depth_stencil_state = true;
         } else if (state->render_pass.attachment_aspects ==
                       VK_IMAGE_ASPECT_METADATA_BIT &&
                    (info->flags & VK_PIPELINE_CREATE_LIBRARY_BIT_KHR)) {
            /* The app has not yet provided render pass info, neither directly
             * in this VkGraphicsPipelineCreateInfo nor in any linked pipeline
             * libraries. Therefore we do not know if the final complete
             * pipeline will have any depth or stencil attachments. If the
             * final complete pipeline does have depth or stencil attachments,
             * then the pipeline will use
             * VkPipelineDepthStencilStateCreateInfo. Therefore, we must not
             * ignore it.
             */
            valid.self.depth_stencil_state = true;
         }

         valid.self.pipeline_layout = true;
      }

      /* Defer setting the flag until all its state is filled. */
      state->gpl.fragment_shader = true;
   }

   /* Collect remaining fragment output interface state. */
   if (direct_gpl.fragment_output) {
      if (!is_raster_statically_disabled) {
         /* Validity of pMultisampleState is easy here.
          *
          *    VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751
          *
          *    If the pipeline requires fragment output interface state,
          *    pMultisampleState must be a valid pointer to a valid
          *    VkPipelineMultisampleStateCreateInfo structure
          */
         valid.self.multisample_state = true;

         valid.self.multisample_state_sample_mask =
            !state->dynamic.sample_mask;

         valid.self.color_blend_state |=
            (bool)(state->render_pass.attachment_aspects &
                   VK_IMAGE_ASPECT_COLOR_BIT);
         valid.self.depth_stencil_state |=
            (bool)(state->render_pass.attachment_aspects &
                   (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT));
      }

      /* Defer setting the flag until all its state is filled. */
      state->gpl.fragment_output = true;
   }

   /* After collecting the direct_gpl state, check the final state to
    * validate VkPipelineLayout, in case this pipeline links the final
    * layout.
    *
    * From the Vulkan 1.3.275 spec:
    *    VUID-VkGraphicsPipelineCreateInfo-layout-06602
    *
    *    If the pipeline requires fragment shader state or pre-rasterization
    *    shader state, layout must be a valid VkPipelineLayout handle
    */
   if ((state->gpl.fragment_shader && !is_raster_statically_disabled) ||
       state->gpl.pre_raster_shaders)
      valid.self.pipeline_layout = true;

   /* Pipeline Derivatives
    *
    *    VUID-VkGraphicsPipelineCreateInfo-flags-07984
    *
    *    If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and
    *    basePipelineIndex is -1, basePipelineHandle must be a valid graphics
    *    VkPipeline handle
    */
   if ((info->flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) &&
       info->basePipelineIndex == -1)
      valid.self.base_pipeline_handle = true;

   *out_fix_desc = (struct vn_graphics_pipeline_fix_desc) {
      .self = {
         /* clang-format off */
         .shader_stages =
            !valid.self.shader_stages &&
            info->pStages,
         .vertex_input_state =
            !valid.self.vertex_input_state &&
            info->pVertexInputState,
         .input_assembly_state =
            !valid.self.input_assembly_state &&
            info->pInputAssemblyState,
         .tessellation_state =
            !valid.self.tessellation_state &&
            info->pTessellationState,
         .viewport_state =
            !valid.self.viewport_state &&
            info->pViewportState,
         .viewport_state_viewports =
            !valid.self.viewport_state_viewports &&
            valid.self.viewport_state &&
            info->pViewportState &&
            info->pViewportState->pViewports &&
            info->pViewportState->viewportCount,
         .viewport_state_scissors =
            !valid.self.viewport_state_scissors &&
            valid.self.viewport_state &&
            info->pViewportState &&
            info->pViewportState->pScissors &&
            info->pViewportState->scissorCount,
         .rasterization_state =
            !valid.self.rasterization_state &&
            info->pRasterizationState,
         .multisample_state =
            !valid.self.multisample_state &&
            info->pMultisampleState,
         .multisample_state_sample_mask =
            !valid.self.multisample_state_sample_mask &&
            valid.self.multisample_state &&
            info->pMultisampleState &&
            info->pMultisampleState->pSampleMask,
         .depth_stencil_state =
            !valid.self.depth_stencil_state &&
            info->pDepthStencilState,
         .color_blend_state =
            !valid.self.color_blend_state &&
            info->pColorBlendState,
         .pipeline_layout =
            !valid.self.pipeline_layout &&
            info->layout,
         .render_pass =
            !valid.self.render_pass &&
            info->renderPass,
         .base_pipeline_handle =
            !valid.self.base_pipeline_handle &&
            info->basePipelineHandle,
         /* clang-format on */
      },
      .pnext = {
         /* clang-format off */
         .rendering_info_formats =
            !valid.pnext.rendering_info_formats &&
            rendering_info &&
            rendering_info->pColorAttachmentFormats &&
            rendering_info->colorAttachmentCount,
         /* clang-format on */
      },
   };
}
1308 
1309 static void
vn_fix_graphics_pipeline_create_info_self(const struct vn_graphics_pipeline_info_self * ignore,const VkGraphicsPipelineCreateInfo * info,struct vn_graphics_pipeline_fix_tmp * fix_tmp,uint32_t index)1310 vn_fix_graphics_pipeline_create_info_self(
1311    const struct vn_graphics_pipeline_info_self *ignore,
1312    const VkGraphicsPipelineCreateInfo *info,
1313    struct vn_graphics_pipeline_fix_tmp *fix_tmp,
1314    uint32_t index)
1315 {
1316    /* VkGraphicsPipelineCreateInfo */
1317    if (ignore->shader_stages) {
1318       fix_tmp->infos[index].stageCount = 0;
1319       fix_tmp->infos[index].pStages = NULL;
1320    }
1321    if (ignore->vertex_input_state)
1322       fix_tmp->infos[index].pVertexInputState = NULL;
1323    if (ignore->input_assembly_state)
1324       fix_tmp->infos[index].pInputAssemblyState = NULL;
1325    if (ignore->tessellation_state)
1326       fix_tmp->infos[index].pTessellationState = NULL;
1327    if (ignore->viewport_state)
1328       fix_tmp->infos[index].pViewportState = NULL;
1329    if (ignore->rasterization_state)
1330       fix_tmp->infos[index].pRasterizationState = NULL;
1331    if (ignore->multisample_state)
1332       fix_tmp->infos[index].pMultisampleState = NULL;
1333    if (ignore->depth_stencil_state)
1334       fix_tmp->infos[index].pDepthStencilState = NULL;
1335    if (ignore->color_blend_state)
1336       fix_tmp->infos[index].pColorBlendState = NULL;
1337    if (ignore->pipeline_layout)
1338       fix_tmp->infos[index].layout = VK_NULL_HANDLE;
1339    if (ignore->base_pipeline_handle)
1340       fix_tmp->infos[index].basePipelineHandle = VK_NULL_HANDLE;
1341 
1342    /* VkPipelineMultisampleStateCreateInfo */
1343    if (ignore->multisample_state_sample_mask) {
1344       /* Replace the original pMultisampleState with a patchable copy. */
1345       fix_tmp->multisample_state_infos[index] = *info->pMultisampleState;
1346       fix_tmp->infos[index].pMultisampleState =
1347          &fix_tmp->multisample_state_infos[index];
1348 
1349       fix_tmp->multisample_state_infos[index].pSampleMask = NULL;
1350    }
1351 
1352    /* VkPipelineViewportStateCreateInfo */
1353    if (ignore->viewport_state_viewports || ignore->viewport_state_scissors) {
1354       /* Replace the original pViewportState with a patchable copy. */
1355       fix_tmp->viewport_state_infos[index] = *info->pViewportState;
1356       fix_tmp->infos[index].pViewportState =
1357          &fix_tmp->viewport_state_infos[index];
1358 
1359       if (ignore->viewport_state_viewports)
1360          fix_tmp->viewport_state_infos[index].pViewports = NULL;
1361       if (ignore->viewport_state_scissors)
1362          fix_tmp->viewport_state_infos[index].pScissors = NULL;
1363    }
1364 }
1365 
1366 static void
1367 vn_graphics_pipeline_create_info_pnext_init(
1368    const VkGraphicsPipelineCreateInfo *info,
1369    struct vn_graphics_pipeline_fix_tmp *fix_tmp,
1370    uint32_t index)
1371 {
1372    VkGraphicsPipelineLibraryCreateInfoEXT *gpl = &fix_tmp->gpl_infos[index];
1373    VkPipelineCreationFeedbackCreateInfo *feedback =
1374       &fix_tmp->feedback_infos[index];
1375    VkPipelineLibraryCreateInfoKHR *library = &fix_tmp->library_infos[index];
1376    VkPipelineRenderingCreateInfo *rendering =
1377       &fix_tmp->rendering_infos[index];
1378 
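   /* Rebuild the pNext chain on the copied create info: deep-copy the
    * recognized structs into tmp storage so they can be patched, and
    * re-link them; structs not recognized below are dropped from the copy.
    */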
1379    VkBaseOutStructure *cur = (void *)&fix_tmp->infos[index];
1380 
1381    vk_foreach_struct_const(src, info->pNext) {
1382       void *next = NULL;
1383       switch (src->sType) {
1384       case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT:
1385          memcpy(gpl, src, sizeof(*gpl));
1386          next = gpl;
1387          break;
1388       case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO:
1389          memcpy(feedback, src, sizeof(*feedback));
1390          next = feedback;
1391          break;
1392       case VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR:
1393          memcpy(library, src, sizeof(*library));
1394          next = library;
1395          break;
1396       case VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO:
1397          memcpy(rendering, src, sizeof(*rendering));
1398          next = rendering;
1399          break;
1400       default:
1401          break;
1402       }
1403 
1404       if (next) {
1405          cur->pNext = next;
1406          cur = next;
1407       }
1408    }
1409 
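   /* Terminate the rebuilt chain. */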
1410    cur->pNext = NULL;
1411 }
1412 
1413 static void
1414 vn_fix_graphics_pipeline_create_info_pnext(
1415    const struct vn_graphics_pipeline_info_pnext *ignore,
1416    const VkGraphicsPipelineCreateInfo *info,
1417    struct vn_graphics_pipeline_fix_tmp *fix_tmp,
1418    uint32_t index)
1419 {
1420    /* Initialize the pNext chain using the allocated tmp storage. */
1421    vn_graphics_pipeline_create_info_pnext_init(info, fix_tmp, index);
1422 
1423    /* VkPipelineRenderingCreateInfo */
1424    if (ignore->rendering_info_formats) {
1425       fix_tmp->rendering_infos[index].colorAttachmentCount = 0;
1426       fix_tmp->rendering_infos[index].pColorAttachmentFormats = NULL;
1427    }
1428 }
1429 
1430 static const VkGraphicsPipelineCreateInfo *
1431 vn_fix_graphics_pipeline_create_infos(
1432    struct vn_device *dev,
1433    uint32_t info_count,
1434    const VkGraphicsPipelineCreateInfo *infos,
1435    const struct vn_graphics_pipeline_fix_desc fix_descs[info_count],
1436    struct vn_graphics_pipeline_fix_tmp **out_fix_tmp,
1437    const VkAllocationCallbacks *alloc)
1438 {
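   /* OR the per-pipeline masks so the common case, where nothing needs
    * fixing, returns the app's array untouched; pnext_mask is forwarded to
    * the tmp allocation, which can skip pNext storage when no chain fix is
    * needed.
    */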
1439    uint32_t self_mask = 0;
1440    uint32_t pnext_mask = 0;
1441    for (uint32_t i = 0; i < info_count; i++) {
1442       self_mask |= fix_descs[i].self.mask;
1443       pnext_mask |= fix_descs[i].pnext.mask;
1444    }
1445 
1446    if (!self_mask && !pnext_mask) {
1447       /* No fix is needed. */
1448       *out_fix_tmp = NULL;
1449       return infos;
1450    }
1451 
1452    /* Make it visible in traces that fixes are being applied. */
1453    VN_TRACE_SCOPE("sanitize pipeline");
1454 
1455    struct vn_graphics_pipeline_fix_tmp *fix_tmp =
1456       vn_graphics_pipeline_fix_tmp_alloc(alloc, info_count, pnext_mask);
1457    if (!fix_tmp)
1458       return NULL;
1459 
1460    memcpy(fix_tmp->infos, infos, info_count * sizeof(infos[0]));
1461 
1462    for (uint32_t i = 0; i < info_count; i++) {
1463       if (fix_descs[i].self.mask) {
1464          vn_fix_graphics_pipeline_create_info_self(&fix_descs[i].self,
1465                                                    &infos[i], fix_tmp, i);
1466       }
1467       if (fix_descs[i].pnext.mask) {
1468          vn_fix_graphics_pipeline_create_info_pnext(&fix_descs[i].pnext,
1469                                                     &infos[i], fix_tmp, i);
1470       }
1471    }
1472 
1473    *out_fix_tmp = fix_tmp;
1474    return fix_tmp->infos;
1475 }
1476 
1477 /**
1478  * We invalidate each VkPipelineCreationFeedback. This is a legal but useless
1479  * implementation.
1480  *
1481  * We invalidate because the venus protocol (as of 2022-08-25) does not know
1482  * that the VkPipelineCreationFeedback structs in the
1483  * VkGraphicsPipelineCreateInfo pNext are output parameters. Before
1484  * VK_EXT_pipeline_creation_feedback, the pNext chain was input-only.
1485  */
1486 static void
1487 vn_invalidate_pipeline_creation_feedback(const VkBaseInStructure *chain)
1488 {
1489    const VkPipelineCreationFeedbackCreateInfo *feedback_info =
1490       vk_find_struct_const(chain, PIPELINE_CREATION_FEEDBACK_CREATE_INFO);
1491 
1492    if (!feedback_info)
1493       return;
1494 
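   /* Zeroing flags clears VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, telling
    * the app to ignore this feedback.
    */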
1495    feedback_info->pPipelineCreationFeedback->flags = 0;
1496 
1497    for (uint32_t i = 0; i < feedback_info->pipelineStageCreationFeedbackCount;
1498         i++)
1499       feedback_info->pPipelineStageCreationFeedbacks[i].flags = 0;
1500 }
1501 
1502 VkResult
1503 vn_CreateGraphicsPipelines(VkDevice device,
1504                            VkPipelineCache pipelineCache,
1505                            uint32_t createInfoCount,
1506                            const VkGraphicsPipelineCreateInfo *pCreateInfos,
1507                            const VkAllocationCallbacks *pAllocator,
1508                            VkPipeline *pPipelines)
1509 {
1510    struct vn_device *dev = vn_device_from_handle(device);
1511    const VkAllocationCallbacks *alloc =
1512       pAllocator ? pAllocator : &dev->base.base.alloc;
1513    bool want_sync = false;
1514    VkResult result;
1515 
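   /* Zero the out array up front so any early failure leaves all entries as
    * VK_NULL_HANDLE for the app.
    */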
1516    memset(pPipelines, 0, sizeof(*pPipelines) * createInfoCount);
1517 
1518    if (!vn_create_pipeline_handles(dev, VN_PIPELINE_TYPE_GRAPHICS,
1519                                    createInfoCount, pPipelines, alloc)) {
1520       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1521    }
1522 
1523    STACK_ARRAY(struct vn_graphics_pipeline_fix_desc, fix_descs,
1524                createInfoCount);
1525    for (uint32_t i = 0; i < createInfoCount; i++) {
1526       struct vn_graphics_pipeline *pipeline =
1527          vn_graphics_pipeline_from_handle(pPipelines[i]);
1528       vn_graphics_pipeline_state_fill(&pCreateInfos[i], &pipeline->state,
1529                                       &fix_descs[i]);
1530    }
1531 
1532    struct vn_graphics_pipeline_fix_tmp *fix_tmp = NULL;
1533    pCreateInfos = vn_fix_graphics_pipeline_create_infos(
1534       dev, createInfoCount, pCreateInfos, fix_descs, &fix_tmp, alloc);
1535    if (!pCreateInfos) {
1536       vn_destroy_failed_pipelines(dev, createInfoCount, pPipelines, alloc);
1537       STACK_ARRAY_FINISH(fix_descs);
1538       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1539    }
1540 
1541    for (uint32_t i = 0; i < createInfoCount; i++) {
1542       struct vn_pipeline *pipeline = vn_pipeline_from_handle(pPipelines[i]);
1543       struct vn_pipeline_layout *layout =
1544          vn_pipeline_layout_from_handle(pCreateInfos[i].layout);
1545       if (layout && (layout->push_descriptor_set_layout ||
1546                      layout->has_push_constant_ranges)) {
1547          pipeline->layout = vn_pipeline_layout_ref(dev, layout);
1548       }
1549 
1550       if ((pCreateInfos[i].flags & VN_PIPELINE_CREATE_SYNC_MASK))
1551          want_sync = true;
1552 
1553       vn_invalidate_pipeline_creation_feedback(
1554          (const VkBaseInStructure *)pCreateInfos[i].pNext);
1555    }
1556 
1557    struct vn_ring *target_ring = vn_get_target_ring(dev);
1558    if (!target_ring) {
1559       vk_free(alloc, fix_tmp);
1560       vn_destroy_failed_pipelines(dev, createInfoCount, pPipelines, alloc);
1561       STACK_ARRAY_FINISH(fix_descs);
1562       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1563    }
1564 
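   /* A synchronous call is needed when a flag in VN_PIPELINE_CREATE_SYNC_MASK
    * (e.g. one that lets creation legitimately fail early) can change the
    * result, or when targeting a non-primary ring; otherwise encode
    * asynchronously and optimistically report VK_SUCCESS.
    */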
1565    if (want_sync || target_ring != dev->primary_ring) {
1566       if (target_ring == dev->primary_ring) {
1567          VN_TRACE_SCOPE("want sync");
1568       }
1569 
1570       result = vn_call_vkCreateGraphicsPipelines(
1571          target_ring, device, pipelineCache, createInfoCount, pCreateInfos,
1572          NULL, pPipelines);
1573       if (result != VK_SUCCESS)
1574          vn_destroy_failed_pipelines(dev, createInfoCount, pPipelines, alloc);
1575    } else {
1576       vn_async_vkCreateGraphicsPipelines(target_ring, device, pipelineCache,
1577                                          createInfoCount, pCreateInfos, NULL,
1578                                          pPipelines);
1579       result = VK_SUCCESS;
1580    }
1581 
1582    vk_free(alloc, fix_tmp);
1583    STACK_ARRAY_FINISH(fix_descs);
1584    return vn_result(dev->instance, result);
1585 }
1586 
1587 VkResult
1588 vn_CreateComputePipelines(VkDevice device,
1589                           VkPipelineCache pipelineCache,
1590                           uint32_t createInfoCount,
1591                           const VkComputePipelineCreateInfo *pCreateInfos,
1592                           const VkAllocationCallbacks *pAllocator,
1593                           VkPipeline *pPipelines)
1594 {
1595    struct vn_device *dev = vn_device_from_handle(device);
1596    const VkAllocationCallbacks *alloc =
1597       pAllocator ? pAllocator : &dev->base.base.alloc;
1598    bool want_sync = false;
1599    VkResult result;
1600 
1601    memset(pPipelines, 0, sizeof(*pPipelines) * createInfoCount);
1602 
1603    if (!vn_create_pipeline_handles(dev, VN_PIPELINE_TYPE_COMPUTE,
1604                                    createInfoCount, pPipelines, alloc))
1605       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1606 
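   /* Unlike graphics pipelines, compute create infos need no fixing: this
    * loop only takes pipeline layout references, checks for flags requiring
    * a synchronous call, and invalidates creation feedback.
    */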
1607    for (uint32_t i = 0; i < createInfoCount; i++) {
1608       struct vn_pipeline *pipeline = vn_pipeline_from_handle(pPipelines[i]);
1609       struct vn_pipeline_layout *layout =
1610          vn_pipeline_layout_from_handle(pCreateInfos[i].layout);
1611       if (layout->push_descriptor_set_layout ||
1612           layout->has_push_constant_ranges) {
1613          pipeline->layout = vn_pipeline_layout_ref(dev, layout);
1614       }
1615       if ((pCreateInfos[i].flags & VN_PIPELINE_CREATE_SYNC_MASK))
1616          want_sync = true;
1617 
1618       vn_invalidate_pipeline_creation_feedback(
1619          (const VkBaseInStructure *)pCreateInfos[i].pNext);
1620    }
1621 
1622    struct vn_ring *target_ring = vn_get_target_ring(dev);
1623    if (!target_ring) {
1624       vn_destroy_failed_pipelines(dev, createInfoCount, pPipelines, alloc);
1625       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1626    }
1627 
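   /* Same sync-vs-async split as in vn_CreateGraphicsPipelines. */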
1628    if (want_sync || target_ring != dev->primary_ring) {
1629       result = vn_call_vkCreateComputePipelines(
1630          target_ring, device, pipelineCache, createInfoCount, pCreateInfos,
1631          NULL, pPipelines);
1632       if (result != VK_SUCCESS)
1633          vn_destroy_failed_pipelines(dev, createInfoCount, pPipelines, alloc);
1634    } else {
1635       vn_async_vkCreateComputePipelines(target_ring, device, pipelineCache,
1636                                         createInfoCount, pCreateInfos, NULL,
1637                                         pPipelines);
1638       result = VK_SUCCESS;
1639    }
1640 
1641    return vn_result(dev->instance, result);
1642 }
1643 
1644 void
1645 vn_DestroyPipeline(VkDevice device,
1646                    VkPipeline _pipeline,
1647                    const VkAllocationCallbacks *pAllocator)
1648 {
1649    struct vn_device *dev = vn_device_from_handle(device);
1650    struct vn_pipeline *pipeline = vn_pipeline_from_handle(_pipeline);
1651    const VkAllocationCallbacks *alloc =
1652       pAllocator ? pAllocator : &dev->base.base.alloc;
1653 
1654    if (!pipeline)
1655       return;
1656 
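   /* A layout reference was taken at creation only for layouts with a push
    * descriptor set layout or push constant ranges; release it here.
    */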
1657    if (pipeline->layout) {
1658       vn_pipeline_layout_unref(dev, pipeline->layout);
1659    }
1660 
1661    vn_async_vkDestroyPipeline(dev->primary_ring, device, _pipeline, NULL);
1662 
1663    vn_object_base_fini(&pipeline->base);
1664    vk_free(alloc, pipeline);
1665 }
1666