
Lines Matching +full:venus +full:- +full:protocol

3  * SPDX-License-Identifier: MIT
13 #include "venus-protocol/vn_protocol_driver_pipeline.h"
14 #include "venus-protocol/vn_protocol_driver_pipeline_cache.h"
15 #include "venus-protocol/vn_protocol_driver_pipeline_layout.h"
16 #include "venus-protocol/vn_protocol_driver_shader_module.h"
115 * state, pre-rasterization shader state, fragment shader state, and
137 * venus needs to track. Named members reduce long lines.
166 * Graphics pipeline state that Venus tracks to determine which fixes are
224 * - VK_EXT_pipeline_robustness
244 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_CreateShaderModule()
250 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateShaderModule()
252 vn_object_base_init(&mod->base, VK_OBJECT_TYPE_SHADER_MODULE, &dev->base); in vn_CreateShaderModule()
255 vn_async_vkCreateShaderModule(dev->primary_ring, device, pCreateInfo, NULL, in vn_CreateShaderModule()
271 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_DestroyShaderModule()
276 vn_async_vkDestroyShaderModule(dev->primary_ring, device, shaderModule, in vn_DestroyShaderModule()
279 vn_object_base_fini(&mod->base); in vn_DestroyShaderModule()
289 const VkAllocationCallbacks *alloc = &dev->base.base.alloc; in vn_pipeline_layout_destroy()
290 if (pipeline_layout->push_descriptor_set_layout) { in vn_pipeline_layout_destroy()
292 dev, pipeline_layout->push_descriptor_set_layout); in vn_pipeline_layout_destroy()
295 dev->primary_ring, vn_device_to_handle(dev), in vn_pipeline_layout_destroy()
298 vn_object_base_fini(&pipeline_layout->base); in vn_pipeline_layout_destroy()
306 vn_refcount_inc(&pipeline_layout->refcount); in vn_pipeline_layout_ref()
314 if (vn_refcount_dec(&pipeline_layout->refcount)) in vn_pipeline_layout_unref()
325 /* ignore pAllocator as the pipeline layout is reference-counted */ in vn_CreatePipelineLayout()
326 const VkAllocationCallbacks *alloc = &dev->base.base.alloc; in vn_CreatePipelineLayout()
332 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreatePipelineLayout()
334 vn_object_base_init(&layout->base, VK_OBJECT_TYPE_PIPELINE_LAYOUT, in vn_CreatePipelineLayout()
335 &dev->base); in vn_CreatePipelineLayout()
336 layout->refcount = VN_REFCOUNT_INIT(1); in vn_CreatePipelineLayout()
338 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; i++) { in vn_CreatePipelineLayout()
340 vn_descriptor_set_layout_from_handle(pCreateInfo->pSetLayouts[i]); in vn_CreatePipelineLayout()
345 * VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter in vn_CreatePipelineLayout()
352 descriptor_set_layout->is_push_descriptor) { in vn_CreatePipelineLayout()
353 layout->push_descriptor_set_layout = in vn_CreatePipelineLayout()
359 layout->has_push_constant_ranges = pCreateInfo->pushConstantRangeCount > 0; in vn_CreatePipelineLayout()
362 vn_async_vkCreatePipelineLayout(dev->primary_ring, device, pCreateInfo, in vn_CreatePipelineLayout()
395 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_CreatePipelineCache()
401 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreatePipelineCache()
403 vn_object_base_init(&cache->base, VK_OBJECT_TYPE_PIPELINE_CACHE, in vn_CreatePipelineCache()
404 &dev->base); in vn_CreatePipelineCache()
407 if (pCreateInfo->initialDataSize) { in vn_CreatePipelineCache()
409 pCreateInfo->pInitialData; in vn_CreatePipelineCache()
412 local_create_info.initialDataSize -= header->header_size; in vn_CreatePipelineCache()
413 local_create_info.pInitialData += header->header_size; in vn_CreatePipelineCache()
418 vn_async_vkCreatePipelineCache(dev->primary_ring, device, pCreateInfo, in vn_CreatePipelineCache()
435 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_DestroyPipelineCache()
440 vn_async_vkDestroyPipelineCache(dev->primary_ring, device, pipelineCache, in vn_DestroyPipelineCache()
443 vn_object_base_fini(&cache->base); in vn_DestroyPipelineCache()
451 return dev->primary_ring; in vn_get_target_ring()
453 struct vn_ring *ring = vn_tls_get_ring(dev->instance); in vn_get_target_ring()
457 if (ring != dev->primary_ring) { in vn_get_target_ring()
462 * - For pipeline create, track ring seqnos of layout and renderpass in vn_get_target_ring()
464 * - For pipeline cache retrieval, track ring seqno of pipeline cache in vn_get_target_ring()
467 vn_ring_wait_all(dev->primary_ring); in vn_get_target_ring()
479 struct vn_physical_device *physical_dev = dev->physical_device; in vn_GetPipelineCacheData()
488 return vn_error(dev->instance, result); in vn_GetPipelineCacheData()
499 const struct vk_properties *props = &physical_dev->base.base.properties; in vn_GetPipelineCacheData()
500 header->header_size = sizeof(*header); in vn_GetPipelineCacheData()
501 header->header_version = VK_PIPELINE_CACHE_HEADER_VERSION_ONE; in vn_GetPipelineCacheData()
502 header->vendor_id = props->vendorID; in vn_GetPipelineCacheData()
503 header->device_id = props->deviceID; in vn_GetPipelineCacheData()
504 memcpy(header->uuid, props->pipelineCacheUUID, VK_UUID_SIZE); in vn_GetPipelineCacheData()
506 *pDataSize -= header->header_size; in vn_GetPipelineCacheData()
509 pDataSize, pData + header->header_size); in vn_GetPipelineCacheData()
511 return vn_error(dev->instance, result); in vn_GetPipelineCacheData()
513 *pDataSize += header->header_size; in vn_GetPipelineCacheData()
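
The header written at lines 500-504 (and stripped again at lines 412-413 before the blob is handed back to the renderer by vkCreatePipelineCache) follows the layout the Vulkan spec requires at the start of pipeline cache data; the snake_case members above mirror VkPipelineCacheHeaderVersionOne from vulkan_core.h:

    typedef struct VkPipelineCacheHeaderVersionOne {
        uint32_t                     headerSize;
        VkPipelineCacheHeaderVersion headerVersion;  /* VK_PIPELINE_CACHE_HEADER_VERSION_ONE */
        uint32_t                     vendorID;
        uint32_t                     deviceID;
        uint8_t                      pipelineCacheUUID[VK_UUID_SIZE];
    } VkPipelineCacheHeaderVersionOne;

The renderer's raw payload lands at pData + header->header_size (line 509), so the driver prepends this header itself and grows *pDataSize accordingly (line 513), keeping the returned blob compatible with applications that validate vendorID, deviceID, and pipelineCacheUUID against the physical device before reusing a cache.
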
526 vn_async_vkMergePipelineCaches(dev->primary_ring, device, dstCache, in vn_MergePipelineCaches()
538 assert(p->type == VN_PIPELINE_TYPE_GRAPHICS); in vn_graphics_pipeline_from_handle()
568 vn_object_base_fini(&pipeline->base); in vn_create_pipeline_handles()
577 vn_object_base_init(&pipeline->base, VK_OBJECT_TYPE_PIPELINE, in vn_create_pipeline_handles()
578 &dev->base); in vn_create_pipeline_handles()
579 pipeline->type = type; in vn_create_pipeline_handles()
597 if (!failed_only || pipeline->base.id == 0) { in vn_destroy_pipeline_handles_internal()
598 if (pipeline->layout) { in vn_destroy_pipeline_handles_internal()
599 vn_pipeline_layout_unref(dev, pipeline->layout); in vn_destroy_pipeline_handles_internal()
601 vn_object_base_fini(&pipeline->base); in vn_destroy_pipeline_handles_internal()
674 tmp->infos = infos; in vn_graphics_pipeline_fix_tmp_alloc()
675 tmp->multisample_state_infos = multisample_state_infos; in vn_graphics_pipeline_fix_tmp_alloc()
676 tmp->viewport_state_infos = viewport_state_infos; in vn_graphics_pipeline_fix_tmp_alloc()
679 tmp->gpl_infos = gpl_infos; in vn_graphics_pipeline_fix_tmp_alloc()
680 tmp->flags2_infos = flags2_infos; in vn_graphics_pipeline_fix_tmp_alloc()
681 tmp->feedback_infos = feedback_infos; in vn_graphics_pipeline_fix_tmp_alloc()
682 tmp->fsr_infos = fsr_infos; in vn_graphics_pipeline_fix_tmp_alloc()
683 tmp->library_infos = library_infos; in vn_graphics_pipeline_fix_tmp_alloc()
684 tmp->rendering_infos = rendering_infos; in vn_graphics_pipeline_fix_tmp_alloc()
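
The assignments above populate a scratch object holding deep copies of the create infos plus the pNext structs Venus may need to patch before encoding. The member names come from the matched lines; the element types below are inferred from the later pNext handling (lines 1401-1409) and should be read as a sketch rather than the exact Mesa definition:

    /* Assumed shape of the fix-up scratch storage; each array has one entry
     * per pipeline being created and all arrays are carved out of a single
     * allocation. */
    struct vn_graphics_pipeline_fix_tmp {
       VkGraphicsPipelineCreateInfo *infos;
       VkPipelineMultisampleStateCreateInfo *multisample_state_infos;
       VkPipelineViewportStateCreateInfo *viewport_state_infos;
       /* pNext structs that may need to be deep-copied */
       VkGraphicsPipelineLibraryCreateInfoEXT *gpl_infos;
       VkPipelineCreateFlags2CreateInfo *flags2_infos;
       VkPipelineCreationFeedbackCreateInfo *feedback_infos;
       VkPipelineFragmentShadingRateStateCreateInfoKHR *fsr_infos;
       VkPipelineLibraryCreateInfoKHR *library_infos;
       VkPipelineRenderingCreateInfo *rendering_infos;
    };
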
713 vk_find_struct_const(info->pNext, in vn_graphics_pipeline_library_state_update()
716 vk_find_struct_const(info->pNext, PIPELINE_LIBRARY_CREATE_INFO_KHR); in vn_graphics_pipeline_library_state_update()
717 const uint32_t lib_count = lib_info ? lib_info->libraryCount : 0; in vn_graphics_pipeline_library_state_update()
720 gpl->mask |= gpl_info->flags; in vn_graphics_pipeline_library_state_update()
723 gpl->mask |= 0; in vn_graphics_pipeline_library_state_update()
725 gpl->mask |= in vn_graphics_pipeline_library_state_update()
746 const VkPipelineDynamicStateCreateInfo *dyn_info = info->pDynamicState; in vn_graphics_dynamic_state_update()
752 for (uint32_t i = 0; i < dyn_info->dynamicStateCount; i++) { in vn_graphics_dynamic_state_update()
753 switch (dyn_info->pDynamicStates[i]) { in vn_graphics_dynamic_state_update()
798 dynamic->vertex_input |= raw.vertex_input; in vn_graphics_dynamic_state_update()
801 dynamic->viewport |= raw.viewport; in vn_graphics_dynamic_state_update()
802 dynamic->viewport_with_count |= raw.viewport_with_count; in vn_graphics_dynamic_state_update()
803 dynamic->scissor |= raw.scissor; in vn_graphics_dynamic_state_update()
804 dynamic->scissor_with_count |= raw.scissor_with_count; in vn_graphics_dynamic_state_update()
805 dynamic->rasterizer_discard_enable |= raw.rasterizer_discard_enable; in vn_graphics_dynamic_state_update()
808 dynamic->sample_mask |= raw.sample_mask; in vn_graphics_dynamic_state_update()
811 dynamic->sample_mask |= raw.sample_mask; in vn_graphics_dynamic_state_update()
831 * VUID-VkGraphicsPipelineCreateInfo-flags-06640 in vn_graphics_shader_stages_update()
842 valid->self.shader_stages = true; in vn_graphics_shader_stages_update()
844 for (uint32_t i = 0; i < info->stageCount; i++) { in vn_graphics_shader_stages_update()
848 * VUID-VkGraphicsPipelineCreateInfo-pStages-06894 in vn_graphics_shader_stages_update()
849 * VUID-VkGraphicsPipelineCreateInfo-pStages-06895 in vn_graphics_shader_stages_update()
850 * VUID-VkGraphicsPipelineCreateInfo-pStages-06896 in vn_graphics_shader_stages_update()
852 *shader_stages |= info->pStages[i].stage; in vn_graphics_shader_stages_update()
875 /* VUID-VkGraphicsPipelineCreateInfo-flags-06643 in vn_render_pass_state_update()
884 valid->self.render_pass |= direct_gpl.pre_raster_shaders || in vn_render_pass_state_update()
888 /* VUID-VkGraphicsPipelineCreateInfo-renderPass-06579 in vn_render_pass_state_update()
896 * VUID-VkGraphicsPipelineCreateInfo-renderPass-06580 in vn_render_pass_state_update()
903 valid->pnext.rendering_info_formats |= in vn_render_pass_state_update()
904 direct_gpl.fragment_output && !info->renderPass; in vn_render_pass_state_update()
906 if (state->attachment_aspects != VK_IMAGE_ASPECT_METADATA_BIT) { in vn_render_pass_state_update()
916 * VUID-VkGraphicsPipelineCreateInfo-renderpass-06625 in vn_render_pass_state_update()
917 * VUID-VkGraphicsPipelineCreateInfo-pLibraries-06628 in vn_render_pass_state_update()
922 if (valid->self.render_pass && info->renderPass) { in vn_render_pass_state_update()
924 vn_render_pass_from_handle(info->renderPass); in vn_render_pass_state_update()
925 state->attachment_aspects = in vn_render_pass_state_update()
926 pass->subpasses[info->subpass].attachment_aspects; in vn_render_pass_state_update()
930 if (valid->pnext.rendering_info_formats) { in vn_render_pass_state_update()
931 state->attachment_aspects = 0; in vn_render_pass_state_update()
950 vk_find_struct_const(info->pNext, PIPELINE_RENDERING_CREATE_INFO); in vn_render_pass_state_update()
953 for (uint32_t i = 0; i < r_info->colorAttachmentCount; i++) { in vn_render_pass_state_update()
954 if (r_info->pColorAttachmentFormats[i]) { in vn_render_pass_state_update()
955 state->attachment_aspects |= VK_IMAGE_ASPECT_COLOR_BIT; in vn_render_pass_state_update()
959 if (r_info->depthAttachmentFormat) in vn_render_pass_state_update()
960 state->attachment_aspects |= VK_IMAGE_ASPECT_DEPTH_BIT; in vn_render_pass_state_update()
961 if (r_info->stencilAttachmentFormat) in vn_render_pass_state_update()
962 state->attachment_aspects |= VK_IMAGE_ASPECT_STENCIL_BIT; in vn_render_pass_state_update()
969 assert(state->attachment_aspects == VK_IMAGE_ASPECT_METADATA_BIT); in vn_render_pass_state_update()
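
For the dynamic-rendering path above (lines 950-962), the attachment aspects are derived purely from the formats in VkPipelineRenderingCreateInfo. A small application-side example of the input this code inspects, not taken from the Venus source:

    /* One color attachment plus a depth-only format: the checks at lines
     * 953-962 would compute attachment_aspects ==
     * VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT. */
    static const VkFormat color_formats[] = { VK_FORMAT_B8G8R8A8_UNORM };
    static const VkPipelineRenderingCreateInfo rendering_info = {
       .sType = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
       .colorAttachmentCount = 1,
       .pColorAttachmentFormats = color_formats,
       .depthAttachmentFormat = VK_FORMAT_D32_SFLOAT,
       .stencilAttachmentFormat = VK_FORMAT_UNDEFINED, /* no stencil aspect */
    };
    /* chained into VkGraphicsPipelineCreateInfo::pNext when renderPass is
     * VK_NULL_HANDLE */
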
978 * VUID-VkGraphicsPipelineCreateInfo-pLibraries-06611 in vn_graphics_pipeline_state_merge()
985 assert(!(dst->gpl.mask & src->gpl.mask)); in vn_graphics_pipeline_state_merge()
987 dst->gpl.mask |= src->gpl.mask; in vn_graphics_pipeline_state_merge()
988 dst->dynamic.mask |= src->dynamic.mask; in vn_graphics_pipeline_state_merge()
989 dst->shader_stages |= src->shader_stages; in vn_graphics_pipeline_state_merge()
991 VkImageAspectFlags src_aspects = src->render_pass.attachment_aspects; in vn_graphics_pipeline_state_merge()
992 VkImageAspectFlags *dst_aspects = &dst->render_pass.attachment_aspects; in vn_graphics_pipeline_state_merge()
1003 if (dst->gpl.pre_raster_shaders) in vn_graphics_pipeline_state_merge()
1004 dst->rasterizer_discard_enable = src->rasterizer_discard_enable; in vn_graphics_pipeline_state_merge()
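
The merge simply unions the per-library masks; the assert at line 985 encodes VUID 06611, i.e. no graphics-pipeline-library subset may be supplied by more than one linked library. A standalone illustration with the VK_EXT_graphics_pipeline_library flag bits (not Venus code):

    #include <assert.h>
    #include <vulkan/vulkan.h>

    int main(void)
    {
       /* library A provides pre-rasterization state, library B the fragment
        * stages and fragment output */
       VkGraphicsPipelineLibraryFlagsEXT lib_a =
          VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT;
       VkGraphicsPipelineLibraryFlagsEXT lib_b =
          VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT |
          VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT;

       assert(!(lib_a & lib_b));  /* no subset provided twice (VUID 06611) */
       VkGraphicsPipelineLibraryFlagsEXT merged = lib_a | lib_b;

       /* the linked pipeline still lacks the vertex-input subset */
       assert(!(merged &
                VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT));
       return 0;
    }
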
1012 * \pre state is zero-filled
1021 * primarily on understanding the non-VU text, and use VUs to verify your
1031 /* Assume that state is already zero-filled. in vn_graphics_pipeline_state_fill()
1035 state->render_pass.attachment_aspects = VK_IMAGE_ASPECT_METADATA_BIT; in vn_graphics_pipeline_state_fill()
1038 vk_find_struct_const(info->pNext, PIPELINE_RENDERING_CREATE_INFO); in vn_graphics_pipeline_state_fill()
1040 vk_find_struct_const(info->pNext, PIPELINE_LIBRARY_CREATE_INFO_KHR); in vn_graphics_pipeline_state_fill()
1041 const uint32_t lib_count = lib_info ? lib_info->libraryCount : 0; in vn_graphics_pipeline_state_fill()
1054 * a valid *value* for a pointer-typed variable. Same for VK_NULL_HANDLE in vn_graphics_pipeline_state_fill()
1055 * and Vulkan handle-typed variables. in vn_graphics_pipeline_state_fill()
1067 vn_graphics_pipeline_from_handle(lib_info->pLibraries[i]); in vn_graphics_pipeline_state_fill()
1068 vn_graphics_pipeline_state_merge(state, &p->state); in vn_graphics_pipeline_state_fill()
1078 * VUID-VkGraphicsPipelineCreateInfo-pLibraries-06611 in vn_graphics_pipeline_state_fill()
1085 assert(!(direct_gpl.mask & state->gpl.mask)); in vn_graphics_pipeline_state_fill()
1088 vn_graphics_dynamic_state_update(info, direct_gpl, &state->dynamic); in vn_graphics_pipeline_state_fill()
1090 &state->shader_stages); in vn_graphics_pipeline_state_fill()
1091 vn_render_pass_state_update(info, direct_gpl, &valid, &state->render_pass); in vn_graphics_pipeline_state_fill()
1093 /* Collect remaining pre-raster shaders state. in vn_graphics_pipeline_state_fill()
1095 * Of the remaining state, we must first collect the pre-raster shaders in vn_graphics_pipeline_state_fill()
1100 (bool)(state->shader_stages & in vn_graphics_pipeline_state_fill()
1106 if (info->pRasterizationState) { in vn_graphics_pipeline_state_fill()
1107 state->rasterizer_discard_enable = in vn_graphics_pipeline_state_fill()
1108 info->pRasterizationState->rasterizerDiscardEnable; in vn_graphics_pipeline_state_fill()
1112 !state->dynamic.rasterizer_discard_enable && in vn_graphics_pipeline_state_fill()
1113 state->rasterizer_discard_enable; in vn_graphics_pipeline_state_fill()
1119 !state->dynamic.viewport && !state->dynamic.viewport_with_count; in vn_graphics_pipeline_state_fill()
1122 !state->dynamic.scissor && !state->dynamic.scissor_with_count; in vn_graphics_pipeline_state_fill()
1126 state->gpl.pre_raster_shaders = true; in vn_graphics_pipeline_state_fill()
1135 !state->gpl.pre_raster_shaders || in vn_graphics_pipeline_state_fill()
1136 (state->shader_stages & VK_SHADER_STAGE_VERTEX_BIT); in vn_graphics_pipeline_state_fill()
1139 may_have_vertex_shader && !state->dynamic.vertex_input; in vn_graphics_pipeline_state_fill()
1144 state->gpl.vertex_input = true; in vn_graphics_pipeline_state_fill()
1153 * If a pipeline specifies pre-rasterization state either directly or by in vn_graphics_pipeline_state_fill()
1163 * specify pre-rasterization state in any way, that pipeline must in vn_graphics_pipeline_state_fill()
1170 state->gpl.pre_raster_shaders && in vn_graphics_pipeline_state_fill()
1171 !state->dynamic.rasterizer_discard_enable && in vn_graphics_pipeline_state_fill()
1172 state->rasterizer_discard_enable; in vn_graphics_pipeline_state_fill()
1179 * VUID-VkGraphicsPipelineCreateInfo-pMultisampleState-06629 in vn_graphics_pipeline_state_fill()
1188 !state->dynamic.sample_mask; in vn_graphics_pipeline_state_fill()
1190 if ((state->render_pass.attachment_aspects & in vn_graphics_pipeline_state_fill()
1193 } else if (state->render_pass.attachment_aspects == in vn_graphics_pipeline_state_fill()
1212 state->gpl.fragment_shader = true; in vn_graphics_pipeline_state_fill()
1220 * VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751 in vn_graphics_pipeline_state_fill()
1229 !state->dynamic.sample_mask; in vn_graphics_pipeline_state_fill()
1232 (bool)(state->render_pass.attachment_aspects & in vn_graphics_pipeline_state_fill()
1235 (bool)(state->render_pass.attachment_aspects & in vn_graphics_pipeline_state_fill()
1240 state->gpl.fragment_output = true; in vn_graphics_pipeline_state_fill()
1247 * VUID-VkGraphicsPipelineCreateInfo-layout-06602 in vn_graphics_pipeline_state_fill()
1249 * If the pipeline requires fragment shader state or pre-rasterization in vn_graphics_pipeline_state_fill()
1252 if ((state->gpl.fragment_shader && !is_raster_statically_disabled) || in vn_graphics_pipeline_state_fill()
1253 state->gpl.pre_raster_shaders) in vn_graphics_pipeline_state_fill()
1258 * VUID-VkGraphicsPipelineCreateInfo-flags-07984 in vn_graphics_pipeline_state_fill()
1261 * basePipelineIndex is -1, basePipelineHandle must be a valid graphics in vn_graphics_pipeline_state_fill()
1265 info->basePipelineIndex == -1) in vn_graphics_pipeline_state_fill()
1270 /* clang-format off */ in vn_graphics_pipeline_state_fill()
1273 info->pStages, in vn_graphics_pipeline_state_fill()
1276 info->pVertexInputState, in vn_graphics_pipeline_state_fill()
1279 info->pInputAssemblyState, in vn_graphics_pipeline_state_fill()
1282 info->pTessellationState, in vn_graphics_pipeline_state_fill()
1285 info->pViewportState, in vn_graphics_pipeline_state_fill()
1289 info->pViewportState && in vn_graphics_pipeline_state_fill()
1290 info->pViewportState->pViewports && in vn_graphics_pipeline_state_fill()
1291 info->pViewportState->viewportCount, in vn_graphics_pipeline_state_fill()
1295 info->pViewportState && in vn_graphics_pipeline_state_fill()
1296 info->pViewportState->pScissors && in vn_graphics_pipeline_state_fill()
1297 info->pViewportState->scissorCount, in vn_graphics_pipeline_state_fill()
1300 info->pRasterizationState, in vn_graphics_pipeline_state_fill()
1303 info->pMultisampleState, in vn_graphics_pipeline_state_fill()
1307 info->pMultisampleState && in vn_graphics_pipeline_state_fill()
1308 info->pMultisampleState->pSampleMask, in vn_graphics_pipeline_state_fill()
1311 info->pDepthStencilState, in vn_graphics_pipeline_state_fill()
1314 info->pColorBlendState, in vn_graphics_pipeline_state_fill()
1317 info->layout, in vn_graphics_pipeline_state_fill()
1320 info->renderPass, in vn_graphics_pipeline_state_fill()
1323 info->basePipelineHandle, in vn_graphics_pipeline_state_fill()
1324 /* clang-format on */ in vn_graphics_pipeline_state_fill()
1327 /* clang-format off */ in vn_graphics_pipeline_state_fill()
1331 rendering_info->pColorAttachmentFormats && in vn_graphics_pipeline_state_fill()
1332 rendering_info->colorAttachmentCount, in vn_graphics_pipeline_state_fill()
1333 /* clang-format on */ in vn_graphics_pipeline_state_fill()
1346 if (ignore->shader_stages) { in vn_fix_graphics_pipeline_create_info_self()
1347 fix_tmp->infos[index].stageCount = 0; in vn_fix_graphics_pipeline_create_info_self()
1348 fix_tmp->infos[index].pStages = NULL; in vn_fix_graphics_pipeline_create_info_self()
1350 if (ignore->vertex_input_state) in vn_fix_graphics_pipeline_create_info_self()
1351 fix_tmp->infos[index].pVertexInputState = NULL; in vn_fix_graphics_pipeline_create_info_self()
1352 if (ignore->input_assembly_state) in vn_fix_graphics_pipeline_create_info_self()
1353 fix_tmp->infos[index].pInputAssemblyState = NULL; in vn_fix_graphics_pipeline_create_info_self()
1354 if (ignore->tessellation_state) in vn_fix_graphics_pipeline_create_info_self()
1355 fix_tmp->infos[index].pTessellationState = NULL; in vn_fix_graphics_pipeline_create_info_self()
1356 if (ignore->viewport_state) in vn_fix_graphics_pipeline_create_info_self()
1357 fix_tmp->infos[index].pViewportState = NULL; in vn_fix_graphics_pipeline_create_info_self()
1358 if (ignore->rasterization_state) in vn_fix_graphics_pipeline_create_info_self()
1359 fix_tmp->infos[index].pRasterizationState = NULL; in vn_fix_graphics_pipeline_create_info_self()
1360 if (ignore->multisample_state) in vn_fix_graphics_pipeline_create_info_self()
1361 fix_tmp->infos[index].pMultisampleState = NULL; in vn_fix_graphics_pipeline_create_info_self()
1362 if (ignore->depth_stencil_state) in vn_fix_graphics_pipeline_create_info_self()
1363 fix_tmp->infos[index].pDepthStencilState = NULL; in vn_fix_graphics_pipeline_create_info_self()
1364 if (ignore->color_blend_state) in vn_fix_graphics_pipeline_create_info_self()
1365 fix_tmp->infos[index].pColorBlendState = NULL; in vn_fix_graphics_pipeline_create_info_self()
1366 if (ignore->pipeline_layout) in vn_fix_graphics_pipeline_create_info_self()
1367 fix_tmp->infos[index].layout = VK_NULL_HANDLE; in vn_fix_graphics_pipeline_create_info_self()
1368 if (ignore->base_pipeline_handle) in vn_fix_graphics_pipeline_create_info_self()
1369 fix_tmp->infos[index].basePipelineHandle = VK_NULL_HANDLE; in vn_fix_graphics_pipeline_create_info_self()
1372 if (ignore->multisample_state_sample_mask) { in vn_fix_graphics_pipeline_create_info_self()
1374 fix_tmp->multisample_state_infos[index] = *info->pMultisampleState; in vn_fix_graphics_pipeline_create_info_self()
1375 fix_tmp->infos[index].pMultisampleState = in vn_fix_graphics_pipeline_create_info_self()
1376 &fix_tmp->multisample_state_infos[index]; in vn_fix_graphics_pipeline_create_info_self()
1378 fix_tmp->multisample_state_infos[index].pSampleMask = NULL; in vn_fix_graphics_pipeline_create_info_self()
1382 if (ignore->viewport_state_viewports || ignore->viewport_state_scissors) { in vn_fix_graphics_pipeline_create_info_self()
1384 fix_tmp->viewport_state_infos[index] = *info->pViewportState; in vn_fix_graphics_pipeline_create_info_self()
1385 fix_tmp->infos[index].pViewportState = in vn_fix_graphics_pipeline_create_info_self()
1386 &fix_tmp->viewport_state_infos[index]; in vn_fix_graphics_pipeline_create_info_self()
1388 if (ignore->viewport_state_viewports) in vn_fix_graphics_pipeline_create_info_self()
1389 fix_tmp->viewport_state_infos[index].pViewports = NULL; in vn_fix_graphics_pipeline_create_info_self()
1390 if (ignore->viewport_state_scissors) in vn_fix_graphics_pipeline_create_info_self()
1391 fix_tmp->viewport_state_infos[index].pScissors = NULL; in vn_fix_graphics_pipeline_create_info_self()
1401 VkGraphicsPipelineLibraryCreateInfoEXT *gpl = &fix_tmp->gpl_infos[index]; in vn_graphics_pipeline_create_info_pnext_init()
1402 VkPipelineCreateFlags2CreateInfo *flags2 = &fix_tmp->flags2_infos[index]; in vn_graphics_pipeline_create_info_pnext_init()
1404 &fix_tmp->feedback_infos[index]; in vn_graphics_pipeline_create_info_pnext_init()
1406 &fix_tmp->fsr_infos[index]; in vn_graphics_pipeline_create_info_pnext_init()
1407 VkPipelineLibraryCreateInfoKHR *library = &fix_tmp->library_infos[index]; in vn_graphics_pipeline_create_info_pnext_init()
1409 &fix_tmp->rendering_infos[index]; in vn_graphics_pipeline_create_info_pnext_init()
1411 VkBaseOutStructure *cur = (void *)&fix_tmp->infos[index]; in vn_graphics_pipeline_create_info_pnext_init()
1413 vk_foreach_struct_const(src, info->pNext) { in vn_graphics_pipeline_create_info_pnext_init()
1415 switch (src->sType) { in vn_graphics_pipeline_create_info_pnext_init()
1445 cur->pNext = next; in vn_graphics_pipeline_create_info_pnext_init()
1450 cur->pNext = NULL; in vn_graphics_pipeline_create_info_pnext_init()
1464 if (ignore->rendering_info_formats) { in vn_fix_graphics_pipeline_create_info_pnext()
1465 fix_tmp->rendering_infos[index].colorAttachmentCount = 0; in vn_fix_graphics_pipeline_create_info_pnext()
1466 fix_tmp->rendering_infos[index].pColorAttachmentFormats = NULL; in vn_fix_graphics_pipeline_create_info_pnext()
1500 memcpy(fix_tmp->infos, infos, info_count * sizeof(infos[0])); in vn_fix_graphics_pipeline_create_infos()
1514 return fix_tmp->infos; in vn_fix_graphics_pipeline_create_infos()
1521 * We invalidate because the venus protocol (as of 2022-08-25) does not know
1524 * VK_EXT_pipeline_creation_feedback, the pNext chain was input-only.
1535 feedback_info->pPipelineCreationFeedback->flags = 0; in vn_invalidate_pipeline_creation_feedback()
1537 for (uint32_t i = 0; i < feedback_info->pipelineStageCreationFeedbackCount; in vn_invalidate_pipeline_creation_feedback()
1539 feedback_info->pPipelineStageCreationFeedbacks[i].flags = 0; in vn_invalidate_pipeline_creation_feedback()
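
Clearing the flags is enough to invalidate the feedback because the other fields are meaningless unless the valid bit is set. A hypothetical caller-side check (not part of the Venus source) that consequently ignores the zeroed feedback:

    #include <inttypes.h>
    #include <stdio.h>
    #include <vulkan/vulkan.h>

    /* VkPipelineCreationFeedbackCreateInfo is chained into
     * VkGraphicsPipelineCreateInfo::pNext; after vkCreateGraphicsPipelines()
     * returns, only feedback with the VALID bit set may be trusted. */
    static void
    print_pipeline_feedback(const VkPipelineCreationFeedback *feedback)
    {
       if (feedback->flags & VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT)
          printf("pipeline creation took %" PRIu64 " ns\n", feedback->duration);
       /* with the flags zeroed by the code above, this prints nothing on
        * Venus, exactly as if the driver had never written the feedback */
    }
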
1547 return flags2 ? flags2->flags : flags; in vn_pipeline_create_flags2()
1560 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_CreateGraphicsPipelines()
1564 /* silence -Wmaybe-uninitialized false alarm on release build with gcc */ in vn_CreateGraphicsPipelines()
1572 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateGraphicsPipelines()
1587 &pipeline->state, &fix_descs[i]); in vn_CreateGraphicsPipelines()
1596 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateGraphicsPipelines()
1603 if (layout && (layout->push_descriptor_set_layout || in vn_CreateGraphicsPipelines()
1604 layout->has_push_constant_ranges)) { in vn_CreateGraphicsPipelines()
1605 pipeline->layout = vn_pipeline_layout_ref(dev, layout); in vn_CreateGraphicsPipelines()
1617 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateGraphicsPipelines()
1620 if (want_sync || target_ring != dev->primary_ring) { in vn_CreateGraphicsPipelines()
1621 if (target_ring == dev->primary_ring) { in vn_CreateGraphicsPipelines()
1640 return vn_result(dev->instance, result); in vn_CreateGraphicsPipelines()
1653 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_CreateComputePipelines()
1661 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateComputePipelines()
1667 if (layout->push_descriptor_set_layout || in vn_CreateComputePipelines()
1668 layout->has_push_constant_ranges) { in vn_CreateComputePipelines()
1669 pipeline->layout = vn_pipeline_layout_ref(dev, layout); in vn_CreateComputePipelines()
1684 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateComputePipelines()
1687 if (want_sync || target_ring != dev->primary_ring) { in vn_CreateComputePipelines()
1701 return vn_result(dev->instance, result); in vn_CreateComputePipelines()
1712 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_DestroyPipeline()
1717 if (pipeline->layout) { in vn_DestroyPipeline()
1718 vn_pipeline_layout_unref(dev, pipeline->layout); in vn_DestroyPipeline()
1721 vn_async_vkDestroyPipeline(dev->primary_ring, device, _pipeline, NULL); in vn_DestroyPipeline()
1723 vn_object_base_fini(&pipeline->base); in vn_DestroyPipeline()