/*
 * Copyright © 2022 Collabora, LTD
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
23
24 #ifndef VK_PIPELINE_H
25 #define VK_PIPELINE_H
26
27 #include "vk_object.h"
28 #include "vk_util.h"
29
30 #include <stdbool.h>
31
32 struct nir_shader;
33 struct nir_shader_compiler_options;
34 struct spirv_to_nir_options;
35 struct vk_command_buffer;
36 struct vk_device;
37
38 #ifdef __cplusplus
39 extern "C" {
40 #endif
41
/* Mesa-private sType value identifying VkPipelineShaderStageNirCreateInfoMESA
 * when chained into VkPipelineShaderStageCreateInfo::pNext.
 * NOTE(review): 1000290001 presumably sits in a registered-extension-style
 * numbering range chosen to avoid collisions with real Vulkan sTypes —
 * confirm against vk.xml before reusing the value elsewhere.
 */
#define VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NIR_CREATE_INFO_MESA \
   (VkStructureType)1000290001

/* sType-to-struct mapping consumed by the vk_find_struct() machinery in
 * vk_util.h (the "_cast" suffix is that header's lookup convention).
 */
#define VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NIR_CREATE_INFO_MESA_cast \
   VkPipelineShaderStageNirCreateInfoMESA

/* Chain this struct into VkPipelineShaderStageCreateInfo::pNext to supply
 * the stage as an already-built nir_shader rather than a SPIR-V module.
 */
typedef struct VkPipelineShaderStageNirCreateInfoMESA {
   VkStructureType sType;    /* Must be ..._NIR_CREATE_INFO_MESA above */
   const void *pNext;
   struct nir_shader *nir;   /* NOTE(review): ownership not documented here;
                              * confirm who frees it in the implementation.
                              */
} VkPipelineShaderStageNirCreateInfoMESA;
53
/** Returns true if the given shader stage carries no usable shader payload.
 *
 * NOTE(review): the exact criteria (no module, no chained NIR, etc.) live in
 * the implementation file — confirm there before relying on specifics.
 */
bool
vk_pipeline_shader_stage_is_null(const VkPipelineShaderStageCreateInfo *info);
56
/** Compiles a VkPipelineShaderStageCreateInfo to a nir_shader.
 *
 * On success, stores the resulting shader in *nir_out and returns VK_SUCCESS.
 *
 * NOTE(review): presumably honors a chained
 * VkPipelineShaderStageNirCreateInfoMESA (pre-built NIR) and otherwise runs
 * spirv_to_nir with @spirv_options / @nir_options, with the result allocated
 * off @mem_ctx — confirm against the implementation.
 */
VkResult
vk_pipeline_shader_stage_to_nir(struct vk_device *device,
                                const VkPipelineShaderStageCreateInfo *info,
                                const struct spirv_to_nir_options *spirv_options,
                                const struct nir_shader_compiler_options *nir_options,
                                void *mem_ctx, struct nir_shader **nir_out);
63
/** Returns the gl_subgroup_size to compile a shader stage with.
 *
 * @param spirv_version  SPIR-V version of the shader being compiled
 * @param stage          The Mesa shader stage
 * @param info_pNext     pNext chain of the stage create-info; presumably
 *                       searched for a required-subgroup-size struct —
 *                       confirm in the implementation
 * @param allow_varying  Whether a varying subgroup size is permitted
 * @param require_full   Whether full subgroups are required
 */
enum gl_subgroup_size
vk_get_subgroup_size(uint32_t spirv_version,
                     gl_shader_stage stage,
                     const void *info_pNext,
                     bool allow_varying,
                     bool require_full);
70
/* Resolved per-stage robustness behavior, one field per resource class
 * covered by VK_EXT_pipeline_robustness.  Filled in by
 * vk_pipeline_robustness_state_fill().
 */
struct vk_pipeline_robustness_state {
   VkPipelineRobustnessBufferBehaviorEXT storage_buffers;
   VkPipelineRobustnessBufferBehaviorEXT uniform_buffers;
   VkPipelineRobustnessBufferBehaviorEXT vertex_inputs;
   VkPipelineRobustnessImageBehaviorEXT images;
};
77
/** Hash VkPipelineShaderStageCreateInfo info
 *
 * Returns the hash of a VkPipelineShaderStageCreateInfo:
 *    SHA1(info->module->sha1,
 *         info->pName,
 *         vk_stage_to_mesa_stage(info->stage),
 *         info->pSpecializationInfo)
 *
 * Can only be used if VkPipelineShaderStageCreateInfo::module is a
 * vk_shader_module object.
 *
 * NOTE(review): @rstate is presumably also folded into the hash (robustness
 * affects compilation) but the comment above predates it — confirm in the
 * implementation.  @stage_sha1 must point to at least SHA1_DIGEST_LENGTH
 * (20) bytes of storage.
 */
void
vk_pipeline_hash_shader_stage(const VkPipelineShaderStageCreateInfo *info,
                              const struct vk_pipeline_robustness_state *rstate,
                              unsigned char *stage_sha1);
93
/** Fills @rs with the effective robustness behaviors for one shader stage.
 *
 * Both the pipeline-level and the stage-level pNext chains are passed in;
 * presumably a stage-level VkPipelineRobustnessCreateInfoEXT takes
 * precedence over a pipeline-level one, falling back to device defaults —
 * confirm against the implementation.
 */
void
vk_pipeline_robustness_state_fill(const struct vk_device *device,
                                  struct vk_pipeline_robustness_state *rs,
                                  const void *pipeline_pNext,
                                  const void *shader_stage_pNext);
99
/** Returns the effective create flags of a compute pipeline.
 *
 * A chained VkPipelineCreateFlags2CreateInfoKHR (VK_KHR_maintenance5)
 * supersedes the legacy 32-bit VkComputePipelineCreateInfo::flags field.
 */
static inline VkPipelineCreateFlags2KHR
vk_compute_pipeline_create_flags(const VkComputePipelineCreateInfo *info)
{
   const VkPipelineCreateFlags2CreateInfoKHR *flags2 =
      vk_find_struct_const(info->pNext,
                           PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR);
   if (flags2)
      return flags2->flags;
   else
      return info->flags;
}
111
112 static inline VkPipelineCreateFlags2KHR
vk_graphics_pipeline_create_flags(const VkGraphicsPipelineCreateInfo * info)113 vk_graphics_pipeline_create_flags(const VkGraphicsPipelineCreateInfo *info)
114 {
115 const VkPipelineCreateFlags2CreateInfoKHR *flags2 =
116 vk_find_struct_const(info->pNext,
117 PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR);
118 if (flags2)
119 return flags2->flags;
120 else
121 return info->flags;
122 }
123
124 static inline VkPipelineCreateFlags2KHR
vk_rt_pipeline_create_flags(const VkRayTracingPipelineCreateInfoKHR * info)125 vk_rt_pipeline_create_flags(const VkRayTracingPipelineCreateInfoKHR *info)
126 {
127 const VkPipelineCreateFlags2CreateInfoKHR *flags2 =
128 vk_find_struct_const(info->pNext,
129 PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR);
130 if (flags2)
131 return flags2->flags;
132 else
133 return info->flags;
134 }
135
#ifdef VK_ENABLE_BETA_EXTENSIONS
/** Returns the effective create flags of an execution-graph pipeline.
 *
 * A chained VkPipelineCreateFlags2CreateInfoKHR (VK_KHR_maintenance5)
 * supersedes the legacy 32-bit
 * VkExecutionGraphPipelineCreateInfoAMDX::flags field.
 */
static inline VkPipelineCreateFlags2KHR
vk_graph_pipeline_create_flags(const VkExecutionGraphPipelineCreateInfoAMDX *info)
{
   const VkPipelineCreateFlags2CreateInfoKHR *flags2_info =
      vk_find_struct_const(info->pNext,
                           PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR);

   return flags2_info != NULL ? flags2_info->flags : info->flags;
}
#endif
149
struct vk_pipeline_ops;

/* Base struct for driver pipeline objects managed by the common runtime.
 * Drivers embed this at the start of their own pipeline struct and allocate
 * it via vk_pipeline_zalloc().
 */
struct vk_pipeline {
   struct vk_object_base base;

   /* Driver-provided virtual table; see struct vk_pipeline_ops below */
   const struct vk_pipeline_ops *ops;

   VkPipelineBindPoint bind_point;
   VkPipelineCreateFlags2KHR flags;
};

/* VkPipeline <-> vk_pipeline handle conversion helpers */
VK_DEFINE_NONDISP_HANDLE_CASTS(vk_pipeline, base, VkPipeline,
                               VK_OBJECT_TYPE_PIPELINE);
163
/* Virtual table a driver fills in for each vk_pipeline it creates.
 * The get_executable_* hooks back the VK_KHR_pipeline_executable_properties
 * queries and follow the usual Vulkan two-call count/fill convention —
 * NOTE(review): confirm the two-call handling in each driver's
 * implementation.
 */
struct vk_pipeline_ops {
   /* Destroys the pipeline and frees its memory via @pAllocator */
   void (*destroy)(struct vk_device *device,
                   struct vk_pipeline *pipeline,
                   const VkAllocationCallbacks *pAllocator);

   /* vkGetPipelineExecutablePropertiesKHR */
   VkResult (*get_executable_properties)(struct vk_device *device,
                                         struct vk_pipeline *pipeline,
                                         uint32_t *executable_count,
                                         VkPipelineExecutablePropertiesKHR *properties);

   /* vkGetPipelineExecutableStatisticsKHR for one executable */
   VkResult (*get_executable_statistics)(struct vk_device *device,
                                         struct vk_pipeline *pipeline,
                                         uint32_t executable_index,
                                         uint32_t *statistic_count,
                                         VkPipelineExecutableStatisticKHR *statistics);

   /* vkGetPipelineExecutableInternalRepresentationsKHR for one executable */
   VkResult (*get_internal_representations)(
      struct vk_device *device,
      struct vk_pipeline *pipeline,
      uint32_t executable_index,
      uint32_t *internal_representation_count,
      VkPipelineExecutableInternalRepresentationKHR* internal_representations);

   /* Binds the pipeline on a command buffer (vkCmdBindPipeline path) */
   void (*cmd_bind)(struct vk_command_buffer *cmd_buffer,
                    struct vk_pipeline *pipeline);
};
190
/** Allocates and zeroes a driver pipeline object of @size bytes.
 *
 * @size must be at least sizeof(struct vk_pipeline); the embedded base is
 * initialized with @ops, @bind_point and @flags.  Returns NULL on
 * allocation failure.  Free with vk_pipeline_free().
 */
void *vk_pipeline_zalloc(struct vk_device *device,
                         const struct vk_pipeline_ops *ops,
                         VkPipelineBindPoint bind_point,
                         VkPipelineCreateFlags2KHR flags,
                         const VkAllocationCallbacks *alloc,
                         size_t size);
197
/** Finishes the vk_pipeline base and frees memory obtained from
 * vk_pipeline_zalloc().  Counterpart of vk_pipeline_zalloc().
 */
void vk_pipeline_free(struct vk_device *device,
                      const VkAllocationCallbacks *alloc,
                      struct vk_pipeline *pipeline);
201
/** Unbinds any currently-bound pipelines covering the given shader stages
 * on @cmd_buffer.
 *
 * NOTE(review): presumably used when binding shader objects
 * (VK_EXT_shader_object) over pipeline state — confirm at the call sites.
 */
void
vk_cmd_unbind_pipelines_for_stages(struct vk_command_buffer *cmd_buffer,
                                   VkShaderStageFlags stages);
205
206 #ifdef __cplusplus
207 }
208 #endif
209
210 #endif /* VK_PIPELINE_H */
211