1 #include "vk_graphics_state.h"
2
3 #include "vk_alloc.h"
4 #include "vk_command_buffer.h"
5 #include "vk_common_entrypoints.h"
6 #include "vk_device.h"
7 #include "vk_log.h"
8 #include "vk_render_pass.h"
9 #include "vk_standard_sample_locations.h"
10 #include "vk_util.h"
11
12 #include <assert.h>
13
14 enum mesa_vk_graphics_state_groups {
15 MESA_VK_GRAPHICS_STATE_VERTEX_INPUT_BIT = (1 << 0),
16 MESA_VK_GRAPHICS_STATE_INPUT_ASSEMBLY_BIT = (1 << 1),
17 MESA_VK_GRAPHICS_STATE_TESSELLATION_BIT = (1 << 2),
18 MESA_VK_GRAPHICS_STATE_VIEWPORT_BIT = (1 << 3),
19 MESA_VK_GRAPHICS_STATE_DISCARD_RECTANGLES_BIT = (1 << 4),
20 MESA_VK_GRAPHICS_STATE_RASTERIZATION_BIT = (1 << 5),
21 MESA_VK_GRAPHICS_STATE_FRAGMENT_SHADING_RATE_BIT = (1 << 6),
22 MESA_VK_GRAPHICS_STATE_MULTISAMPLE_BIT = (1 << 7),
23 MESA_VK_GRAPHICS_STATE_DEPTH_STENCIL_BIT = (1 << 8),
24 MESA_VK_GRAPHICS_STATE_COLOR_BLEND_BIT = (1 << 9),
25 MESA_VK_GRAPHICS_STATE_RENDER_PASS_BIT = (1 << 10),
26 };
27
28 static void
29 clear_all_dynamic_state(BITSET_WORD *dynamic)
30 {
31 /* Clear the whole array so there are no undefined bits at the top */
32 memset(dynamic, 0, sizeof(*dynamic) *
33 BITSET_WORDS(MESA_VK_DYNAMIC_GRAPHICS_STATE_ENUM_MAX));
34 }
35
36 static void
37 get_dynamic_state_groups(BITSET_WORD *dynamic,
38 enum mesa_vk_graphics_state_groups groups)
39 {
40 clear_all_dynamic_state(dynamic);
41
42 if (groups & MESA_VK_GRAPHICS_STATE_VERTEX_INPUT_BIT) {
43 BITSET_SET(dynamic, MESA_VK_DYNAMIC_VI);
44 BITSET_SET(dynamic, MESA_VK_DYNAMIC_VI_BINDING_STRIDES);
45 }
46
47 if (groups & MESA_VK_GRAPHICS_STATE_INPUT_ASSEMBLY_BIT) {
48 BITSET_SET(dynamic, MESA_VK_DYNAMIC_IA_PRIMITIVE_TOPOLOGY);
49 BITSET_SET(dynamic, MESA_VK_DYNAMIC_IA_PRIMITIVE_RESTART_ENABLE);
50 }
51
52 if (groups & MESA_VK_GRAPHICS_STATE_TESSELLATION_BIT)
53 BITSET_SET(dynamic, MESA_VK_DYNAMIC_TS_PATCH_CONTROL_POINTS);
54
55 if (groups & MESA_VK_GRAPHICS_STATE_VIEWPORT_BIT) {
56 BITSET_SET(dynamic, MESA_VK_DYNAMIC_VP_VIEWPORT_COUNT);
57 BITSET_SET(dynamic, MESA_VK_DYNAMIC_VP_VIEWPORTS);
58 BITSET_SET(dynamic, MESA_VK_DYNAMIC_VP_SCISSOR_COUNT);
59 BITSET_SET(dynamic, MESA_VK_DYNAMIC_VP_SCISSORS);
60 }
61
62 if (groups & MESA_VK_GRAPHICS_STATE_DISCARD_RECTANGLES_BIT)
63 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DR_RECTANGLES);
64
65 if (groups & MESA_VK_GRAPHICS_STATE_RASTERIZATION_BIT) {
66 BITSET_SET(dynamic, MESA_VK_DYNAMIC_RS_RASTERIZER_DISCARD_ENABLE);
67 BITSET_SET(dynamic, MESA_VK_DYNAMIC_RS_CULL_MODE);
68 BITSET_SET(dynamic, MESA_VK_DYNAMIC_RS_FRONT_FACE);
69 BITSET_SET(dynamic, MESA_VK_DYNAMIC_RS_DEPTH_BIAS_ENABLE);
70 BITSET_SET(dynamic, MESA_VK_DYNAMIC_RS_DEPTH_BIAS_FACTORS);
71 BITSET_SET(dynamic, MESA_VK_DYNAMIC_RS_LINE_WIDTH);
72 BITSET_SET(dynamic, MESA_VK_DYNAMIC_RS_LINE_STIPPLE);
73 }
74
75 if (groups & MESA_VK_GRAPHICS_STATE_FRAGMENT_SHADING_RATE_BIT)
76 BITSET_SET(dynamic, MESA_VK_DYNAMIC_FSR);
77
78 if (groups & MESA_VK_GRAPHICS_STATE_MULTISAMPLE_BIT)
79 BITSET_SET(dynamic, MESA_VK_DYNAMIC_MS_SAMPLE_LOCATIONS);
80
81 if (groups & MESA_VK_GRAPHICS_STATE_DEPTH_STENCIL_BIT) {
82 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_TEST_ENABLE);
83 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_WRITE_ENABLE);
84 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_COMPARE_OP);
85 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_BOUNDS_TEST_ENABLE);
86 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_BOUNDS_TEST_BOUNDS);
87 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_TEST_ENABLE);
88 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_OP);
89 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_COMPARE_MASK);
90 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_WRITE_MASK);
91 BITSET_SET(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_REFERENCE);
92 }
93
94 if (groups & MESA_VK_GRAPHICS_STATE_COLOR_BLEND_BIT) {
95 BITSET_SET(dynamic, MESA_VK_DYNAMIC_CB_LOGIC_OP);
96 BITSET_SET(dynamic, MESA_VK_DYNAMIC_CB_COLOR_WRITE_ENABLES);
97 BITSET_SET(dynamic, MESA_VK_DYNAMIC_CB_BLEND_CONSTANTS);
98 }
99 }
100
101 static enum mesa_vk_graphics_state_groups
102 fully_dynamic_state_groups(const BITSET_WORD *dynamic)
103 {
104 enum mesa_vk_graphics_state_groups groups = 0;
105
106 if (BITSET_TEST(dynamic, MESA_VK_DYNAMIC_VI))
107 groups |= MESA_VK_GRAPHICS_STATE_VERTEX_INPUT_BIT;
108
109 if (BITSET_TEST(dynamic, MESA_VK_DYNAMIC_IA_PRIMITIVE_TOPOLOGY) &&
110 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_IA_PRIMITIVE_RESTART_ENABLE))
111 groups |= MESA_VK_GRAPHICS_STATE_INPUT_ASSEMBLY_BIT;
112
113 if (BITSET_TEST(dynamic, MESA_VK_DYNAMIC_FSR))
114 groups |= MESA_VK_GRAPHICS_STATE_FRAGMENT_SHADING_RATE_BIT;
115
116 if (BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_TEST_ENABLE) &&
117 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_WRITE_ENABLE) &&
118 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_COMPARE_OP) &&
119 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_BOUNDS_TEST_ENABLE) &&
120 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_DEPTH_BOUNDS_TEST_BOUNDS) &&
121 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_TEST_ENABLE) &&
122 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_OP) &&
123 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_COMPARE_MASK) &&
124 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_WRITE_MASK) &&
125 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_DS_STENCIL_REFERENCE))
126 groups |= MESA_VK_GRAPHICS_STATE_DEPTH_STENCIL_BIT;
127
128 return groups;
129 }
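
/* Note: a group is reported as fully dynamic above only when enough of its
 * state is dynamic that no static copy of the group is needed at all.
 * vk_graphics_pipeline_state_fill() uses this to skip allocating such
 * groups entirely.
 */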
130
131 static void
132 validate_dynamic_state_groups(const BITSET_WORD *dynamic,
133 enum mesa_vk_graphics_state_groups groups)
134 {
135 #ifndef NDEBUG
136 BITSET_DECLARE(all_dynamic, MESA_VK_DYNAMIC_GRAPHICS_STATE_ENUM_MAX);
137 get_dynamic_state_groups(all_dynamic, groups);
138
139 for (uint32_t w = 0; w < ARRAY_SIZE(all_dynamic); w++)
140 assert(!(dynamic[w] & ~all_dynamic[w]));
141 #endif
142 }
143
144 void
145 vk_get_dynamic_graphics_states(BITSET_WORD *dynamic,
146 const VkPipelineDynamicStateCreateInfo *info)
147 {
148 clear_all_dynamic_state(dynamic);
149
150 /* From the Vulkan 1.3.218 spec:
151 *
152 * "pDynamicState is a pointer to a VkPipelineDynamicStateCreateInfo
153 * structure defining which properties of the pipeline state object are
154 * dynamic and can be changed independently of the pipeline state. This
155 * can be NULL, which means no state in the pipeline is considered
156 * dynamic."
157 */
158 if (info == NULL)
159 return;
160
161 #define CASE(VK, MESA) \
162 case VK_DYNAMIC_STATE_##VK: \
163 BITSET_SET(dynamic, MESA_VK_DYNAMIC_##MESA); \
164 break;
165
166 #define CASE2(VK, MESA1, MESA2) \
167 case VK_DYNAMIC_STATE_##VK: \
168 BITSET_SET(dynamic, MESA_VK_DYNAMIC_##MESA1); \
169 BITSET_SET(dynamic, MESA_VK_DYNAMIC_##MESA2); \
170 break;
171
172 for (uint32_t i = 0; i < info->dynamicStateCount; i++) {
173 switch (info->pDynamicStates[i]) {
174 CASE2(VERTEX_INPUT_EXT, VI, VI_BINDING_STRIDES)
175 CASE( VERTEX_INPUT_BINDING_STRIDE, VI_BINDING_STRIDES)
176 CASE( VIEWPORT, VP_VIEWPORTS)
177 CASE( SCISSOR, VP_SCISSORS)
178 CASE( LINE_WIDTH, RS_LINE_WIDTH)
179 CASE( DEPTH_BIAS, RS_DEPTH_BIAS_FACTORS)
180 CASE( BLEND_CONSTANTS, CB_BLEND_CONSTANTS)
181 CASE( DEPTH_BOUNDS, DS_DEPTH_BOUNDS_TEST_BOUNDS)
182 CASE( STENCIL_COMPARE_MASK, DS_STENCIL_COMPARE_MASK)
183 CASE( STENCIL_WRITE_MASK, DS_STENCIL_WRITE_MASK)
184 CASE( STENCIL_REFERENCE, DS_STENCIL_REFERENCE)
185 CASE( CULL_MODE, RS_CULL_MODE)
186 CASE( FRONT_FACE, RS_FRONT_FACE)
187 CASE( PRIMITIVE_TOPOLOGY, IA_PRIMITIVE_TOPOLOGY)
188 CASE2(VIEWPORT_WITH_COUNT, VP_VIEWPORT_COUNT, VP_VIEWPORTS)
189 CASE2(SCISSOR_WITH_COUNT, VP_SCISSOR_COUNT, VP_SCISSORS)
190 CASE( DEPTH_TEST_ENABLE, DS_DEPTH_TEST_ENABLE)
191 CASE( DEPTH_WRITE_ENABLE, DS_DEPTH_WRITE_ENABLE)
192 CASE( DEPTH_COMPARE_OP, DS_DEPTH_COMPARE_OP)
193 CASE( DEPTH_BOUNDS_TEST_ENABLE, DS_DEPTH_BOUNDS_TEST_ENABLE)
194 CASE( STENCIL_TEST_ENABLE, DS_STENCIL_TEST_ENABLE)
195 CASE( STENCIL_OP, DS_STENCIL_OP)
196 CASE( RASTERIZER_DISCARD_ENABLE, RS_RASTERIZER_DISCARD_ENABLE)
197 CASE( DEPTH_BIAS_ENABLE, RS_DEPTH_BIAS_ENABLE)
198 CASE( PRIMITIVE_RESTART_ENABLE, IA_PRIMITIVE_RESTART_ENABLE)
199 CASE( DISCARD_RECTANGLE_EXT, DR_RECTANGLES)
200 CASE( SAMPLE_LOCATIONS_EXT, MS_SAMPLE_LOCATIONS)
201 CASE( FRAGMENT_SHADING_RATE_KHR, FSR)
202 CASE( LINE_STIPPLE_EXT, RS_LINE_STIPPLE)
203 CASE( PATCH_CONTROL_POINTS_EXT, TS_PATCH_CONTROL_POINTS)
204 CASE( LOGIC_OP_EXT, CB_LOGIC_OP)
205 CASE( COLOR_WRITE_ENABLE_EXT, CB_COLOR_WRITE_ENABLES)
206 default:
207 unreachable("Unsupported dynamic graphics state");
208 }
209 }
210 }
211
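/* A minimal caller-side sketch (hypothetical driver code, not part of this
 * file): the bitset is typically declared with BITSET_DECLARE and queried
 * with BITSET_TEST (or the IS_DYNAMIC()/IS_NEEDED() helpers below).
 *
 *    BITSET_DECLARE(dynamic, MESA_VK_DYNAMIC_GRAPHICS_STATE_ENUM_MAX);
 *    vk_get_dynamic_graphics_states(dynamic, pCreateInfo->pDynamicState);
 *    if (BITSET_TEST(dynamic, MESA_VK_DYNAMIC_VP_VIEWPORTS)) {
 *       // Viewports come from vkCmdSetViewport*, not from the pipeline.
 *    }
 */
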
212 #define IS_DYNAMIC(STATE) \
213 BITSET_TEST(dynamic, MESA_VK_DYNAMIC_##STATE)
214
215 #define IS_NEEDED(STATE) \
216 BITSET_TEST(needed, MESA_VK_DYNAMIC_##STATE)
217
218 static void
219 vk_vertex_input_state_init(struct vk_vertex_input_state *vi,
220 const BITSET_WORD *dynamic,
221 const VkPipelineVertexInputStateCreateInfo *vi_info)
222 {
223 assert(!IS_DYNAMIC(VI));
224
225 memset(vi, 0, sizeof(*vi));
226
227 for (uint32_t i = 0; i < vi_info->vertexBindingDescriptionCount; i++) {
228 const VkVertexInputBindingDescription *desc =
229 &vi_info->pVertexBindingDescriptions[i];
230
231 assert(desc->binding < MESA_VK_MAX_VERTEX_BINDINGS);
232 assert(desc->stride <= MESA_VK_MAX_VERTEX_BINDING_STRIDE);
233 assert(desc->inputRate <= 1);
234
235 const uint32_t b = desc->binding;
236 vi->bindings_valid |= BITFIELD_BIT(b);
237 vi->bindings[b].stride = desc->stride;
238 vi->bindings[b].input_rate = desc->inputRate;
239 vi->bindings[b].divisor = 1;
240 }
241
242 for (uint32_t i = 0; i < vi_info->vertexAttributeDescriptionCount; i++) {
243 const VkVertexInputAttributeDescription *desc =
244 &vi_info->pVertexAttributeDescriptions[i];
245
246 assert(desc->location < MESA_VK_MAX_VERTEX_ATTRIBUTES);
247 assert(desc->binding < MESA_VK_MAX_VERTEX_BINDINGS);
248 assert(vi->bindings_valid & BITFIELD_BIT(desc->binding));
249
250 const uint32_t a = desc->location;
251 vi->attributes_valid |= BITFIELD_BIT(a);
252 vi->attributes[a].binding = desc->binding;
253 vi->attributes[a].format = desc->format;
254 vi->attributes[a].offset = desc->offset;
255 }
256
257 const VkPipelineVertexInputDivisorStateCreateInfoEXT *vi_div_state =
258 vk_find_struct_const(vi_info->pNext,
259 PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT);
260 if (vi_div_state) {
261 for (uint32_t i = 0; i < vi_div_state->vertexBindingDivisorCount; i++) {
262 const VkVertexInputBindingDivisorDescriptionEXT *desc =
263 &vi_div_state->pVertexBindingDivisors[i];
264
265 assert(desc->binding < MESA_VK_MAX_VERTEX_BINDINGS);
266 assert(vi->bindings_valid & BITFIELD_BIT(desc->binding));
267
268 const uint32_t b = desc->binding;
269 vi->bindings[b].divisor = desc->divisor;
270 }
271 }
272 }
273
274 static void
275 vk_dynamic_graphics_state_init_vi(struct vk_dynamic_graphics_state *dst,
276 const BITSET_WORD *needed,
277 const struct vk_vertex_input_state *vi)
278 {
279 if (IS_NEEDED(VI))
280 *dst->vi = *vi;
281
282 if (IS_NEEDED(VI_BINDING_STRIDES)) {
283 for (uint32_t b = 0; b < MESA_VK_MAX_VERTEX_BINDINGS; b++) {
284 if (vi->bindings_valid & BITFIELD_BIT(b))
285 dst->vi_binding_strides[b] = vi->bindings[b].stride;
286 else
287 dst->vi_binding_strides[b] = 0;
288 }
289 }
290 }
291
292 static void
293 vk_input_assembly_state_init(struct vk_input_assembly_state *ia,
294 const BITSET_WORD *dynamic,
295 const VkPipelineInputAssemblyStateCreateInfo *ia_info)
296 {
297 if (IS_DYNAMIC(IA_PRIMITIVE_TOPOLOGY)) {
298 ia->primitive_topology = -1;
299 } else {
300 assert(ia_info->topology <= UINT8_MAX);
301 ia->primitive_topology = ia_info->topology;
302 }
303
304 ia->primitive_restart_enable = ia_info->primitiveRestartEnable;
305 }
306
307 static void
308 vk_dynamic_graphics_state_init_ia(struct vk_dynamic_graphics_state *dst,
309 const BITSET_WORD *needed,
310 const struct vk_input_assembly_state *ia)
311 {
312 dst->ia = *ia;
313 }
314
315 static void
316 vk_tessellation_state_init(struct vk_tessellation_state *ts,
317 const BITSET_WORD *dynamic,
318 const VkPipelineTessellationStateCreateInfo *ts_info)
319 {
320 if (IS_DYNAMIC(TS_PATCH_CONTROL_POINTS)) {
321 ts->patch_control_points = 0;
322 } else {
323 assert(ts_info->patchControlPoints <= UINT8_MAX);
324 ts->patch_control_points = ts_info->patchControlPoints;
325 }
326
327 const VkPipelineTessellationDomainOriginStateCreateInfo *ts_do_info =
328 vk_find_struct_const(ts_info->pNext,
329 PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO);
330 if (ts_do_info != NULL) {
331 assert(ts_do_info->domainOrigin <= UINT8_MAX);
332 ts->domain_origin = ts_do_info->domainOrigin;
333 } else {
334 ts->domain_origin = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT;
335 }
336 }
337
338 static void
339 vk_dynamic_graphics_state_init_ts(struct vk_dynamic_graphics_state *dst,
340 const BITSET_WORD *needed,
341 const struct vk_tessellation_state *ts)
342 {
343 dst->ts.patch_control_points = ts->patch_control_points;
344 }
345
346 static void
347 vk_viewport_state_init(struct vk_viewport_state *vp,
348 const BITSET_WORD *dynamic,
349 const VkPipelineViewportStateCreateInfo *vp_info)
350 {
351 memset(vp, 0, sizeof(*vp));
352
353 if (!IS_DYNAMIC(VP_VIEWPORT_COUNT)) {
354 assert(vp_info->viewportCount <= MESA_VK_MAX_VIEWPORTS);
355 vp->viewport_count = vp_info->viewportCount;
356 }
357
358 if (!IS_DYNAMIC(VP_VIEWPORTS)) {
359 assert(!IS_DYNAMIC(VP_VIEWPORT_COUNT));
360 typed_memcpy(vp->viewports, vp_info->pViewports,
361 vp_info->viewportCount);
362 }
363
364 if (!IS_DYNAMIC(VP_SCISSOR_COUNT)) {
365 assert(vp_info->scissorCount <= MESA_VK_MAX_SCISSORS);
366 vp->scissor_count = vp_info->scissorCount;
367 }
368
369 if (!IS_DYNAMIC(VP_SCISSORS)) {
370 assert(!IS_DYNAMIC(VP_SCISSOR_COUNT));
371 typed_memcpy(vp->scissors, vp_info->pScissors,
372 vp_info->scissorCount);
373 }
374
375 const VkPipelineViewportDepthClipControlCreateInfoEXT *vp_dcc_info =
376 vk_find_struct_const(vp_info->pNext,
377 PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT);
378 if (vp_dcc_info != NULL)
379 vp->negative_one_to_one = vp_dcc_info->negativeOneToOne;
380 }
381
382 static void
383 vk_dynamic_graphics_state_init_vp(struct vk_dynamic_graphics_state *dst,
384 const BITSET_WORD *needed,
385 const struct vk_viewport_state *vp)
386 {
387 dst->vp.viewport_count = vp->viewport_count;
388 if (IS_NEEDED(VP_VIEWPORTS))
389 typed_memcpy(dst->vp.viewports, vp->viewports, vp->viewport_count);
390
391 dst->vp.scissor_count = vp->scissor_count;
392 if (IS_NEEDED(VP_SCISSORS))
393 typed_memcpy(dst->vp.scissors, vp->scissors, vp->scissor_count);
394 }
395
396 static void
397 vk_discard_rectangles_state_init(struct vk_discard_rectangles_state *dr,
398 const BITSET_WORD *dynamic,
399 const VkPipelineDiscardRectangleStateCreateInfoEXT *dr_info)
400 {
401 memset(dr, 0, sizeof(*dr));
402
403 if (dr_info == NULL)
404 return;
405
406 dr->mode = dr_info->discardRectangleMode;
407
408 if (!IS_DYNAMIC(DR_RECTANGLES)) {
409 assert(dr_info->discardRectangleCount <= MESA_VK_MAX_DISCARD_RECTANGLES);
410 dr->rectangle_count = dr_info->discardRectangleCount;
411 typed_memcpy(dr->rectangles, dr_info->pDiscardRectangles,
412 dr_info->discardRectangleCount);
413 }
414 }
415
416 static void
417 vk_dynamic_graphics_state_init_dr(struct vk_dynamic_graphics_state *dst,
418 const BITSET_WORD *needed,
419 const struct vk_discard_rectangles_state *dr)
420 {
421 dst->dr.rectangle_count = dr->rectangle_count;
422 typed_memcpy(dst->dr.rectangles, dr->rectangles, dr->rectangle_count);
423 }
424
425 static void
426 vk_rasterization_state_init(struct vk_rasterization_state *rs,
427 const BITSET_WORD *dynamic,
428 const VkPipelineRasterizationStateCreateInfo *rs_info)
429 {
430 *rs = (struct vk_rasterization_state) {
431 .rasterizer_discard_enable = false,
432 .conservative_mode = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
433 .rasterization_order_amd = VK_RASTERIZATION_ORDER_STRICT_AMD,
434 .provoking_vertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT,
435 .line.mode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
436 };
437
438 if (!IS_DYNAMIC(RS_RASTERIZER_DISCARD_ENABLE))
439 rs->rasterizer_discard_enable = rs_info->rasterizerDiscardEnable;
440
441 /* From the Vulkan 1.3.218 spec:
442 *
443 * "If VkPipelineRasterizationDepthClipStateCreateInfoEXT is present in
444 * the graphics pipeline state then depth clipping is disabled if
445 * VkPipelineRasterizationDepthClipStateCreateInfoEXT::depthClipEnable
446 * is VK_FALSE. Otherwise, if
447 * VkPipelineRasterizationDepthClipStateCreateInfoEXT is not present,
448 * depth clipping is disabled when
449 * VkPipelineRasterizationStateCreateInfo::depthClampEnable is VK_TRUE."
450 */
451 rs->depth_clamp_enable = rs_info->depthClampEnable;
452 rs->depth_clip_enable = !rs_info->depthClampEnable;
453
454 rs->polygon_mode = rs_info->polygonMode;
455
456 rs->cull_mode = rs_info->cullMode;
457 rs->front_face = rs_info->frontFace;
458 rs->depth_bias.enable = rs_info->depthBiasEnable;
459 if ((rs_info->depthBiasEnable || IS_DYNAMIC(RS_DEPTH_BIAS_ENABLE)) &&
460 !IS_DYNAMIC(RS_DEPTH_BIAS_FACTORS)) {
461 rs->depth_bias.constant = rs_info->depthBiasConstantFactor;
462 rs->depth_bias.clamp = rs_info->depthBiasClamp;
463 rs->depth_bias.slope = rs_info->depthBiasSlopeFactor;
464 }
465 rs->line.width = rs_info->lineWidth;
466
467 vk_foreach_struct_const(ext, rs_info->pNext) {
468 switch (ext->sType) {
469 case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: {
470 const VkPipelineRasterizationConservativeStateCreateInfoEXT *rcs_info =
471 (const VkPipelineRasterizationConservativeStateCreateInfoEXT *)ext;
472 rs->conservative_mode = rcs_info->conservativeRasterizationMode;
473 break;
474 }
475
476 case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT: {
477 const VkPipelineRasterizationDepthClipStateCreateInfoEXT *rdc_info =
478 (const VkPipelineRasterizationDepthClipStateCreateInfoEXT *)ext;
479 rs->depth_clip_enable = rdc_info->depthClipEnable;
480 break;
481 }
482
483 case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT: {
484 const VkPipelineRasterizationLineStateCreateInfoEXT *rl_info =
485 (const VkPipelineRasterizationLineStateCreateInfoEXT *)ext;
486 rs->line.mode = rl_info->lineRasterizationMode;
487 rs->line.stipple.enable = rl_info->stippledLineEnable;
488 if (rs->line.stipple.enable && !IS_DYNAMIC(RS_LINE_STIPPLE)) {
489 rs->line.stipple.factor = rl_info->lineStippleFactor;
490 rs->line.stipple.pattern = rl_info->lineStipplePattern;
491 }
492 break;
493 }
494
495 case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT: {
496 const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *rpv_info =
497 (const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *)ext;
498 rs->provoking_vertex = rpv_info->provokingVertexMode;
499 break;
500 }
501
502 case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD: {
503 const VkPipelineRasterizationStateRasterizationOrderAMD *rro_info =
504 (const VkPipelineRasterizationStateRasterizationOrderAMD *)ext;
505 rs->rasterization_order_amd = rro_info->rasterizationOrder;
506 break;
507 }
508
509 case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT: {
510 const VkPipelineRasterizationStateStreamCreateInfoEXT *rss_info =
511 (const VkPipelineRasterizationStateStreamCreateInfoEXT *)ext;
512 rs->rasterization_stream = rss_info->rasterizationStream;
513 break;
514 }
515
516 default:
517 break;
518 }
519 }
520 }
521
522 static void
523 vk_dynamic_graphics_state_init_rs(struct vk_dynamic_graphics_state *dst,
524 const BITSET_WORD *needed,
525 const struct vk_rasterization_state *rs)
526 {
527 dst->rs.rasterizer_discard_enable = rs->rasterizer_discard_enable;
528 dst->rs.cull_mode = rs->cull_mode;
529 dst->rs.front_face = rs->front_face;
530 dst->rs.depth_bias.enable = rs->depth_bias.enable;
531 dst->rs.depth_bias.constant = rs->depth_bias.constant;
532 dst->rs.depth_bias.clamp = rs->depth_bias.clamp;
533 dst->rs.depth_bias.slope = rs->depth_bias.slope;
534 dst->rs.line.width = rs->line.width;
535 dst->rs.line.stipple.factor = rs->line.stipple.factor;
536 dst->rs.line.stipple.pattern = rs->line.stipple.pattern;
537 }
538
539 static void
540 vk_fragment_shading_rate_state_init(
541 struct vk_fragment_shading_rate_state *fsr,
542 const BITSET_WORD *dynamic,
543 const VkPipelineFragmentShadingRateStateCreateInfoKHR *fsr_info)
544 {
545 if (fsr_info != NULL) {
546 fsr->fragment_size = fsr_info->fragmentSize;
547 fsr->combiner_ops[0] = fsr_info->combinerOps[0];
548 fsr->combiner_ops[1] = fsr_info->combinerOps[1];
549 } else {
550 fsr->fragment_size = (VkExtent2D) { 1, 1 };
551 fsr->combiner_ops[0] = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR;
552 fsr->combiner_ops[1] = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR;
553 }
554 }
555
556 static void
557 vk_dynamic_graphics_state_init_fsr(
558 struct vk_dynamic_graphics_state *dst,
559 const BITSET_WORD *needed,
560 const struct vk_fragment_shading_rate_state *fsr)
561 {
562 dst->fsr = *fsr;
563 }
564
565 static void
566 vk_sample_locations_state_init(struct vk_sample_locations_state *sl,
567 const VkSampleLocationsInfoEXT *sl_info)
568 {
569 sl->per_pixel = sl_info->sampleLocationsPerPixel;
570 sl->grid_size = sl_info->sampleLocationGridSize;
571
572 /* From the Vulkan 1.3.218 spec:
573 *
574 * VUID-VkSampleLocationsInfoEXT-sampleLocationsCount-01527
575 *
576 * "sampleLocationsCount must equal sampleLocationsPerPixel *
577 * sampleLocationGridSize.width * sampleLocationGridSize.height"
578 */
579 assert(sl_info->sampleLocationsCount ==
580 sl_info->sampleLocationsPerPixel *
581 sl_info->sampleLocationGridSize.width *
582 sl_info->sampleLocationGridSize.height);
583
584 assert(sl_info->sampleLocationsCount <= MESA_VK_MAX_SAMPLE_LOCATIONS);
585 typed_memcpy(sl->locations, sl_info->pSampleLocations,
586 sl_info->sampleLocationsCount);
587 }
588
589 static void
590 vk_multisample_state_init(struct vk_multisample_state *ms,
591 const BITSET_WORD *dynamic,
592 const VkPipelineMultisampleStateCreateInfo *ms_info)
593 {
594 ms->rasterization_samples = ms_info->rasterizationSamples;
595 ms->sample_shading_enable = ms_info->sampleShadingEnable;
596 ms->min_sample_shading = ms_info->minSampleShading;
597
598 /* From the Vulkan 1.3.218 spec:
599 *
600 * "If pSampleMask is NULL, it is treated as if the mask has all bits
601 * set to 1."
602 */
603 ms->sample_mask = ms_info->pSampleMask ? *ms_info->pSampleMask : ~0;
604
605 ms->alpha_to_coverage_enable = ms_info->alphaToCoverageEnable;
606 ms->alpha_to_one_enable = ms_info->alphaToOneEnable;
607
608 /* These get filled in by vk_multisample_sample_locations_state_init() */
609 ms->sample_locations_enable = false;
610 ms->sample_locations = NULL;
611 }
612
613 static bool
614 needs_sample_locations_state(
615 const BITSET_WORD *dynamic,
616 const VkPipelineSampleLocationsStateCreateInfoEXT *sl_info)
617 {
618 return !IS_DYNAMIC(MS_SAMPLE_LOCATIONS) &&
619 sl_info != NULL && sl_info->sampleLocationsEnable;
620 }
621
622 static void
623 vk_multisample_sample_locations_state_init(
624 struct vk_multisample_state *ms,
625 struct vk_sample_locations_state *sl,
626 const BITSET_WORD *dynamic,
627 const VkPipelineMultisampleStateCreateInfo *ms_info,
628 const VkPipelineSampleLocationsStateCreateInfoEXT *sl_info)
629 {
630 ms->sample_locations_enable =
631 sl_info != NULL && sl_info->sampleLocationsEnable;
632
633 assert(ms->sample_locations == NULL);
634 if (!IS_DYNAMIC(MS_SAMPLE_LOCATIONS)) {
635 if (ms->sample_locations_enable) {
636 vk_sample_locations_state_init(sl, &sl_info->sampleLocationsInfo);
637 ms->sample_locations = sl;
638 } else {
639 /* Otherwise, pre-populate with the standard sample locations. If
640 * the driver doesn't support standard sample locations, it probably
641 * doesn't support custom locations either and can completely ignore
642 * this state.
643 */
644 ms->sample_locations =
645 vk_standard_sample_locations_state(ms_info->rasterizationSamples);
646 }
647 }
648 }
649
650 static void
651 vk_dynamic_graphics_state_init_ms(struct vk_dynamic_graphics_state *dst,
652 const BITSET_WORD *needed,
653 const struct vk_multisample_state *ms)
654 {
655 if (IS_NEEDED(MS_SAMPLE_LOCATIONS))
656 *dst->ms.sample_locations = *ms->sample_locations;
657 }
658
659 static void
660 vk_stencil_test_face_state_init(struct vk_stencil_test_face_state *face,
661 const VkStencilOpState *info)
662 {
663 face->op.fail = info->failOp;
664 face->op.pass = info->passOp;
665 face->op.depth_fail = info->depthFailOp;
666 face->op.compare = info->compareOp;
667 face->compare_mask = info->compareMask;
668 face->write_mask = info->writeMask;
669 face->reference = info->reference;
670 }
671
672 static void
673 vk_depth_stencil_state_init(struct vk_depth_stencil_state *ds,
674 const BITSET_WORD *dynamic,
675 const VkPipelineDepthStencilStateCreateInfo *ds_info)
676 {
677 memset(ds, 0, sizeof(*ds));
678
679 ds->depth.test_enable = ds_info->depthTestEnable;
680 ds->depth.write_enable = ds_info->depthWriteEnable;
681 ds->depth.compare_op = ds_info->depthCompareOp;
682 ds->depth.bounds_test.enable = ds_info->depthBoundsTestEnable;
683 ds->depth.bounds_test.min = ds_info->minDepthBounds;
684 ds->depth.bounds_test.max = ds_info->maxDepthBounds;
685
686 ds->stencil.test_enable = ds_info->stencilTestEnable;
687 ds->stencil.write_enable = true;
688 vk_stencil_test_face_state_init(&ds->stencil.front, &ds_info->front);
689 vk_stencil_test_face_state_init(&ds->stencil.back, &ds_info->back);
690 }
691
692 static void
693 vk_dynamic_graphics_state_init_ds(struct vk_dynamic_graphics_state *dst,
694 const BITSET_WORD *needed,
695 const struct vk_depth_stencil_state *ds)
696 {
697 dst->ds = *ds;
698 }
699
700 static bool
701 optimize_stencil_face(struct vk_stencil_test_face_state *face,
702 VkCompareOp depthCompareOp,
703 bool consider_write_mask)
704 {
705 /* If compareOp is ALWAYS then the stencil test will never fail and failOp
706 * will never happen. Set failOp to KEEP in this case.
707 */
708 if (face->op.compare == VK_COMPARE_OP_ALWAYS)
709 face->op.fail = VK_STENCIL_OP_KEEP;
710
711 /* If compareOp is NEVER or depthCompareOp is NEVER then one of the depth
712 * or stencil tests will fail and passOp will never happen.
713 */
714 if (face->op.compare == VK_COMPARE_OP_NEVER ||
715 depthCompareOp == VK_COMPARE_OP_NEVER)
716 face->op.pass = VK_STENCIL_OP_KEEP;
717
718 /* If compareOp is NEVER or depthCompareOp is ALWAYS then either the
719 * stencil test will fail or the depth test will pass. In either case,
720 * depthFailOp will never happen.
721 */
722 if (face->op.compare == VK_COMPARE_OP_NEVER ||
723 depthCompareOp == VK_COMPARE_OP_ALWAYS)
724 face->op.depth_fail = VK_STENCIL_OP_KEEP;
725
726 /* If the write mask is zero, nothing will be written to the stencil buffer
727 * so it's as if all operations are KEEP.
728 */
729 if (consider_write_mask && face->write_mask == 0) {
730 face->op.pass = VK_STENCIL_OP_KEEP;
731 face->op.fail = VK_STENCIL_OP_KEEP;
732 face->op.depth_fail = VK_STENCIL_OP_KEEP;
733 }
734
735 return face->op.fail != VK_STENCIL_OP_KEEP ||
736 face->op.depth_fail != VK_STENCIL_OP_KEEP ||
737 face->op.pass != VK_STENCIL_OP_KEEP;
738 }
739
740 void
741 vk_optimize_depth_stencil_state(struct vk_depth_stencil_state *ds,
742 VkImageAspectFlags ds_aspects,
743 bool consider_write_mask)
744 {
745 /* stencil.write_enable is a dummy right now that should always be true */
746 assert(ds->stencil.write_enable);
747
748 /* From the Vulkan 1.3.221 spec:
749 *
750 * "If there is no depth attachment then the depth test is skipped."
751 */
752 if (!(ds_aspects & VK_IMAGE_ASPECT_DEPTH_BIT))
753 ds->depth.test_enable = false;
754
755 /* From the Vulkan 1.3.221 spec:
756 *
757 * "...or if there is no stencil attachment, the coverage mask is
758 * unmodified by this operation."
759 */
760 if (!(ds_aspects & VK_IMAGE_ASPECT_STENCIL_BIT))
761 ds->stencil.test_enable = false;
762
763 /* If the depth test is disabled, we won't be writing anything. Make sure we
764 * treat the test as always passing later on as well.
765 */
766 if (!ds->depth.test_enable) {
767 ds->depth.write_enable = false;
768 ds->depth.compare_op = VK_COMPARE_OP_ALWAYS;
769 }
770
771 /* If the stencil test is disabled, we won't be writing anything. Make sure
772 * we treat the test as always passing later on as well.
773 */
774 if (!ds->stencil.test_enable) {
775 ds->stencil.write_enable = false;
776 ds->stencil.front.op.compare = VK_COMPARE_OP_ALWAYS;
777 ds->stencil.back.op.compare = VK_COMPARE_OP_ALWAYS;
778 }
779
780 /* If the stencil test is enabled and always fails, then we will never get
781 * to the depth test so we can just disable the depth test entirely.
782 */
783 if (ds->stencil.test_enable &&
784 ds->stencil.front.op.compare == VK_COMPARE_OP_NEVER &&
785 ds->stencil.back.op.compare == VK_COMPARE_OP_NEVER) {
786 ds->depth.test_enable = false;
787 ds->depth.write_enable = false;
788 }
789
790 /* If depthCompareOp is EQUAL then the value we would be writing to the
791 * depth buffer is the same as the value that's already there so there's no
792 * point in writing it.
793 */
794 if (ds->depth.compare_op == VK_COMPARE_OP_EQUAL)
795 ds->depth.write_enable = false;
796
797 /* If the stencil ops are such that we don't actually ever modify the
798 * stencil buffer, we should disable writes.
799 */
800 if (!optimize_stencil_face(&ds->stencil.front, ds->depth.compare_op,
801 consider_write_mask) &&
802 !optimize_stencil_face(&ds->stencil.back, ds->depth.compare_op,
803 consider_write_mask))
804 ds->stencil.write_enable = false;
805
806 /* If the depth test always passes and we never write out depth, that's the
807 * same as if the depth test is disabled entirely.
808 */
809 if (ds->depth.compare_op == VK_COMPARE_OP_ALWAYS && !ds->depth.write_enable)
810 ds->depth.test_enable = false;
811
812 /* If the stencil test always passes and we never write out stencil, that's
813 * the same as if the stencil test is disabled entirely.
814 */
815 if (ds->stencil.front.op.compare == VK_COMPARE_OP_ALWAYS &&
816 ds->stencil.back.op.compare == VK_COMPARE_OP_ALWAYS &&
817 !ds->stencil.write_enable)
818 ds->stencil.test_enable = false;
819 }
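
/* A small worked example of the optimization above (hypothetical inputs):
 * with a depth-only attachment (ds_aspects == VK_IMAGE_ASPECT_DEPTH_BIT),
 * stencilTestEnable == VK_TRUE and depthCompareOp == VK_COMPARE_OP_EQUAL,
 * the stencil test is dropped because there is no stencil aspect and the
 * depth write is dropped because an EQUAL test can never change the value
 * already stored in the depth buffer.
 */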
820
821 static void
822 vk_color_blend_state_init(struct vk_color_blend_state *cb,
823 const BITSET_WORD *dynamic,
824 const VkPipelineColorBlendStateCreateInfo *cb_info)
825 {
826 memset(cb, 0, sizeof(*cb));
827
828 cb->logic_op_enable = cb_info->logicOpEnable;
829 cb->logic_op = cb_info->logicOp;
830
831 assert(cb_info->attachmentCount <= MESA_VK_MAX_COLOR_ATTACHMENTS);
832 cb->attachment_count = cb_info->attachmentCount;
833 for (uint32_t a = 0; a < cb_info->attachmentCount; a++) {
834 const VkPipelineColorBlendAttachmentState *att =
835 &cb_info->pAttachments[a];
836
837 cb->attachments[a] = (struct vk_color_blend_attachment_state) {
838 .blend_enable = att->blendEnable,
839 .src_color_blend_factor = att->srcColorBlendFactor,
840 .dst_color_blend_factor = att->dstColorBlendFactor,
841 .src_alpha_blend_factor = att->srcAlphaBlendFactor,
842 .dst_alpha_blend_factor = att->dstAlphaBlendFactor,
843 .write_mask = att->colorWriteMask,
844 .color_blend_op = att->colorBlendOp,
845 .alpha_blend_op = att->alphaBlendOp,
846 };
847 }
848
849 for (uint32_t i = 0; i < 4; i++)
850 cb->blend_constants[i] = cb_info->blendConstants[i];
851
852 const VkPipelineColorWriteCreateInfoEXT *cw_info =
853 vk_find_struct_const(cb_info->pNext, PIPELINE_COLOR_WRITE_CREATE_INFO_EXT);
854 if (cw_info != NULL) {
855 assert(cb_info->attachmentCount == cw_info->attachmentCount);
856 for (uint32_t a = 0; a < cw_info->attachmentCount; a++) {
857 if (cw_info->pColorWriteEnables[a])
858 cb->color_write_enables |= BITFIELD_BIT(a);
859 }
860 } else {
861 cb->color_write_enables = BITFIELD_MASK(cb_info->attachmentCount);
862 }
863 }
864
865 static void
866 vk_dynamic_graphics_state_init_cb(struct vk_dynamic_graphics_state *dst,
867 const BITSET_WORD *needed,
868 const struct vk_color_blend_state *cb)
869 {
870 dst->cb.logic_op = cb->logic_op;
871 dst->cb.color_write_enables = cb->color_write_enables;
872
873 if (IS_NEEDED(CB_BLEND_CONSTANTS))
874 typed_memcpy(dst->cb.blend_constants, cb->blend_constants, 4);
875 }
876
877 static bool
878 vk_render_pass_state_is_complete(const struct vk_render_pass_state *rp)
879 {
880 return rp->attachment_aspects != VK_IMAGE_ASPECT_METADATA_BIT;
881 }
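
/* VK_IMAGE_ASPECT_METADATA_BIT is used here purely as a sentinel meaning
 * "attachment information not known yet"; vk_render_pass_state_init() below
 * assigns it when only fragment shader state is available and neither a
 * render pass nor fragment output interface state is present.
 */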
882
883 static void
884 vk_render_pass_state_init(struct vk_render_pass_state *rp,
885 const struct vk_render_pass_state *old_rp,
886 const VkGraphicsPipelineCreateInfo *info,
887 const struct vk_subpass_info *sp_info,
888 VkGraphicsPipelineLibraryFlagsEXT lib)
889 {
890 /* If we already have render pass state and it has attachment info, then
891 * it's complete and we don't need a new one.
892 */
893 if (old_rp != NULL && vk_render_pass_state_is_complete(old_rp)) {
894 *rp = *old_rp;
895 return;
896 }
897
898 *rp = (struct vk_render_pass_state) {
899 .render_pass = info->renderPass,
900 .subpass = info->subpass,
901 .depth_attachment_format = VK_FORMAT_UNDEFINED,
902 .stencil_attachment_format = VK_FORMAT_UNDEFINED,
903 };
904
905 if (info->renderPass != VK_NULL_HANDLE && sp_info != NULL) {
906 rp->attachment_aspects = sp_info->attachment_aspects;
907 rp->view_mask = sp_info->view_mask;
908 return;
909 }
910
911 const VkPipelineRenderingCreateInfo *r_info =
912 vk_get_pipeline_rendering_create_info(info);
913
914 if (r_info == NULL)
915 return;
916
917 rp->view_mask = r_info->viewMask;
918
919 /* From the Vulkan 1.3.218 spec description of pre-rasterization state:
920 *
921 * "Fragment shader state is defined by:
922 * ...
923 * * VkRenderPass and subpass parameter
924 * * The viewMask parameter of VkPipelineRenderingCreateInfo (formats
925 * are ignored)"
926 *
927 * The description of fragment shader state contains identical text.
928 *
929 * If we have a render pass then we have full information. Even if we're
930 * dynamic-rendering-only, the presence of a render pass means the
931 * rendering info came from a vk_render_pass and is therefore complete.
932 * Otherwise, all we can grab is the view mask and we have to leave the
933 * rest for later.
934 */
935 if (info->renderPass == VK_NULL_HANDLE &&
936 !(lib & VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT)) {
937 rp->attachment_aspects = VK_IMAGE_ASPECT_METADATA_BIT;
938 return;
939 }
940
941 assert(r_info->colorAttachmentCount <= MESA_VK_MAX_COLOR_ATTACHMENTS);
942 rp->color_attachment_count = r_info->colorAttachmentCount;
943 for (uint32_t i = 0; i < r_info->colorAttachmentCount; i++) {
944 rp->color_attachment_formats[i] = r_info->pColorAttachmentFormats[i];
945 if (r_info->pColorAttachmentFormats[i] != VK_FORMAT_UNDEFINED)
946 rp->attachment_aspects |= VK_IMAGE_ASPECT_COLOR_BIT;
947 }
948
949 rp->depth_attachment_format = r_info->depthAttachmentFormat;
950 if (r_info->depthAttachmentFormat != VK_FORMAT_UNDEFINED)
951 rp->attachment_aspects |= VK_IMAGE_ASPECT_DEPTH_BIT;
952
953 rp->stencil_attachment_format = r_info->stencilAttachmentFormat;
954 if (r_info->stencilAttachmentFormat != VK_FORMAT_UNDEFINED)
955 rp->attachment_aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
956
957 const VkRenderingSelfDependencyInfoMESA *rsd_info =
958 vk_find_struct_const(r_info->pNext, RENDERING_SELF_DEPENDENCY_INFO_MESA);
959 if (rsd_info != NULL) {
960 STATIC_ASSERT(sizeof(rp->color_self_dependencies) * 8 >=
961 MESA_VK_MAX_COLOR_ATTACHMENTS);
962 rp->color_self_dependencies = rsd_info->colorSelfDependencies;
963 rp->depth_self_dependency = rsd_info->depthSelfDependency;
964 rp->stencil_self_dependency = rsd_info->stencilSelfDependency;
965 }
966 }
967
968 static void
969 vk_dynamic_graphics_state_init_rp(struct vk_dynamic_graphics_state *dst,
970 const BITSET_WORD *needed,
971 const struct vk_render_pass_state *rp)
972 { }
973
974 #define FOREACH_STATE_GROUP(f) \
975 f(MESA_VK_GRAPHICS_STATE_VERTEX_INPUT_BIT, \
976 vk_vertex_input_state, vi); \
977 f(MESA_VK_GRAPHICS_STATE_INPUT_ASSEMBLY_BIT, \
978 vk_input_assembly_state, ia); \
979 f(MESA_VK_GRAPHICS_STATE_TESSELLATION_BIT, \
980 vk_tessellation_state, ts); \
981 f(MESA_VK_GRAPHICS_STATE_VIEWPORT_BIT, \
982 vk_viewport_state, vp); \
983 f(MESA_VK_GRAPHICS_STATE_DISCARD_RECTANGLES_BIT, \
984 vk_discard_rectangles_state, dr); \
985 f(MESA_VK_GRAPHICS_STATE_RASTERIZATION_BIT, \
986 vk_rasterization_state, rs); \
987 f(MESA_VK_GRAPHICS_STATE_FRAGMENT_SHADING_RATE_BIT, \
988 vk_fragment_shading_rate_state, fsr); \
989 f(MESA_VK_GRAPHICS_STATE_MULTISAMPLE_BIT, \
990 vk_multisample_state, ms); \
991 f(MESA_VK_GRAPHICS_STATE_DEPTH_STENCIL_BIT, \
992 vk_depth_stencil_state, ds); \
993 f(MESA_VK_GRAPHICS_STATE_COLOR_BLEND_BIT, \
994 vk_color_blend_state, cb); \
995 f(MESA_VK_GRAPHICS_STATE_RENDER_PASS_BIT, \
996 vk_render_pass_state, rp);
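
/* FOREACH_STATE_GROUP() is an X-macro: each user defines a local macro
 * f(STATE, type, s) and lets the list above expand it once per state group.
 * For example, FILL_HAS below expands (roughly) to:
 *
 *    if (state->vi != NULL) groups |= MESA_VK_GRAPHICS_STATE_VERTEX_INPUT_BIT;
 *    if (state->ia != NULL) groups |= MESA_VK_GRAPHICS_STATE_INPUT_ASSEMBLY_BIT;
 *    ...and so on for the remaining groups.
 */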
997
998 static enum mesa_vk_graphics_state_groups
999 vk_graphics_pipeline_state_groups(const struct vk_graphics_pipeline_state *state)
1000 {
1001 /* For now, we just validate dynamic state */
1002 enum mesa_vk_graphics_state_groups groups = 0;
1003
1004 #define FILL_HAS(STATE, type, s) \
1005 if (state->s != NULL) groups |= STATE
1006
1007 FOREACH_STATE_GROUP(FILL_HAS)
1008
1009 #undef FILL_HAS
1010
1011 return groups | fully_dynamic_state_groups(state->dynamic);
1012 }
1013
1014 static void
1015 vk_graphics_pipeline_state_validate(const struct vk_graphics_pipeline_state *state)
1016 {
1017 #ifndef NDEBUG
1018 /* For now, we just validate dynamic state */
1019 enum mesa_vk_graphics_state_groups groups =
1020 vk_graphics_pipeline_state_groups(state);
1021 validate_dynamic_state_groups(state->dynamic, groups);
1022 #endif
1023 }
1024
1025 static bool
1026 may_have_rasterization(const struct vk_graphics_pipeline_state *state,
1027 const BITSET_WORD *dynamic,
1028 const VkGraphicsPipelineCreateInfo *info)
1029 {
1030 if (state->rs) {
1031 /* We default rasterizer_discard_enable to false when dynamic */
1032 return !state->rs->rasterizer_discard_enable;
1033 } else {
1034 return IS_DYNAMIC(RS_RASTERIZER_DISCARD_ENABLE) ||
1035 !info->pRasterizationState->rasterizerDiscardEnable;
1036 }
1037 }
1038
1039 VkResult
1040 vk_graphics_pipeline_state_fill(const struct vk_device *device,
1041 struct vk_graphics_pipeline_state *state,
1042 const VkGraphicsPipelineCreateInfo *info,
1043 const struct vk_subpass_info *sp_info,
1044 struct vk_graphics_pipeline_all_state *all,
1045 const VkAllocationCallbacks *alloc,
1046 VkSystemAllocationScope scope,
1047 void **alloc_ptr_out)
1048 {
1049 vk_graphics_pipeline_state_validate(state);
1050
1051 BITSET_DECLARE(dynamic, MESA_VK_DYNAMIC_GRAPHICS_STATE_ENUM_MAX);
1052 vk_get_dynamic_graphics_states(dynamic, info->pDynamicState);
1053
1054 for (uint32_t i = 0; i < info->stageCount; i++)
1055 state->shader_stages |= info->pStages[i].stage;
1056
1057 /* In case we return early */
1058 if (alloc_ptr_out != NULL)
1059 *alloc_ptr_out = NULL;
1060
1061 /*
1062 * First, figure out which library-level shader/state groups we need
1063 */
1064
1065 VkGraphicsPipelineLibraryFlagsEXT lib;
1066 if (info->flags & VK_PIPELINE_CREATE_LIBRARY_BIT_KHR) {
1067 const VkGraphicsPipelineLibraryCreateInfoEXT *gfx_lib_info =
1068 vk_find_struct_const(info->pNext, GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT);
1069 lib = gfx_lib_info->flags;
1070 } else {
1071 /* We're building a complete pipeline. From the Vulkan 1.3.218 spec:
1072 *
1073 * "A complete graphics pipeline always includes pre-rasterization
1074 * shader state, with other subsets included depending on that state.
1075 * If the pre-rasterization shader state includes a vertex shader,
1076 * then vertex input state is included in a complete graphics
1077 * pipeline. If the value of
1078 * VkPipelineRasterizationStateCreateInfo::rasterizerDiscardEnable in
1079 * the pre-rasterization shader state is VK_FALSE or the
1080 * VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE dynamic state is
1081 * enabled fragment shader state and fragment output interface state
1082 * is included in a complete graphics pipeline."
1083 */
1084 lib = VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT;
1085
1086 if (state->shader_stages & VK_SHADER_STAGE_VERTEX_BIT)
1087 lib |= VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT;
1088
1089 if (may_have_rasterization(state, dynamic, info)) {
1090 lib |= VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT;
1091 lib |= VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT;
1092 }
1093 }
1094
1095 /*
1096 * Next, turn those into individual states. Among other things, this
1097 * de-duplicates things like FSR and multisample state which appear in
1098 * multiple library groups.
1099 */
1100
1101 enum mesa_vk_graphics_state_groups needs = 0;
1102 if (lib & VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT) {
1103 needs |= MESA_VK_GRAPHICS_STATE_VERTEX_INPUT_BIT;
1104 needs |= MESA_VK_GRAPHICS_STATE_INPUT_ASSEMBLY_BIT;
1105 }
1106
1107 /* Other stuff potentially depends on this so gather it early */
1108 struct vk_render_pass_state rp;
1109 if (lib & (VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT |
1110 VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT |
1111 VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT)) {
1112 vk_render_pass_state_init(&rp, state->rp, info, sp_info, lib);
1113
1114 needs |= MESA_VK_GRAPHICS_STATE_RENDER_PASS_BIT;
1115
1116 /* If the old state was incomplete but the new one isn't, set state->rp
1117 * to NULL so it gets replaced with the new version.
1118 */
1119 if (state->rp != NULL &&
1120 !vk_render_pass_state_is_complete(state->rp) &&
1121 vk_render_pass_state_is_complete(&rp))
1122 state->rp = NULL;
1123 }
1124
1125 if (lib & VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT) {
1126 /* From the Vulkan 1.3.218 spec:
1127 *
1128 * VUID-VkGraphicsPipelineCreateInfo-stage-02096
1129 *
1130 * "If the pipeline is being created with pre-rasterization shader
1131 * state the stage member of one element of pStages must be either
1132 * VK_SHADER_STAGE_VERTEX_BIT or VK_SHADER_STAGE_MESH_BIT_NV"
1133 */
1134 assert(state->shader_stages & (VK_SHADER_STAGE_VERTEX_BIT |
1135 VK_SHADER_STAGE_MESH_BIT_NV));
1136
1137 if (state->shader_stages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
1138 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
1139 needs |= MESA_VK_GRAPHICS_STATE_TESSELLATION_BIT;
1140
1141 if (may_have_rasterization(state, dynamic, info))
1142 needs |= MESA_VK_GRAPHICS_STATE_VIEWPORT_BIT;
1143
1144 needs |= MESA_VK_GRAPHICS_STATE_DISCARD_RECTANGLES_BIT;
1145 needs |= MESA_VK_GRAPHICS_STATE_RASTERIZATION_BIT;
1146 needs |= MESA_VK_GRAPHICS_STATE_FRAGMENT_SHADING_RATE_BIT;
1147 }
1148
1149 if (lib & VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT) {
1150 needs |= MESA_VK_GRAPHICS_STATE_FRAGMENT_SHADING_RATE_BIT;
1151
1152 /* From the Vulkan 1.3.218 spec:
1153 *
1154 * "Fragment shader state is defined by:
1155 * ...
1156 * - VkPipelineMultisampleStateCreateInfo if sample shading is
1157 * enabled or renderpass is not VK_NULL_HANDLE"
1158 *
1159 * and
1160 *
1161 * VUID-VkGraphicsPipelineCreateInfo-pMultisampleState-06629
1162 *
1163 * "If the pipeline is being created with fragment shader state
1164 * pMultisampleState must be NULL or a valid pointer to a valid
1165 * VkPipelineMultisampleStateCreateInfo structure"
1166 *
1167 * so we can reliably detect when to include it based on the
1168 * pMultisampleState pointer.
1169 */
1170 if (info->pMultisampleState != NULL)
1171 needs |= MESA_VK_GRAPHICS_STATE_MULTISAMPLE_BIT;
1172
1173 /* From the Vulkan 1.3.218 spec:
1174 *
1175 * VUID-VkGraphicsPipelineCreateInfo-renderPass-06043
1176 *
1177 * "If renderPass is not VK_NULL_HANDLE, the pipeline is being
1178 * created with fragment shader state, and subpass uses a
1179 * depth/stencil attachment, pDepthStencilState must be a valid
1180 * pointer to a valid VkPipelineDepthStencilStateCreateInfo
1181 * structure"
1182 *
1183 * VUID-VkGraphicsPipelineCreateInfo-renderPass-06053
1184 *
1185 * "If renderPass is VK_NULL_HANDLE, the pipeline is being created
1186 * with fragment shader state and fragment output interface state,
1187 * and either of VkPipelineRenderingCreateInfo::depthAttachmentFormat
1188 * or VkPipelineRenderingCreateInfo::stencilAttachmentFormat are not
1189 * VK_FORMAT_UNDEFINED, pDepthStencilState must be a valid pointer to
1190 * a valid VkPipelineDepthStencilStateCreateInfo structure"
1191 *
1192 * VUID-VkGraphicsPipelineCreateInfo-renderPass-06590
1193 *
1194 * "If renderPass is VK_NULL_HANDLE and the pipeline is being created
1195 * with fragment shader state but not fragment output interface
1196 * state, pDepthStencilState must be a valid pointer to a valid
1197 * VkPipelineDepthStencilStateCreateInfo structure"
1198 *
1199 * In the first case, we'll have a real set of aspects in rp. In the
1200 * second case, where we have both fragment shader and fragment output
1201 * state, we will also have a valid set of aspects. In the third case
1202 * where we only have fragment shader state and no render pass, the
1203 * vk_render_pass_state will be incomplete.
1204 */
1205 if ((rp.attachment_aspects & (VK_IMAGE_ASPECT_DEPTH_BIT |
1206 VK_IMAGE_ASPECT_STENCIL_BIT)) ||
1207 !vk_render_pass_state_is_complete(&rp))
1208 needs |= MESA_VK_GRAPHICS_STATE_DEPTH_STENCIL_BIT;
1209 }
1210
1211 if (lib & VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT) {
1212 if (rp.attachment_aspects & (VK_IMAGE_ASPECT_COLOR_BIT))
1213 needs |= MESA_VK_GRAPHICS_STATE_COLOR_BLEND_BIT;
1214
1215 needs |= MESA_VK_GRAPHICS_STATE_MULTISAMPLE_BIT;
1216 }
1217
1218 /*
1219 * Next, filter off any states we already have.
1220 */
1221
1222 #define FILTER_NEEDS(STATE, type, s) \
1223 if (state->s != NULL) needs &= ~STATE
1224
1225 FOREACH_STATE_GROUP(FILTER_NEEDS)
1226
1227 #undef FILTER_NEEDS
1228
1229 /* Filter dynamic state down to just what we're adding */
1230 BITSET_DECLARE(dynamic_filter, MESA_VK_DYNAMIC_GRAPHICS_STATE_ENUM_MAX);
1231 get_dynamic_state_groups(dynamic_filter, needs);
1232 BITSET_AND(dynamic, dynamic, dynamic_filter);
1233
1234 /* And add it in */
1235 BITSET_OR(state->dynamic, state->dynamic, dynamic);
1236
1237 /*
1238 * If a state group is fully dynamic, we don't even need to allocate it. Do
1239 * this after we've filtered dynamic state because we still want it to
1240 * show up in the dynamic state but don't want the actual state.
1241 */
1242 needs &= ~fully_dynamic_state_groups(state->dynamic);
1243
1244 /* If we don't need to set up any new states, bail early */
1245 if (needs == 0)
1246 return VK_SUCCESS;
1247
1248 /*
1249 * Now, ensure that we have space for each of the states we're going to
1250 * fill. If all != NULL, we'll pull from that. Otherwise, we need to
1251 * allocate memory.
1252 */
1253
1254 VK_MULTIALLOC(ma);
1255
1256 #define ENSURE_STATE_IF_NEEDED(STATE, type, s) \
1257 struct type *new_##s = NULL; \
1258 if (needs & STATE) { \
1259 if (all == NULL) { \
1260 vk_multialloc_add(&ma, &new_##s, struct type, 1); \
1261 } else { \
1262 new_##s = &all->s; \
1263 } \
1264 }
1265
1266 FOREACH_STATE_GROUP(ENSURE_STATE_IF_NEEDED)
1267
1268 #undef ENSURE_STATE_IF_NEEDED
1269
1270 /* Sample locations are a bit special. We don't want to waste the memory
1271 * for 64 floats if we don't need to. Also, we set up standard sample
1272 * locations if no user-provided sample locations are available.
1273 */
1274 const VkPipelineSampleLocationsStateCreateInfoEXT *sl_info = NULL;
1275 struct vk_sample_locations_state *new_sl = NULL;
1276 if (needs & MESA_VK_GRAPHICS_STATE_MULTISAMPLE_BIT) {
1277 sl_info = vk_find_struct_const(info->pMultisampleState->pNext,
1278 PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT);
1279 if (needs_sample_locations_state(dynamic, sl_info)) {
1280 if (all == NULL) {
1281 vk_multialloc_add(&ma, &new_sl, struct vk_sample_locations_state, 1);
1282 } else {
1283 new_sl = &all->ms_sample_locations;
1284 }
1285 }
1286 }
1287
1288 /*
1289 * Allocate memory, if needed
1290 */
1291
1292 if (ma.size > 0) {
1293 assert(all == NULL);
1294 *alloc_ptr_out = vk_multialloc_alloc2(&ma, &device->alloc, alloc, scope);
1295 if (*alloc_ptr_out == NULL)
1296 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1297 }
1298
1299 /*
1300 * Create aliases for various input infos so we can use our FOREACH macro
1301 */
1302
1303 #define INFO_ALIAS(_State, s) \
1304 const VkPipeline##_State##StateCreateInfo *s##_info = info->p##_State##State
1305
1306 INFO_ALIAS(VertexInput, vi);
1307 INFO_ALIAS(InputAssembly, ia);
1308 INFO_ALIAS(Tessellation, ts);
1309 INFO_ALIAS(Viewport, vp);
1310 INFO_ALIAS(Rasterization, rs);
1311 INFO_ALIAS(Multisample, ms);
1312 INFO_ALIAS(DepthStencil, ds);
1313 INFO_ALIAS(ColorBlend, cb);
1314
1315 #undef INFO_ALIAS
1316
1317 const VkPipelineDiscardRectangleStateCreateInfoEXT *dr_info =
1318 vk_find_struct_const(info->pNext, PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT);
1319
1320 const VkPipelineFragmentShadingRateStateCreateInfoKHR *fsr_info =
1321 vk_find_struct_const(info->pNext, PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR);
1322
1323 /*
1324 * Finally, fill out all the states
1325 */
1326
1327 #define INIT_STATE_IF_NEEDED(STATE, type, s) \
1328 if (needs & STATE) { \
1329 type##_init(new_##s, dynamic, s##_info); \
1330 state->s = new_##s; \
1331 }
1332
1333 /* render pass state is special and we just copy it */
1334 #define vk_render_pass_state_init(s, d, i) *s = rp
1335
1336 FOREACH_STATE_GROUP(INIT_STATE_IF_NEEDED)
1337
1338 #undef vk_render_pass_state_init
1339 #undef INIT_STATE_IF_NEEDED
1340
1341 if (needs & MESA_VK_GRAPHICS_STATE_MULTISAMPLE_BIT) {
1342 vk_multisample_sample_locations_state_init(new_ms, new_sl, dynamic,
1343 ms_info, sl_info);
1344 }
1345
1346 return VK_SUCCESS;
1347 }
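
/* A rough caller-side sketch (hypothetical driver code, assuming dynamic
 * rendering so no vk_subpass_info is required): passing a stack-allocated
 * "all" struct avoids any heap allocation, in which case alloc_ptr_out may
 * be NULL.
 *
 *    struct vk_graphics_pipeline_state state = { 0 };
 *    struct vk_graphics_pipeline_all_state all;
 *    VkResult result =
 *       vk_graphics_pipeline_state_fill(device, &state, pCreateInfo, NULL,
 *                                       &all, NULL,
 *                                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
 *                                       NULL);
 */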
1348
1349 #undef IS_DYNAMIC
1350 #undef IS_NEEDED
1351
1352 void
1353 vk_graphics_pipeline_state_merge(struct vk_graphics_pipeline_state *dst,
1354 const struct vk_graphics_pipeline_state *src)
1355 {
1356 vk_graphics_pipeline_state_validate(dst);
1357 vk_graphics_pipeline_state_validate(src);
1358
1359 BITSET_OR(dst->dynamic, dst->dynamic, src->dynamic);
1360
1361 dst->shader_stages |= src->shader_stages;
1362
1363 /* Render pass state needs special care because a render pass state may be
1364 * incomplete (view mask only). See vk_render_pass_state_init().
1365 */
1366 if (dst->rp != NULL && src->rp != NULL &&
1367 !vk_render_pass_state_is_complete(dst->rp) &&
1368 vk_render_pass_state_is_complete(src->rp))
1369 dst->rp = src->rp;
1370
1371 #define MERGE(STATE, type, state) \
1372 if (dst->state == NULL && src->state != NULL) dst->state = src->state;
1373
1374 FOREACH_STATE_GROUP(MERGE)
1375
1376 #undef MERGE
1377 }
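
/* This merge is useful when linking pipeline libraries
 * (VK_EXT_graphics_pipeline_library) into a complete pipeline.  A sketch,
 * where my_pipeline_lib is an assumed driver type and not part of this file:
 *
 *    struct vk_graphics_pipeline_state state = { 0 };
 *    for (uint32_t i = 0; i < lib_info->libraryCount; i++) {
 *       VK_FROM_HANDLE(my_pipeline_lib, lib, lib_info->pLibraries[i]);
 *       vk_graphics_pipeline_state_merge(&state, &lib->state);
 *    }
 */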
1378
1379 const struct vk_dynamic_graphics_state vk_default_dynamic_graphics_state = {
1380 .rs = {
1381 .line = {
1382 .width = 1.0f,
1383 },
1384 },
1385 .fsr = {
1386 .fragment_size = {1u, 1u},
1387 .combiner_ops = {
1388 VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
1389 VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
1390 },
1391 },
1392 .ds = {
1393 .depth = {
1394 .bounds_test = {
1395 .min = 0.0f,
1396 .max = 1.0f,
1397 },
1398 },
1399 .stencil = {
1400 .write_enable = true,
1401 .front = {
1402 .compare_mask = -1,
1403 .write_mask = -1,
1404 },
1405 .back = {
1406 .compare_mask = -1,
1407 .write_mask = -1,
1408 },
1409 },
1410 },
1411 .cb = {
1412 .color_write_enables = 0xffffffffu,
1413 },
1414 };
1415
1416 void
1417 vk_dynamic_graphics_state_init(struct vk_dynamic_graphics_state *dyn)
1418 {
1419 *dyn = vk_default_dynamic_graphics_state;
1420 }
1421
1422 void
1423 vk_dynamic_graphics_state_clear(struct vk_dynamic_graphics_state *dyn)
1424 {
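/* dyn->vi and dyn->ms.sample_locations point at driver-provided side storage
 * (when the driver supports those as dynamic state), so preserve the
 * pointers across the reset below and only zero what they point to.
 */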
1425 struct vk_vertex_input_state *vi = dyn->vi;
1426 struct vk_sample_locations_state *sl = dyn->ms.sample_locations;
1427
1428 *dyn = vk_default_dynamic_graphics_state;
1429
1430 if (vi != NULL) {
1431 memset(vi, 0, sizeof(*vi));
1432 dyn->vi = vi;
1433 }
1434
1435 if (sl != NULL) {
1436 memset(sl, 0, sizeof(*sl));
1437 dyn->ms.sample_locations = sl;
1438 }
1439 }
1440
1441 void
1442 vk_dynamic_graphics_state_fill(struct vk_dynamic_graphics_state *dyn,
1443 const struct vk_graphics_pipeline_state *p)
1444 {
1445 /* This function (and the individual vk_dynamic_graphics_state_init_*
1446 * functions it calls) is a bit sloppy.  Instead of checking every single
1447 * bit, we just copy everything and set the bits the right way at the end
1448 * based on what groups we actually had.
1449 */
1450 enum mesa_vk_graphics_state_groups groups = 0;
1451
1452 BITSET_DECLARE(needed, MESA_VK_DYNAMIC_GRAPHICS_STATE_ENUM_MAX);
1453 BITSET_COPY(needed, p->dynamic);
1454 BITSET_NOT(needed);
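
/* After the flip, `needed` holds exactly the state that is NOT dynamic in
 * the pipeline, i.e. the values baked in at pipeline-create time which have
 * to be copied into the command buffer's dynamic state.
 */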
1455
1456 /* We only want to copy these if the driver has filled out the relevant
1457 * pointer in the dynamic state struct. If not, they don't support them
1458 * as dynamic state and we should leave them alone.
1459 */
1460 if (dyn->vi == NULL)
1461 BITSET_CLEAR(needed, MESA_VK_DYNAMIC_VI);
1462 if (dyn->ms.sample_locations == NULL)
1463 BITSET_CLEAR(needed, MESA_VK_DYNAMIC_MS_SAMPLE_LOCATIONS);
1464
1465 #define INIT_DYNAMIC_STATE(STATE, type, s) \
1466 if (p->s != NULL) { \
1467 vk_dynamic_graphics_state_init_##s(dyn, needed, p->s); \
1468 groups |= STATE; \
1469 }
1470
1471 FOREACH_STATE_GROUP(INIT_DYNAMIC_STATE);
1472
1473 #undef INIT_DYNAMIC_STATE
1474
1475 /* Mask off all but the groups we actually found */
1476 get_dynamic_state_groups(dyn->set, groups);
1477 BITSET_AND(dyn->set, dyn->set, needed);
1478 }
1479
1480 #define SET_DYN_VALUE(dst, STATE, state, value) do { \
1481 if (!BITSET_TEST((dst)->set, MESA_VK_DYNAMIC_##STATE) || \
1482 (dst)->state != (value)) { \
1483 (dst)->state = (value); \
1484 assert((dst)->state == (value)); \
1485 BITSET_SET(dst->set, MESA_VK_DYNAMIC_##STATE); \
1486 BITSET_SET(dst->dirty, MESA_VK_DYNAMIC_##STATE); \
1487 } \
1488 } while(0)
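
/* SET_DYN_VALUE() only marks the state dirty when the value actually changes
 * (or was never set), so redundant vkCmdSet*() calls do not force drivers to
 * re-emit anything.  The assert catches accidental truncation when the value
 * is wider than the destination field.
 */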
1489
1490 #define SET_DYN_BOOL(dst, STATE, state, value) \
1491 SET_DYN_VALUE(dst, STATE, state, (bool)value);
1492
1493 #define SET_DYN_ARRAY(dst, STATE, state, start, count, src) do { \
1494 assert(start + count <= ARRAY_SIZE((dst)->state)); \
1495 STATIC_ASSERT(sizeof(*(dst)->state) == sizeof(*(src))); \
1496 const size_t __state_size = sizeof(*(dst)->state) * (count); \
1497 if (!BITSET_TEST((dst)->set, MESA_VK_DYNAMIC_##STATE) || \
1498 memcmp((dst)->state + start, src, __state_size)) { \
1499 memcpy((dst)->state + start, src, __state_size); \
1500 BITSET_SET(dst->set, MESA_VK_DYNAMIC_##STATE); \
1501 BITSET_SET(dst->dirty, MESA_VK_DYNAMIC_##STATE); \
1502 } \
1503 } while(0)
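
/* How a driver typically consumes these bits at draw time (a sketch; the
 * emit helper is hypothetical and not part of this file):
 *
 *    struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
 *    if (BITSET_TEST(dyn->dirty, MESA_VK_DYNAMIC_RS_LINE_WIDTH))
 *       hw_emit_line_width(cmd, dyn->rs.line.width);
 *    ...then clear dyn->dirty once all state has been emitted.
 */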
1504
1505 void
1506 vk_dynamic_graphics_state_copy(struct vk_dynamic_graphics_state *dst,
1507 const struct vk_dynamic_graphics_state *src)
1508 {
1509 #define IS_SET_IN_SRC(STATE) \
1510 BITSET_TEST(src->set, MESA_VK_DYNAMIC_##STATE)
1511
1512 #define COPY_MEMBER(STATE, state) \
1513 SET_DYN_VALUE(dst, STATE, state, src->state)
1514
1515 #define COPY_ARRAY(STATE, state, count) \
1516 SET_DYN_ARRAY(dst, STATE, state, 0, count, src->state)
1517
1518 #define COPY_IF_SET(STATE, state) \
1519 if (IS_SET_IN_SRC(STATE)) SET_DYN_VALUE(dst, STATE, state, src->state)
1520
1521 assert((dst->vi != NULL) == (src->vi != NULL));
1522 if (dst->vi != NULL && IS_SET_IN_SRC(VI)) {
1523 COPY_MEMBER(VI, vi->bindings_valid);
1524 u_foreach_bit(b, src->vi->bindings_valid) {
1525 COPY_MEMBER(VI, vi->bindings[b].stride);
1526 COPY_MEMBER(VI, vi->bindings[b].input_rate);
1527 COPY_MEMBER(VI, vi->bindings[b].divisor);
1528 }
1529 COPY_MEMBER(VI, vi->attributes_valid);
1530 u_foreach_bit(a, src->vi->attributes_valid) {
1531 COPY_MEMBER(VI, vi->attributes[a].binding);
1532 COPY_MEMBER(VI, vi->attributes[a].format);
1533 COPY_MEMBER(VI, vi->attributes[a].offset);
1534 }
1535 }
1536
1537 if (IS_SET_IN_SRC(VI_BINDING_STRIDES)) {
1538 COPY_ARRAY(VI_BINDING_STRIDES, vi_binding_strides,
1539 MESA_VK_MAX_VERTEX_BINDINGS);
1540 }
1541
1542 COPY_IF_SET(IA_PRIMITIVE_TOPOLOGY, ia.primitive_topology);
1543 COPY_IF_SET(IA_PRIMITIVE_RESTART_ENABLE, ia.primitive_restart_enable);
1544 COPY_IF_SET(TS_PATCH_CONTROL_POINTS, ts.patch_control_points);
1545
1546 COPY_IF_SET(VP_VIEWPORT_COUNT, vp.viewport_count);
1547 if (IS_SET_IN_SRC(VP_VIEWPORTS)) {
1548 assert(IS_SET_IN_SRC(VP_VIEWPORT_COUNT));
1549 COPY_ARRAY(VP_VIEWPORTS, vp.viewports, src->vp.viewport_count);
1550 }
1551
1552 COPY_IF_SET(VP_SCISSOR_COUNT, vp.scissor_count);
1553 if (IS_SET_IN_SRC(VP_SCISSORS)) {
1554 assert(IS_SET_IN_SRC(VP_SCISSOR_COUNT));
1555 COPY_ARRAY(VP_SCISSORS, vp.scissors, src->vp.scissor_count);
1556 }
1557
1558 if (IS_SET_IN_SRC(DR_RECTANGLES)) {
1559 COPY_MEMBER(DR_RECTANGLES, dr.rectangle_count);
1560 COPY_ARRAY(DR_RECTANGLES, dr.rectangles, src->dr.rectangle_count);
1561 }
1562
1563 COPY_IF_SET(RS_RASTERIZER_DISCARD_ENABLE, rs.rasterizer_discard_enable);
1564 COPY_IF_SET(RS_CULL_MODE, rs.cull_mode);
1565 COPY_IF_SET(RS_FRONT_FACE, rs.front_face);
1566 COPY_IF_SET(RS_DEPTH_BIAS_ENABLE, rs.depth_bias.enable);
1567 COPY_IF_SET(RS_DEPTH_BIAS_FACTORS, rs.depth_bias.constant);
1568 COPY_IF_SET(RS_DEPTH_BIAS_FACTORS, rs.depth_bias.clamp);
1569 COPY_IF_SET(RS_DEPTH_BIAS_FACTORS, rs.depth_bias.slope);
1570 COPY_IF_SET(RS_LINE_WIDTH, rs.line.width);
1571 COPY_IF_SET(RS_LINE_STIPPLE, rs.line.stipple.factor);
1572 COPY_IF_SET(RS_LINE_STIPPLE, rs.line.stipple.pattern);
1573
1574 COPY_IF_SET(FSR, fsr.fragment_size.width);
1575 COPY_IF_SET(FSR, fsr.fragment_size.height);
1576 COPY_IF_SET(FSR, fsr.combiner_ops[0]);
1577 COPY_IF_SET(FSR, fsr.combiner_ops[1]);
1578
1579 assert((dst->ms.sample_locations == NULL) ==
1580 (src->ms.sample_locations == NULL));
1581 if (dst->ms.sample_locations != NULL &&
1582 IS_SET_IN_SRC(MS_SAMPLE_LOCATIONS)) {
1583 COPY_MEMBER(MS_SAMPLE_LOCATIONS, ms.sample_locations->per_pixel);
1584 COPY_MEMBER(MS_SAMPLE_LOCATIONS, ms.sample_locations->grid_size.width);
1585 COPY_MEMBER(MS_SAMPLE_LOCATIONS, ms.sample_locations->grid_size.height);
1586 const uint32_t sl_count = src->ms.sample_locations->per_pixel *
1587 src->ms.sample_locations->grid_size.width *
1588 src->ms.sample_locations->grid_size.height;
1589 COPY_ARRAY(MS_SAMPLE_LOCATIONS, ms.sample_locations->locations, sl_count);
1590 }
1591
1592 COPY_IF_SET(DS_DEPTH_TEST_ENABLE, ds.depth.test_enable);
1593 COPY_IF_SET(DS_DEPTH_WRITE_ENABLE, ds.depth.write_enable);
1594 COPY_IF_SET(DS_DEPTH_COMPARE_OP, ds.depth.compare_op);
1595 COPY_IF_SET(DS_DEPTH_BOUNDS_TEST_ENABLE, ds.depth.bounds_test.enable);
1596 if (IS_SET_IN_SRC(DS_DEPTH_BOUNDS_TEST_BOUNDS)) {
1597 COPY_MEMBER(DS_DEPTH_BOUNDS_TEST_BOUNDS, ds.depth.bounds_test.min);
1598 COPY_MEMBER(DS_DEPTH_BOUNDS_TEST_BOUNDS, ds.depth.bounds_test.max);
1599 }
1600
1601 COPY_IF_SET(DS_STENCIL_TEST_ENABLE, ds.stencil.test_enable);
1602 if (IS_SET_IN_SRC(DS_STENCIL_OP)) {
1603 COPY_MEMBER(DS_STENCIL_OP, ds.stencil.front.op.fail);
1604 COPY_MEMBER(DS_STENCIL_OP, ds.stencil.front.op.pass);
1605 COPY_MEMBER(DS_STENCIL_OP, ds.stencil.front.op.depth_fail);
1606 COPY_MEMBER(DS_STENCIL_OP, ds.stencil.front.op.compare);
1607 COPY_MEMBER(DS_STENCIL_OP, ds.stencil.back.op.fail);
1608 COPY_MEMBER(DS_STENCIL_OP, ds.stencil.back.op.pass);
1609 COPY_MEMBER(DS_STENCIL_OP, ds.stencil.back.op.depth_fail);
1610 COPY_MEMBER(DS_STENCIL_OP, ds.stencil.back.op.compare);
1611 }
1612 if (IS_SET_IN_SRC(DS_STENCIL_COMPARE_MASK)) {
1613 COPY_MEMBER(DS_STENCIL_COMPARE_MASK, ds.stencil.front.compare_mask);
1614 COPY_MEMBER(DS_STENCIL_COMPARE_MASK, ds.stencil.back.compare_mask);
1615 }
1616 if (IS_SET_IN_SRC(DS_STENCIL_WRITE_MASK)) {
1617 COPY_MEMBER(DS_STENCIL_WRITE_MASK, ds.stencil.front.write_mask);
1618 COPY_MEMBER(DS_STENCIL_WRITE_MASK, ds.stencil.back.write_mask);
1619 }
1620 if (IS_SET_IN_SRC(DS_STENCIL_REFERENCE)) {
1621 COPY_MEMBER(DS_STENCIL_REFERENCE, ds.stencil.front.reference);
1622 COPY_MEMBER(DS_STENCIL_REFERENCE, ds.stencil.back.reference);
1623 }
1624
1625 COPY_IF_SET(CB_LOGIC_OP, cb.logic_op);
1626 COPY_IF_SET(CB_COLOR_WRITE_ENABLES, cb.color_write_enables);
1627 if (IS_SET_IN_SRC(CB_BLEND_CONSTANTS))
1628 COPY_ARRAY(CB_BLEND_CONSTANTS, cb.blend_constants, 4);
1629
1630 #undef IS_SET_IN_SRC
1631 #undef MARK_DIRTY
1632 #undef COPY_MEMBER
1633 #undef COPY_ARRAY
1634 #undef COPY_IF_SET
1635
1636 for (uint32_t w = 0; w < ARRAY_SIZE(dst->dirty); w++) {
1637 /* If it's in the source but isn't set in the destination at all, mark
1638 * it dirty. It's possible that the default values just happen to equal
1639 * the value from src.
1640 */
1641 dst->dirty[w] |= src->set[w] & ~dst->set[w];
1642
1643 /* Everything that was in the source is now in the destination */
1644 dst->set[w] |= src->set[w];
1645 }
1646 }
1647
1648 void
1649 vk_cmd_set_dynamic_graphics_state(struct vk_command_buffer *cmd,
1650 const struct vk_dynamic_graphics_state *state)
1651 {
1652 vk_dynamic_graphics_state_copy(&cmd->dynamic_graphics_state, state);
1653 }
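
/* Drivers typically call this when a graphics pipeline is bound, passing the
 * pipeline's pre-baked dynamic state so that only values which actually
 * differ from the current command-buffer state end up marked dirty.
 */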
1654
1655 VKAPI_ATTR void VKAPI_CALL
1656 vk_common_CmdSetVertexInputEXT(VkCommandBuffer commandBuffer,
1657 uint32_t vertexBindingDescriptionCount,
1658 const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions,
1659 uint32_t vertexAttributeDescriptionCount,
1660 const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions)
1661 {
1662 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1663 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1664
1665 uint32_t bindings_valid = 0;
1666 for (uint32_t i = 0; i < vertexBindingDescriptionCount; i++) {
1667 const VkVertexInputBindingDescription2EXT *desc =
1668 &pVertexBindingDescriptions[i];
1669
1670 assert(desc->binding < MESA_VK_MAX_VERTEX_BINDINGS);
1671 assert(desc->stride <= MESA_VK_MAX_VERTEX_BINDING_STRIDE);
1672 assert(desc->inputRate <= UINT8_MAX);
1673
1674 const uint32_t b = desc->binding;
1675 bindings_valid |= BITFIELD_BIT(b);
1676 SET_DYN_VALUE(dyn, VI, vi->bindings[b].stride, desc->stride);
1677 SET_DYN_VALUE(dyn, VI, vi->bindings[b].input_rate, desc->inputRate);
1678 SET_DYN_VALUE(dyn, VI, vi->bindings[b].divisor, desc->divisor);
1679
1680 /* Also set vi_binding_strides in case a driver is keying off that */
1681 SET_DYN_VALUE(dyn, VI_BINDING_STRIDES,
1682 vi_binding_strides[b], desc->stride);
1683 }
1684 SET_DYN_VALUE(dyn, VI, vi->bindings_valid, bindings_valid);
1685
1686 uint32_t attributes_valid = 0;
1687 for (uint32_t i = 0; i < vertexAttributeDescriptionCount; i++) {
1688 const VkVertexInputAttributeDescription2EXT *desc =
1689 &pVertexAttributeDescriptions[i];
1690
1691 assert(desc->location < MESA_VK_MAX_VERTEX_ATTRIBUTES);
1692 assert(desc->binding < MESA_VK_MAX_VERTEX_BINDINGS);
1693 assert(bindings_valid & BITFIELD_BIT(desc->binding));
1694
1695 const uint32_t a = desc->location;
1696 attributes_valid |= BITFIELD_BIT(a);
1697 SET_DYN_VALUE(dyn, VI, vi->attributes[a].binding, desc->binding);
1698 SET_DYN_VALUE(dyn, VI, vi->attributes[a].format, desc->format);
1699 SET_DYN_VALUE(dyn, VI, vi->attributes[a].offset, desc->offset);
1700 }
1701 SET_DYN_VALUE(dyn, VI, vi->attributes_valid, attributes_valid);
1702 }
1703
1704 void
1705 vk_cmd_set_vertex_binding_strides(struct vk_command_buffer *cmd,
1706 uint32_t first_binding,
1707 uint32_t binding_count,
1708 const VkDeviceSize *strides)
1709 {
1710 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1711
1712 for (uint32_t i = 0; i < binding_count; i++) {
1713 SET_DYN_VALUE(dyn, VI_BINDING_STRIDES,
1714 vi_binding_strides[first_binding + i], strides[i]);
1715 }
1716 }
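
/* This helper exists for entry points the common code cannot implement by
 * itself; e.g. a driver's vkCmdBindVertexBuffers2 implementation can forward
 * its pStrides array here after it has handled the buffer bindings.
 */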
1717
1718 VKAPI_ATTR void VKAPI_CALL
1719 vk_common_CmdSetPrimitiveTopology(VkCommandBuffer commandBuffer,
1720 VkPrimitiveTopology primitiveTopology)
1721 {
1722 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1723 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1724
1725 SET_DYN_VALUE(dyn, IA_PRIMITIVE_TOPOLOGY,
1726 ia.primitive_topology, primitiveTopology);
1727 }
1728
1729 VKAPI_ATTR void VKAPI_CALL
1730 vk_common_CmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer,
1731 VkBool32 primitiveRestartEnable)
1732 {
1733 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1734 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1735
1736 SET_DYN_BOOL(dyn, IA_PRIMITIVE_RESTART_ENABLE,
1737 ia.primitive_restart_enable, primitiveRestartEnable);
1738 }
1739
1740 VKAPI_ATTR void VKAPI_CALL
1741 vk_common_CmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer,
1742 uint32_t patchControlPoints)
1743 {
1744 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1745 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1746
1747 SET_DYN_VALUE(dyn, TS_PATCH_CONTROL_POINTS,
1748 ts.patch_control_points, patchControlPoints);
1749 }
1750
1751 VKAPI_ATTR void VKAPI_CALL
1752 vk_common_CmdSetViewport(VkCommandBuffer commandBuffer,
1753 uint32_t firstViewport,
1754 uint32_t viewportCount,
1755 const VkViewport *pViewports)
1756 {
1757 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1758 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1759
1760 SET_DYN_ARRAY(dyn, VP_VIEWPORTS, vp.viewports,
1761 firstViewport, viewportCount, pViewports);
1762 }
1763
1764 VKAPI_ATTR void VKAPI_CALL
1765 vk_common_CmdSetViewportWithCount(VkCommandBuffer commandBuffer,
1766 uint32_t viewportCount,
1767 const VkViewport *pViewports)
1768 {
1769 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1770 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1771
1772 SET_DYN_VALUE(dyn, VP_VIEWPORT_COUNT, vp.viewport_count, viewportCount);
1773 SET_DYN_ARRAY(dyn, VP_VIEWPORTS, vp.viewports, 0, viewportCount, pViewports);
1774 }
1775
1776 VKAPI_ATTR void VKAPI_CALL
1777 vk_common_CmdSetScissor(VkCommandBuffer commandBuffer,
1778 uint32_t firstScissor,
1779 uint32_t scissorCount,
1780 const VkRect2D *pScissors)
1781 {
1782 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1783 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1784
1785 SET_DYN_ARRAY(dyn, VP_SCISSORS, vp.scissors,
1786 firstScissor, scissorCount, pScissors);
1787 }
1788
1789 VKAPI_ATTR void VKAPI_CALL
1790 vk_common_CmdSetScissorWithCount(VkCommandBuffer commandBuffer,
1791 uint32_t scissorCount,
1792 const VkRect2D *pScissors)
1793 {
1794 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1795 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1796
1797 SET_DYN_VALUE(dyn, VP_SCISSOR_COUNT, vp.scissor_count, scissorCount);
1798 SET_DYN_ARRAY(dyn, VP_SCISSORS, vp.scissors, 0, scissorCount, pScissors);
1799 }
1800
1801 VKAPI_ATTR void VKAPI_CALL
1802 vk_common_CmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer,
1803 uint32_t firstDiscardRectangle,
1804 uint32_t discardRectangleCount,
1805 const VkRect2D *pDiscardRectangles)
1806 {
1807 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1808 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1809
1810 SET_DYN_VALUE(dyn, DR_RECTANGLES, dr.rectangle_count, discardRectangleCount);
1811 SET_DYN_ARRAY(dyn, DR_RECTANGLES, dr.rectangles, firstDiscardRectangle,
1812 discardRectangleCount, pDiscardRectangles);
1813 }
1814
1815 VKAPI_ATTR void VKAPI_CALL
1816 vk_common_CmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer,
1817 VkBool32 rasterizerDiscardEnable)
1818 {
1819 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1820 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1821
1822 SET_DYN_BOOL(dyn, RS_RASTERIZER_DISCARD_ENABLE,
1823 rs.rasterizer_discard_enable, rasterizerDiscardEnable);
1824 }
1825
1826 VKAPI_ATTR void VKAPI_CALL
1827 vk_common_CmdSetCullMode(VkCommandBuffer commandBuffer,
1828 VkCullModeFlags cullMode)
1829 {
1830 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1831 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1832
1833 SET_DYN_VALUE(dyn, RS_CULL_MODE, rs.cull_mode, cullMode);
1834 }
1835
1836 VKAPI_ATTR void VKAPI_CALL
1837 vk_common_CmdSetFrontFace(VkCommandBuffer commandBuffer,
1838 VkFrontFace frontFace)
1839 {
1840 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1841 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1842
1843 SET_DYN_VALUE(dyn, RS_FRONT_FACE, rs.front_face, frontFace);
1844 }
1845
1846 VKAPI_ATTR void VKAPI_CALL
1847 vk_common_CmdSetDepthBiasEnable(VkCommandBuffer commandBuffer,
1848 VkBool32 depthBiasEnable)
1849 {
1850 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1851 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1852
1853 SET_DYN_BOOL(dyn, RS_DEPTH_BIAS_ENABLE,
1854 rs.depth_bias.enable, depthBiasEnable);
1855 }
1856
1857 VKAPI_ATTR void VKAPI_CALL
1858 vk_common_CmdSetDepthBias(VkCommandBuffer commandBuffer,
1859 float depthBiasConstantFactor,
1860 float depthBiasClamp,
1861 float depthBiasSlopeFactor)
1862 {
1863 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1864 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1865
1866 SET_DYN_VALUE(dyn, RS_DEPTH_BIAS_FACTORS,
1867 rs.depth_bias.constant, depthBiasConstantFactor);
1868 SET_DYN_VALUE(dyn, RS_DEPTH_BIAS_FACTORS,
1869 rs.depth_bias.clamp, depthBiasClamp);
1870 SET_DYN_VALUE(dyn, RS_DEPTH_BIAS_FACTORS,
1871 rs.depth_bias.slope, depthBiasSlopeFactor);
1872 }
1873
1874 VKAPI_ATTR void VKAPI_CALL
1875 vk_common_CmdSetLineWidth(VkCommandBuffer commandBuffer,
1876 float lineWidth)
1877 {
1878 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1879 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1880
1881 SET_DYN_VALUE(dyn, RS_LINE_WIDTH, rs.line.width, lineWidth);
1882 }
1883
1884 VKAPI_ATTR void VKAPI_CALL
1885 vk_common_CmdSetLineStippleEXT(VkCommandBuffer commandBuffer,
1886 uint32_t lineStippleFactor,
1887 uint16_t lineStipplePattern)
1888 {
1889 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1890 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1891
1892 SET_DYN_VALUE(dyn, RS_LINE_STIPPLE,
1893 rs.line.stipple.factor, lineStippleFactor);
1894 SET_DYN_VALUE(dyn, RS_LINE_STIPPLE,
1895 rs.line.stipple.pattern, lineStipplePattern);
1896 }
1897
1898 VKAPI_ATTR void VKAPI_CALL
1899 vk_common_CmdSetFragmentShadingRateKHR(VkCommandBuffer commandBuffer,
1900 const VkExtent2D *pFragmentSize,
1901 const VkFragmentShadingRateCombinerOpKHR combinerOps[2])
1902 {
1903 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1904 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1905
1906 SET_DYN_VALUE(dyn, FSR, fsr.fragment_size.width, pFragmentSize->width);
1907 SET_DYN_VALUE(dyn, FSR, fsr.fragment_size.height, pFragmentSize->height);
1908 SET_DYN_VALUE(dyn, FSR, fsr.combiner_ops[0], combinerOps[0]);
1909 SET_DYN_VALUE(dyn, FSR, fsr.combiner_ops[1], combinerOps[1]);
1910 }
1911
1912 VKAPI_ATTR void VKAPI_CALL
1913 vk_common_CmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
1914 const VkSampleLocationsInfoEXT *pSampleLocationsInfo)
1915 {
1916 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1917 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1918
1919 SET_DYN_VALUE(dyn, MS_SAMPLE_LOCATIONS,
1920 ms.sample_locations->per_pixel,
1921 pSampleLocationsInfo->sampleLocationsPerPixel);
1922 SET_DYN_VALUE(dyn, MS_SAMPLE_LOCATIONS,
1923 ms.sample_locations->grid_size.width,
1924 pSampleLocationsInfo->sampleLocationGridSize.width);
1925 SET_DYN_VALUE(dyn, MS_SAMPLE_LOCATIONS,
1926 ms.sample_locations->grid_size.height,
1927 pSampleLocationsInfo->sampleLocationGridSize.height);
1928
1929 assert(pSampleLocationsInfo->sampleLocationsCount ==
1930 pSampleLocationsInfo->sampleLocationsPerPixel *
1931 pSampleLocationsInfo->sampleLocationGridSize.width *
1932 pSampleLocationsInfo->sampleLocationGridSize.height);
1933
1934 assert(pSampleLocationsInfo->sampleLocationsCount <=
1935 MESA_VK_MAX_SAMPLE_LOCATIONS);
1936
1937 SET_DYN_ARRAY(dyn, MS_SAMPLE_LOCATIONS,
1938 ms.sample_locations->locations,
1939 0, pSampleLocationsInfo->sampleLocationsCount,
1940 pSampleLocationsInfo->pSampleLocations);
1941 }
1942
1943 VKAPI_ATTR void VKAPI_CALL
1944 vk_common_CmdSetDepthTestEnable(VkCommandBuffer commandBuffer,
1945 VkBool32 depthTestEnable)
1946 {
1947 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1948 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1949
1950 SET_DYN_BOOL(dyn, DS_DEPTH_TEST_ENABLE,
1951 ds.depth.test_enable, depthTestEnable);
1952 }
1953
1954 VKAPI_ATTR void VKAPI_CALL
1955 vk_common_CmdSetDepthWriteEnable(VkCommandBuffer commandBuffer,
1956 VkBool32 depthWriteEnable)
1957 {
1958 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1959 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1960
1961 SET_DYN_BOOL(dyn, DS_DEPTH_WRITE_ENABLE,
1962 ds.depth.write_enable, depthWriteEnable);
1963 }
1964
1965 VKAPI_ATTR void VKAPI_CALL
1966 vk_common_CmdSetDepthCompareOp(VkCommandBuffer commandBuffer,
1967 VkCompareOp depthCompareOp)
1968 {
1969 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1970 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1971
1972 SET_DYN_VALUE(dyn, DS_DEPTH_COMPARE_OP, ds.depth.compare_op,
1973 depthCompareOp);
1974 }
1975
1976 VKAPI_ATTR void VKAPI_CALL
1977 vk_common_CmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer,
1978 VkBool32 depthBoundsTestEnable)
1979 {
1980 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1981 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1982
1983 SET_DYN_BOOL(dyn, DS_DEPTH_BOUNDS_TEST_ENABLE,
1984 ds.depth.bounds_test.enable, depthBoundsTestEnable);
1985 }
1986
1987 VKAPI_ATTR void VKAPI_CALL
1988 vk_common_CmdSetDepthBounds(VkCommandBuffer commandBuffer,
1989 float minDepthBounds,
1990 float maxDepthBounds)
1991 {
1992 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
1993 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
1994
1995 SET_DYN_VALUE(dyn, DS_DEPTH_BOUNDS_TEST_BOUNDS,
1996 ds.depth.bounds_test.min, minDepthBounds);
1997 SET_DYN_VALUE(dyn, DS_DEPTH_BOUNDS_TEST_BOUNDS,
1998 ds.depth.bounds_test.max, maxDepthBounds);
1999 }
2000
2001 VKAPI_ATTR void VKAPI_CALL
2002 vk_common_CmdSetStencilTestEnable(VkCommandBuffer commandBuffer,
2003 VkBool32 stencilTestEnable)
2004 {
2005 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
2006 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
2007
2008 SET_DYN_BOOL(dyn, DS_STENCIL_TEST_ENABLE,
2009 ds.stencil.test_enable, stencilTestEnable);
2010 }
2011
2012 VKAPI_ATTR void VKAPI_CALL
2013 vk_common_CmdSetStencilOp(VkCommandBuffer commandBuffer,
2014 VkStencilFaceFlags faceMask,
2015 VkStencilOp failOp,
2016 VkStencilOp passOp,
2017 VkStencilOp depthFailOp,
2018 VkCompareOp compareOp)
2019 {
2020 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
2021 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
2022
2023 if (faceMask & VK_STENCIL_FACE_FRONT_BIT) {
2024 SET_DYN_VALUE(dyn, DS_STENCIL_OP, ds.stencil.front.op.fail, failOp);
2025 SET_DYN_VALUE(dyn, DS_STENCIL_OP, ds.stencil.front.op.pass, passOp);
2026 SET_DYN_VALUE(dyn, DS_STENCIL_OP, ds.stencil.front.op.depth_fail, depthFailOp);
2027 SET_DYN_VALUE(dyn, DS_STENCIL_OP, ds.stencil.front.op.compare, compareOp);
2028 }
2029
2030 if (faceMask & VK_STENCIL_FACE_BACK_BIT) {
2031 SET_DYN_VALUE(dyn, DS_STENCIL_OP, ds.stencil.back.op.fail, failOp);
2032 SET_DYN_VALUE(dyn, DS_STENCIL_OP, ds.stencil.back.op.pass, passOp);
2033 SET_DYN_VALUE(dyn, DS_STENCIL_OP, ds.stencil.back.op.depth_fail, depthFailOp);
2034 SET_DYN_VALUE(dyn, DS_STENCIL_OP, ds.stencil.back.op.compare, compareOp);
2035 }
2036 }
2037
2038 VKAPI_ATTR void VKAPI_CALL
2039 vk_common_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
2040 VkStencilFaceFlags faceMask,
2041 uint32_t compareMask)
2042 {
2043 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
2044 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
2045
2046 /* We assume 8-bit stencil always */
2047 STATIC_ASSERT(sizeof(dyn->ds.stencil.front.compare_mask) == 1);
2048
2049 if (faceMask & VK_STENCIL_FACE_FRONT_BIT) {
2050 SET_DYN_VALUE(dyn, DS_STENCIL_COMPARE_MASK,
2051 ds.stencil.front.compare_mask, (uint8_t)compareMask);
2052 }
2053 if (faceMask & VK_STENCIL_FACE_BACK_BIT) {
2054 SET_DYN_VALUE(dyn, DS_STENCIL_COMPARE_MASK,
2055 ds.stencil.back.compare_mask, (uint8_t)compareMask);
2056 }
2057 }
2058
2059 VKAPI_ATTR void VKAPI_CALL
2060 vk_common_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
2061 VkStencilFaceFlags faceMask,
2062 uint32_t writeMask)
2063 {
2064 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
2065 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
2066
2067 /* We assume 8-bit stencil always */
2068 STATIC_ASSERT(sizeof(dyn->ds.stencil.front.write_mask) == 1);
2069
2070 if (faceMask & VK_STENCIL_FACE_FRONT_BIT) {
2071 SET_DYN_VALUE(dyn, DS_STENCIL_WRITE_MASK,
2072 ds.stencil.front.write_mask, (uint8_t)writeMask);
2073 }
2074 if (faceMask & VK_STENCIL_FACE_BACK_BIT) {
2075 SET_DYN_VALUE(dyn, DS_STENCIL_WRITE_MASK,
2076 ds.stencil.back.write_mask, (uint8_t)writeMask);
2077 }
2078 }
2079
2080 VKAPI_ATTR void VKAPI_CALL
2081 vk_common_CmdSetStencilReference(VkCommandBuffer commandBuffer,
2082 VkStencilFaceFlags faceMask,
2083 uint32_t reference)
2084 {
2085 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
2086 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
2087
2088 /* We assume 8-bit stencil always */
2089 STATIC_ASSERT(sizeof(dyn->ds.stencil.front.reference) == 1);
2090
2091 if (faceMask & VK_STENCIL_FACE_FRONT_BIT) {
2092 SET_DYN_VALUE(dyn, DS_STENCIL_REFERENCE,
2093 ds.stencil.front.reference, (uint8_t)reference);
2094 }
2095 if (faceMask & VK_STENCIL_FACE_BACK_BIT) {
2096 SET_DYN_VALUE(dyn, DS_STENCIL_REFERENCE,
2097 ds.stencil.back.reference, (uint8_t)reference);
2098 }
2099 }
2100
2101 VKAPI_ATTR void VKAPI_CALL
2102 vk_common_CmdSetLogicOpEXT(VkCommandBuffer commandBuffer,
2103 VkLogicOp logicOp)
2104 {
2105 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
2106 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
2107
2108 SET_DYN_VALUE(dyn, CB_LOGIC_OP, cb.logic_op, logicOp);
2109 }
2110
2111 VKAPI_ATTR void VKAPI_CALL
2112 vk_common_CmdSetColorWriteEnableEXT(VkCommandBuffer commandBuffer,
2113 uint32_t attachmentCount,
2114 const VkBool32 *pColorWriteEnables)
2115 {
2116 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
2117 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
2118
2119 assert(attachmentCount <= MESA_VK_MAX_COLOR_ATTACHMENTS);
2120
2121 uint8_t color_write_enables = 0;
2122 for (uint32_t a = 0; a < attachmentCount; a++) {
2123 if (pColorWriteEnables[a])
2124 color_write_enables |= BITFIELD_BIT(a);
2125 }
2126
2127 SET_DYN_VALUE(dyn, CB_COLOR_WRITE_ENABLES,
2128 cb.color_write_enables, color_write_enables);
2129 }
2130
2131 VKAPI_ATTR void VKAPI_CALL
2132 vk_common_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
2133 const float blendConstants[4])
2134 {
2135 VK_FROM_HANDLE(vk_command_buffer, cmd, commandBuffer);
2136 struct vk_dynamic_graphics_state *dyn = &cmd->dynamic_graphics_state;
2137
2138 SET_DYN_ARRAY(dyn, CB_BLEND_CONSTANTS, cb.blend_constants,
2139 0, 4, blendConstants);
2140 }
2141