/*
 * Copyright 2018 Collabora Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "zink_state.h"

#include "zink_context.h"
#include "zink_format.h"
#include "zink_program.h"
#include "zink_screen.h"

#include "compiler/shader_enums.h"
#include "util/u_dual_blend.h"
#include "util/u_memory.h"
#include "util/u_helpers.h"
#include "vulkan/util/vk_format.h"

#include <math.h>

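/* Translate a gallium vertex-elements CSO into Vulkan vertex input state:
 * vertex buffer indices are compacted into contiguous bindings, instance
 * divisors are clamped to the device limit, and formats the device cannot
 * fetch directly are decomposed into per-channel attributes.
 */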
static void *
zink_create_vertex_elements_state(struct pipe_context *pctx,
                                  unsigned num_elements,
                                  const struct pipe_vertex_element *elements)
{
   struct zink_screen *screen = zink_screen(pctx->screen);
   unsigned int i;
   struct zink_vertex_elements_state *ves = CALLOC_STRUCT(zink_vertex_elements_state);
   if (!ves)
      return NULL;
   ves->hw_state.hash = _mesa_hash_pointer(ves);

   int buffer_map[PIPE_MAX_ATTRIBS];
   for (int i = 0; i < ARRAY_SIZE(buffer_map); ++i)
      buffer_map[i] = -1;

   int num_bindings = 0;
   unsigned num_decomposed = 0;
   uint32_t size8 = 0;
   uint32_t size16 = 0;
   uint32_t size32 = 0;
   for (i = 0; i < num_elements; ++i) {
      const struct pipe_vertex_element *elem = elements + i;

      int binding = elem->vertex_buffer_index;
      if (buffer_map[binding] < 0) {
         ves->binding_map[num_bindings] = binding;
         buffer_map[binding] = num_bindings++;
      }
      binding = buffer_map[binding];

      ves->bindings[binding].binding = binding;
      ves->bindings[binding].inputRate = elem->instance_divisor ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX;

      assert(!elem->instance_divisor || zink_screen(pctx->screen)->info.have_EXT_vertex_attribute_divisor);
      if (elem->instance_divisor > screen->info.vdiv_props.maxVertexAttribDivisor)
         debug_printf("zink: clamping instance divisor %u to %u\n", elem->instance_divisor, screen->info.vdiv_props.maxVertexAttribDivisor);
      ves->divisor[binding] = MIN2(elem->instance_divisor, screen->info.vdiv_props.maxVertexAttribDivisor);

      VkFormat format;
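      /* Prefer the native Vulkan format; if the device cannot fetch it as a
       * vertex buffer, fall back to a decomposed per-channel format and
       * record which attributes (and which channel size) were rewritten.
       */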
      if (screen->format_props[elem->src_format].bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT)
         format = zink_get_format(screen, elem->src_format);
      else {
         enum pipe_format new_format = zink_decompose_vertex_format(elem->src_format);
         assert(new_format);
         num_decomposed++;
         assert(screen->format_props[new_format].bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT);
         if (util_format_get_blocksize(new_format) == 4)
            size32 |= BITFIELD_BIT(i);
         else if (util_format_get_blocksize(new_format) == 2)
            size16 |= BITFIELD_BIT(i);
         else
            size8 |= BITFIELD_BIT(i);
         format = zink_get_format(screen, new_format);
         unsigned size;
         if (i < 8)
            size = 1;
         else if (i < 16)
            size = 2;
         else
            size = 4;
         if (util_format_get_nr_components(elem->src_format) == 4) {
            ves->decomposed_attrs |= BITFIELD_BIT(i);
            ves->decomposed_attrs_size = size;
         } else {
            ves->decomposed_attrs_without_w |= BITFIELD_BIT(i);
            ves->decomposed_attrs_without_w_size = size;
         }
      }

      if (screen->info.have_EXT_vertex_input_dynamic_state) {
         ves->hw_state.dynattribs[i].sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT;
         ves->hw_state.dynattribs[i].binding = binding;
         ves->hw_state.dynattribs[i].location = i;
         ves->hw_state.dynattribs[i].format = format;
         assert(ves->hw_state.dynattribs[i].format != VK_FORMAT_UNDEFINED);
         ves->hw_state.dynattribs[i].offset = elem->src_offset;
      } else {
         ves->hw_state.attribs[i].binding = binding;
         ves->hw_state.attribs[i].location = i;
         ves->hw_state.attribs[i].format = format;
         assert(ves->hw_state.attribs[i].format != VK_FORMAT_UNDEFINED);
         ves->hw_state.attribs[i].offset = elem->src_offset;
         ves->min_stride[binding] = MAX2(ves->min_stride[binding], elem->src_offset + vk_format_get_blocksize(format));
      }
   }
   assert(num_decomposed + num_elements <= PIPE_MAX_ATTRIBS);
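   /* Each decomposed element keeps its original location for channel 0 and
    * appends one additional attribute per extra channel at the end of the
    * attribute array, offset by the channel size.
    */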
   u_foreach_bit(i, ves->decomposed_attrs | ves->decomposed_attrs_without_w) {
      const struct pipe_vertex_element *elem = elements + i;
      const struct util_format_description *desc = util_format_description(elem->src_format);
      unsigned size = 1;
      if (size32 & BITFIELD_BIT(i))
         size = 4;
      else if (size16 & BITFIELD_BIT(i))
         size = 2;
      else
         assert(size8 & BITFIELD_BIT(i));
      for (unsigned j = 1; j < desc->nr_channels; j++) {
         if (screen->info.have_EXT_vertex_input_dynamic_state) {
            memcpy(&ves->hw_state.dynattribs[num_elements], &ves->hw_state.dynattribs[i], sizeof(VkVertexInputAttributeDescription2EXT));
            ves->hw_state.dynattribs[num_elements].location = num_elements;
            ves->hw_state.dynattribs[num_elements].offset += j * size;
         } else {
            memcpy(&ves->hw_state.attribs[num_elements], &ves->hw_state.attribs[i], sizeof(VkVertexInputAttributeDescription));
            ves->hw_state.attribs[num_elements].location = num_elements;
            ves->hw_state.attribs[num_elements].offset += j * size;
         }
         num_elements++;
      }
   }
   ves->hw_state.num_bindings = num_bindings;
   ves->hw_state.num_attribs = num_elements;
   if (screen->info.have_EXT_vertex_input_dynamic_state) {
      for (int i = 0; i < num_bindings; ++i) {
         ves->hw_state.dynbindings[i].sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT;
         ves->hw_state.dynbindings[i].binding = ves->bindings[i].binding;
         ves->hw_state.dynbindings[i].inputRate = ves->bindings[i].inputRate;
         if (ves->divisor[i])
            ves->hw_state.dynbindings[i].divisor = ves->divisor[i];
         else
            ves->hw_state.dynbindings[i].divisor = 1;
      }
   } else {
      for (int i = 0; i < num_bindings; ++i) {
         ves->hw_state.b.bindings[i].binding = ves->bindings[i].binding;
         ves->hw_state.b.bindings[i].inputRate = ves->bindings[i].inputRate;
         if (ves->divisor[i]) {
            ves->hw_state.b.divisors[ves->hw_state.b.divisors_present].divisor = ves->divisor[i];
            ves->hw_state.b.divisors[ves->hw_state.b.divisors_present].binding = ves->bindings[i].binding;
            ves->hw_state.b.divisors_present++;
         }
      }
   }
   return ves;
}

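/* Bind a vertex-elements CSO: point the gfx pipeline state at the baked
 * Vulkan input state and, if any attributes were decomposed, mirror that
 * information into the vertex shader key so a matching shader variant is
 * selected.
 */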
static void
zink_bind_vertex_elements_state(struct pipe_context *pctx,
                                void *cso)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_gfx_pipeline_state *state = &ctx->gfx_pipeline_state;
   ctx->element_state = cso;
   if (cso) {
      if (state->element_state != &ctx->element_state->hw_state) {
         ctx->vertex_state_changed = !zink_screen(pctx->screen)->info.have_EXT_vertex_input_dynamic_state;
         ctx->vertex_buffers_dirty = ctx->element_state->hw_state.num_bindings > 0;
      }
      const struct zink_vs_key *vs = zink_get_vs_key(ctx);
      uint32_t decomposed_attrs = 0, decomposed_attrs_without_w = 0;
      switch (vs->size) {
      case 1:
         decomposed_attrs = vs->u8.decomposed_attrs;
         decomposed_attrs_without_w = vs->u8.decomposed_attrs_without_w;
         break;
      case 2:
         decomposed_attrs = vs->u16.decomposed_attrs;
         decomposed_attrs_without_w = vs->u16.decomposed_attrs_without_w;
         break;
      case 4:
         decomposed_attrs = vs->u32.decomposed_attrs;
         decomposed_attrs_without_w = vs->u32.decomposed_attrs_without_w;
         break;
      }
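      /* If the bound elements decompose a different set of attributes than the
       * current shader key records, update the key (and its packed size) so the
       * right vertex shader variant gets used.
       */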
      if (ctx->element_state->decomposed_attrs != decomposed_attrs ||
          ctx->element_state->decomposed_attrs_without_w != decomposed_attrs_without_w) {
         unsigned size = MAX2(ctx->element_state->decomposed_attrs_size, ctx->element_state->decomposed_attrs_without_w_size);
         struct zink_shader_key *key = (struct zink_shader_key *)zink_set_vs_key(ctx);
         key->size -= 2 * key->key.vs.size;
         switch (size) {
         case 1:
            key->key.vs.u8.decomposed_attrs = ctx->element_state->decomposed_attrs;
            key->key.vs.u8.decomposed_attrs_without_w = ctx->element_state->decomposed_attrs_without_w;
            break;
         case 2:
            key->key.vs.u16.decomposed_attrs = ctx->element_state->decomposed_attrs;
            key->key.vs.u16.decomposed_attrs_without_w = ctx->element_state->decomposed_attrs_without_w;
            break;
         case 4:
            key->key.vs.u32.decomposed_attrs = ctx->element_state->decomposed_attrs;
            key->key.vs.u32.decomposed_attrs_without_w = ctx->element_state->decomposed_attrs_without_w;
            break;
         default: break;
         }
         key->key.vs.size = size;
         key->size += 2 * size;
      }
      state->element_state = &ctx->element_state->hw_state;
   } else {
      state->element_state = NULL;
      ctx->vertex_buffers_dirty = false;
   }
}

static void
zink_delete_vertex_elements_state(struct pipe_context *pctx,
                                  void *ves)
{
   FREE(ves);
}

static VkBlendFactor
blend_factor(enum pipe_blendfactor factor)
{
   switch (factor) {
   case PIPE_BLENDFACTOR_ONE: return VK_BLEND_FACTOR_ONE;
   case PIPE_BLENDFACTOR_SRC_COLOR: return VK_BLEND_FACTOR_SRC_COLOR;
   case PIPE_BLENDFACTOR_SRC_ALPHA: return VK_BLEND_FACTOR_SRC_ALPHA;
   case PIPE_BLENDFACTOR_DST_ALPHA: return VK_BLEND_FACTOR_DST_ALPHA;
   case PIPE_BLENDFACTOR_DST_COLOR: return VK_BLEND_FACTOR_DST_COLOR;
   case PIPE_BLENDFACTOR_SRC_ALPHA_SATURATE:
      return VK_BLEND_FACTOR_SRC_ALPHA_SATURATE;
   case PIPE_BLENDFACTOR_CONST_COLOR: return VK_BLEND_FACTOR_CONSTANT_COLOR;
   case PIPE_BLENDFACTOR_CONST_ALPHA: return VK_BLEND_FACTOR_CONSTANT_ALPHA;
   case PIPE_BLENDFACTOR_SRC1_COLOR: return VK_BLEND_FACTOR_SRC1_COLOR;
   case PIPE_BLENDFACTOR_SRC1_ALPHA: return VK_BLEND_FACTOR_SRC1_ALPHA;

   case PIPE_BLENDFACTOR_ZERO: return VK_BLEND_FACTOR_ZERO;

   case PIPE_BLENDFACTOR_INV_SRC_COLOR:
      return VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
   case PIPE_BLENDFACTOR_INV_SRC_ALPHA:
      return VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
   case PIPE_BLENDFACTOR_INV_DST_ALPHA:
      return VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA;
   case PIPE_BLENDFACTOR_INV_DST_COLOR:
      return VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR;

   case PIPE_BLENDFACTOR_INV_CONST_COLOR:
      return VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR;
   case PIPE_BLENDFACTOR_INV_CONST_ALPHA:
      return VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA;
   case PIPE_BLENDFACTOR_INV_SRC1_COLOR:
      return VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR;
   case PIPE_BLENDFACTOR_INV_SRC1_ALPHA:
      return VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA;
   }
   unreachable("unexpected blend factor");
}


static bool
need_blend_constants(enum pipe_blendfactor factor)
{
   switch (factor) {
   case PIPE_BLENDFACTOR_CONST_COLOR:
   case PIPE_BLENDFACTOR_CONST_ALPHA:
   case PIPE_BLENDFACTOR_INV_CONST_COLOR:
   case PIPE_BLENDFACTOR_INV_CONST_ALPHA:
      return true;

   default:
      return false;
   }
}

static VkBlendOp
blend_op(enum pipe_blend_func func)
{
   switch (func) {
   case PIPE_BLEND_ADD: return VK_BLEND_OP_ADD;
   case PIPE_BLEND_SUBTRACT: return VK_BLEND_OP_SUBTRACT;
   case PIPE_BLEND_REVERSE_SUBTRACT: return VK_BLEND_OP_REVERSE_SUBTRACT;
   case PIPE_BLEND_MIN: return VK_BLEND_OP_MIN;
   case PIPE_BLEND_MAX: return VK_BLEND_OP_MAX;
   }
   unreachable("unexpected blend function");
}

static VkLogicOp
logic_op(enum pipe_logicop func)
{
   switch (func) {
   case PIPE_LOGICOP_CLEAR: return VK_LOGIC_OP_CLEAR;
   case PIPE_LOGICOP_NOR: return VK_LOGIC_OP_NOR;
   case PIPE_LOGICOP_AND_INVERTED: return VK_LOGIC_OP_AND_INVERTED;
   case PIPE_LOGICOP_COPY_INVERTED: return VK_LOGIC_OP_COPY_INVERTED;
   case PIPE_LOGICOP_AND_REVERSE: return VK_LOGIC_OP_AND_REVERSE;
   case PIPE_LOGICOP_INVERT: return VK_LOGIC_OP_INVERT;
   case PIPE_LOGICOP_XOR: return VK_LOGIC_OP_XOR;
   case PIPE_LOGICOP_NAND: return VK_LOGIC_OP_NAND;
   case PIPE_LOGICOP_AND: return VK_LOGIC_OP_AND;
   case PIPE_LOGICOP_EQUIV: return VK_LOGIC_OP_EQUIVALENT;
   case PIPE_LOGICOP_NOOP: return VK_LOGIC_OP_NO_OP;
   case PIPE_LOGICOP_OR_INVERTED: return VK_LOGIC_OP_OR_INVERTED;
   case PIPE_LOGICOP_COPY: return VK_LOGIC_OP_COPY;
   case PIPE_LOGICOP_OR_REVERSE: return VK_LOGIC_OP_OR_REVERSE;
   case PIPE_LOGICOP_OR: return VK_LOGIC_OP_OR;
   case PIPE_LOGICOP_SET: return VK_LOGIC_OP_SET;
   }
   unreachable("unexpected logicop function");
}

/* from iris */
static enum pipe_blendfactor
fix_blendfactor(enum pipe_blendfactor f, bool alpha_to_one)
{
   if (alpha_to_one) {
      if (f == PIPE_BLENDFACTOR_SRC1_ALPHA)
         return PIPE_BLENDFACTOR_ONE;

      if (f == PIPE_BLENDFACTOR_INV_SRC1_ALPHA)
         return PIPE_BLENDFACTOR_ZERO;
   }

   return f;
}

static void *
zink_create_blend_state(struct pipe_context *pctx,
                        const struct pipe_blend_state *blend_state)
{
   struct zink_blend_state *cso = CALLOC_STRUCT(zink_blend_state);
   if (!cso)
      return NULL;
   cso->hash = _mesa_hash_pointer(cso);

   if (blend_state->logicop_enable) {
      cso->logicop_enable = VK_TRUE;
      cso->logicop_func = logic_op(blend_state->logicop_func);
   }

   /* TODO: figure out what to do with dither (doing nothing is probably "OK"
    * for now, as dithering is undefined in GL).
    */

   /* TODO: these are multisampling-state, and should be set there instead of
    * here, as that's closer tied to the update-frequency.
    */
   cso->alpha_to_coverage = blend_state->alpha_to_coverage;
   cso->alpha_to_one = blend_state->alpha_to_one;

   cso->need_blend_constants = false;

   for (int i = 0; i < blend_state->max_rt + 1; ++i) {
      const struct pipe_rt_blend_state *rt = blend_state->rt;
      if (blend_state->independent_blend_enable)
         rt = blend_state->rt + i;

      VkPipelineColorBlendAttachmentState att = {0};

      if (rt->blend_enable) {
         att.blendEnable = VK_TRUE;
         att.srcColorBlendFactor = blend_factor(fix_blendfactor(rt->rgb_src_factor, cso->alpha_to_one));
         att.dstColorBlendFactor = blend_factor(fix_blendfactor(rt->rgb_dst_factor, cso->alpha_to_one));
         att.colorBlendOp = blend_op(rt->rgb_func);
         att.srcAlphaBlendFactor = blend_factor(fix_blendfactor(rt->alpha_src_factor, cso->alpha_to_one));
         att.dstAlphaBlendFactor = blend_factor(fix_blendfactor(rt->alpha_dst_factor, cso->alpha_to_one));
         att.alphaBlendOp = blend_op(rt->alpha_func);

         if (need_blend_constants(rt->rgb_src_factor) ||
             need_blend_constants(rt->rgb_dst_factor) ||
             need_blend_constants(rt->alpha_src_factor) ||
             need_blend_constants(rt->alpha_dst_factor))
            cso->need_blend_constants = true;
      }

      if (rt->colormask & PIPE_MASK_R)
         att.colorWriteMask |= VK_COLOR_COMPONENT_R_BIT;
      if (rt->colormask & PIPE_MASK_G)
         att.colorWriteMask |= VK_COLOR_COMPONENT_G_BIT;
      if (rt->colormask & PIPE_MASK_B)
         att.colorWriteMask |= VK_COLOR_COMPONENT_B_BIT;
      if (rt->colormask & PIPE_MASK_A)
         att.colorWriteMask |= VK_COLOR_COMPONENT_A_BIT;

      cso->attachments[i] = att;
   }
   cso->dual_src_blend = util_blend_state_is_dual(blend_state, 0);

   return cso;
}

static void
zink_bind_blend_state(struct pipe_context *pctx, void *cso)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_gfx_pipeline_state *state = &zink_context(pctx)->gfx_pipeline_state;
   struct zink_blend_state *blend = cso;

   if (state->blend_state != cso) {
      state->blend_state = cso;
      state->blend_id = blend ? blend->hash : 0;
      state->dirty = true;
      bool force_dual_color_blend = zink_screen(pctx->screen)->driconf.dual_color_blend_by_location &&
                                    blend && blend->dual_src_blend && state->blend_state->attachments[0].blendEnable;
      if (force_dual_color_blend != zink_get_fs_key(ctx)->force_dual_color_blend)
         zink_set_fs_key(ctx)->force_dual_color_blend = force_dual_color_blend;
      ctx->blend_state_changed = true;
   }
}

static void
zink_delete_blend_state(struct pipe_context *pctx, void *blend_state)
{
   FREE(blend_state);
}

static VkCompareOp
compare_op(enum pipe_compare_func func)
{
   switch (func) {
   case PIPE_FUNC_NEVER: return VK_COMPARE_OP_NEVER;
   case PIPE_FUNC_LESS: return VK_COMPARE_OP_LESS;
   case PIPE_FUNC_EQUAL: return VK_COMPARE_OP_EQUAL;
   case PIPE_FUNC_LEQUAL: return VK_COMPARE_OP_LESS_OR_EQUAL;
   case PIPE_FUNC_GREATER: return VK_COMPARE_OP_GREATER;
   case PIPE_FUNC_NOTEQUAL: return VK_COMPARE_OP_NOT_EQUAL;
   case PIPE_FUNC_GEQUAL: return VK_COMPARE_OP_GREATER_OR_EQUAL;
   case PIPE_FUNC_ALWAYS: return VK_COMPARE_OP_ALWAYS;
   }
   unreachable("unexpected func");
}

static VkStencilOp
stencil_op(enum pipe_stencil_op op)
{
   switch (op) {
   case PIPE_STENCIL_OP_KEEP: return VK_STENCIL_OP_KEEP;
   case PIPE_STENCIL_OP_ZERO: return VK_STENCIL_OP_ZERO;
   case PIPE_STENCIL_OP_REPLACE: return VK_STENCIL_OP_REPLACE;
   case PIPE_STENCIL_OP_INCR: return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
   case PIPE_STENCIL_OP_DECR: return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
   case PIPE_STENCIL_OP_INCR_WRAP: return VK_STENCIL_OP_INCREMENT_AND_WRAP;
   case PIPE_STENCIL_OP_DECR_WRAP: return VK_STENCIL_OP_DECREMENT_AND_WRAP;
   case PIPE_STENCIL_OP_INVERT: return VK_STENCIL_OP_INVERT;
   }
   unreachable("unexpected op");
}

static VkStencilOpState
stencil_op_state(const struct pipe_stencil_state *src)
{
   VkStencilOpState ret;
   ret.failOp = stencil_op(src->fail_op);
   ret.passOp = stencil_op(src->zpass_op);
   ret.depthFailOp = stencil_op(src->zfail_op);
   ret.compareOp = compare_op(src->func);
   ret.compareMask = src->valuemask;
   ret.writeMask = src->writemask;
   ret.reference = 0; // not used: we'll use a dynamic state for this
   return ret;
}

static void *
zink_create_depth_stencil_alpha_state(struct pipe_context *pctx,
                                      const struct pipe_depth_stencil_alpha_state *depth_stencil_alpha)
{
   struct zink_depth_stencil_alpha_state *cso = CALLOC_STRUCT(zink_depth_stencil_alpha_state);
   if (!cso)
      return NULL;

   cso->base = *depth_stencil_alpha;

   if (depth_stencil_alpha->depth_enabled) {
      cso->hw_state.depth_test = VK_TRUE;
      cso->hw_state.depth_compare_op = compare_op(depth_stencil_alpha->depth_func);
   }

   if (depth_stencil_alpha->depth_bounds_test) {
      cso->hw_state.depth_bounds_test = VK_TRUE;
      cso->hw_state.min_depth_bounds = depth_stencil_alpha->depth_bounds_min;
      cso->hw_state.max_depth_bounds = depth_stencil_alpha->depth_bounds_max;
   }

   if (depth_stencil_alpha->stencil[0].enabled) {
      cso->hw_state.stencil_test = VK_TRUE;
      cso->hw_state.stencil_front = stencil_op_state(depth_stencil_alpha->stencil);
   }

   if (depth_stencil_alpha->stencil[1].enabled)
      cso->hw_state.stencil_back = stencil_op_state(depth_stencil_alpha->stencil + 1);
   else
      cso->hw_state.stencil_back = cso->hw_state.stencil_front;

   cso->hw_state.depth_write = depth_stencil_alpha->depth_writemask;

   return cso;
}

static void
zink_bind_depth_stencil_alpha_state(struct pipe_context *pctx, void *cso)
{
   struct zink_context *ctx = zink_context(pctx);

   bool prev_zwrite = ctx->dsa_state ? ctx->dsa_state->hw_state.depth_write : false;
   ctx->dsa_state = cso;

   if (cso) {
      struct zink_gfx_pipeline_state *state = &ctx->gfx_pipeline_state;
      if (state->dyn_state1.depth_stencil_alpha_state != &ctx->dsa_state->hw_state) {
         state->dyn_state1.depth_stencil_alpha_state = &ctx->dsa_state->hw_state;
         state->dirty |= !zink_screen(pctx->screen)->info.have_EXT_extended_dynamic_state;
         ctx->dsa_state_changed = true;
      }
   }
   if (prev_zwrite != (ctx->dsa_state ? ctx->dsa_state->hw_state.depth_write : false)) {
      /* flag renderpass for re-check on next draw */
      ctx->rp_layout_changed = true;
   }
}

static void
zink_delete_depth_stencil_alpha_state(struct pipe_context *pctx,
                                      void *depth_stencil_alpha)
{
   FREE(depth_stencil_alpha);
}

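/* Vulkan line widths must be a multiple of lineWidthGranularity and fall
 * within lineWidthRange, so quantize and clamp the requested width to the
 * device limits.
 */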
static float
round_to_granularity(float value, float granularity)
{
   return roundf(value / granularity) * granularity;
}

static float
line_width(float width, float granularity, const float range[2])
{
   assert(granularity >= 0);
   assert(range[0] <= range[1]);

   if (granularity > 0)
      width = round_to_granularity(width, granularity);

   return CLAMP(width, range[0], range[1]);
}

static void *
zink_create_rasterizer_state(struct pipe_context *pctx,
                             const struct pipe_rasterizer_state *rs_state)
{
   struct zink_screen *screen = zink_screen(pctx->screen);

   struct zink_rasterizer_state *state = CALLOC_STRUCT(zink_rasterizer_state);
   if (!state)
      return NULL;

   state->base = *rs_state;
   state->base.line_stipple_factor++;
   state->hw_state.line_stipple_enable = rs_state->line_stipple_enable;

   assert(rs_state->depth_clip_far == rs_state->depth_clip_near);
   state->hw_state.depth_clip = rs_state->depth_clip_near;
   state->hw_state.force_persample_interp = rs_state->force_persample_interp;
   state->hw_state.pv_last = !rs_state->flatshade_first;
   state->hw_state.clip_halfz = rs_state->clip_halfz;

   assert(rs_state->fill_front <= PIPE_POLYGON_MODE_POINT);
   if (rs_state->fill_back != rs_state->fill_front)
      debug_printf("BUG: vulkan doesn't support different front and back fill modes\n");
   state->hw_state.polygon_mode = rs_state->fill_front; // same values
   state->cull_mode = rs_state->cull_face; // same bits

   state->front_face = rs_state->front_ccw ?
                       VK_FRONT_FACE_COUNTER_CLOCKWISE :
                       VK_FRONT_FACE_CLOCKWISE;

   state->hw_state.line_mode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
   if (rs_state->line_rectangular) {
      if (rs_state->line_smooth)
         state->hw_state.line_mode = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT;
      else
         state->hw_state.line_mode = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT;
   } else {
      state->hw_state.line_mode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
   }

   if (!rs_state->line_stipple_enable) {
      state->base.line_stipple_factor = 1;
      state->base.line_stipple_pattern = UINT16_MAX;
   }

   state->offset_point = rs_state->offset_point;
   state->offset_line = rs_state->offset_line;
   state->offset_tri = rs_state->offset_tri;
   state->offset_units = rs_state->offset_units;
   if (!rs_state->offset_units_unscaled)
      state->offset_units *= 2;
   state->offset_clamp = rs_state->offset_clamp;
   state->offset_scale = rs_state->offset_scale;

   state->line_width = line_width(rs_state->line_width,
                                  screen->info.props.limits.lineWidthGranularity,
                                  screen->info.props.limits.lineWidthRange);

   return state;
}

static void
zink_bind_rasterizer_state(struct pipe_context *pctx, void *cso)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   bool point_quad_rasterization = ctx->rast_state ? ctx->rast_state->base.point_quad_rasterization : false;
   bool scissor = ctx->rast_state ? ctx->rast_state->base.scissor : false;
   bool pv_last = ctx->rast_state ? ctx->rast_state->hw_state.pv_last : false;
   bool force_persample_interp = ctx->rast_state ? ctx->rast_state->hw_state.force_persample_interp : false;
   bool clip_halfz = ctx->rast_state ? ctx->rast_state->hw_state.clip_halfz : false;
   bool rasterizer_discard = ctx->rast_state ? ctx->rast_state->base.rasterizer_discard : false;
   bool half_pixel_center = ctx->rast_state ? ctx->rast_state->base.half_pixel_center : true;
   ctx->rast_state = cso;

   if (ctx->rast_state) {
      if (screen->info.have_EXT_provoking_vertex &&
          pv_last != ctx->rast_state->hw_state.pv_last &&
          /* without this prop, change in pv mode requires new rp */
          !screen->info.pv_props.provokingVertexModePerPipeline)
         zink_batch_no_rp(ctx);
      uint32_t rast_bits = 0;
      memcpy(&rast_bits, &ctx->rast_state->hw_state, sizeof(struct zink_rasterizer_hw_state));
      ctx->gfx_pipeline_state.rast_state = rast_bits & BITFIELD_MASK(ZINK_RAST_HW_STATE_SIZE);

      ctx->gfx_pipeline_state.dirty = true;
      ctx->rast_state_changed = true;

      if (clip_halfz != ctx->rast_state->base.clip_halfz) {
         if (!screen->driver_workarounds.depth_clip_control_missing)
            ctx->gfx_pipeline_state.dirty = true;
         else
            zink_set_last_vertex_key(ctx)->clip_halfz = ctx->rast_state->base.clip_halfz;
         ctx->vp_state_changed = true;
      }

      if (ctx->gfx_pipeline_state.dyn_state1.front_face != ctx->rast_state->front_face) {
         ctx->gfx_pipeline_state.dyn_state1.front_face = ctx->rast_state->front_face;
         ctx->gfx_pipeline_state.dirty |= !zink_screen(pctx->screen)->info.have_EXT_extended_dynamic_state;
      }
      if (ctx->gfx_pipeline_state.dyn_state1.cull_mode != ctx->rast_state->cull_mode) {
         ctx->gfx_pipeline_state.dyn_state1.cull_mode = ctx->rast_state->cull_mode;
         ctx->gfx_pipeline_state.dirty |= !zink_screen(pctx->screen)->info.have_EXT_extended_dynamic_state;
      }
      if (!ctx->primitives_generated_active)
         zink_set_rasterizer_discard(ctx, false);
      else if (rasterizer_discard != ctx->rast_state->base.rasterizer_discard)
         zink_set_color_write_enables(ctx);

      if (ctx->rast_state->base.point_quad_rasterization != point_quad_rasterization)
         zink_set_fs_point_coord_key(ctx);
      if (ctx->rast_state->base.scissor != scissor)
         ctx->scissor_changed = true;

      if (ctx->rast_state->base.force_persample_interp != force_persample_interp)
         zink_set_fs_key(ctx)->force_persample_interp = ctx->rast_state->base.force_persample_interp;

      if (ctx->rast_state->base.half_pixel_center != half_pixel_center)
         ctx->vp_state_changed = true;
   }
}

static void
zink_delete_rasterizer_state(struct pipe_context *pctx, void *rs_state)
{
   FREE(rs_state);
}

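/* pipe_vertex_state objects bundle a vertex buffer, its vertex elements, and
 * an optional index buffer so that display-list style draws can be issued
 * without rebinding state through the context.
 */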
struct pipe_vertex_state *
zink_create_vertex_state(struct pipe_screen *pscreen,
                         struct pipe_vertex_buffer *buffer,
                         const struct pipe_vertex_element *elements,
                         unsigned num_elements,
                         struct pipe_resource *indexbuf,
                         uint32_t full_velem_mask)
{
   struct zink_vertex_state *zstate = CALLOC_STRUCT(zink_vertex_state);
   _mesa_set_init(&zstate->masks, NULL, NULL, _mesa_key_pointer_equal);

   util_init_pipe_vertex_state(pscreen, buffer, elements, num_elements, indexbuf, full_velem_mask,
                               &zstate->b);

   /* Initialize the vertex element state in zstate->velems.
    * Do it by creating a vertex element state object and copying it there.
    */
   struct zink_context ctx;
   ctx.base.screen = pscreen;
   struct zink_vertex_elements_state *elems = zink_create_vertex_elements_state(&ctx.base, num_elements, elements);
   for (unsigned i = 0; i < elems->hw_state.num_bindings; i++) {
      if (zink_screen(pscreen)->info.have_EXT_vertex_input_dynamic_state)
         elems->hw_state.dynbindings[i].stride = buffer->stride;
   }
   zstate->velems = *elems;
   zink_delete_vertex_elements_state(&ctx.base, elems);

   return &zstate->b;
}

void
zink_vertex_state_destroy(struct pipe_screen *pscreen, struct pipe_vertex_state *vstate)
{
   struct zink_vertex_state *zstate = (struct zink_vertex_state *)vstate;
   ralloc_free(zstate->masks.table);
   pipe_vertex_buffer_unreference(&vstate->input.vbuffer);
   pipe_resource_reference(&vstate->input.indexbuf, NULL);
   FREE(vstate);
}

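/* Return hardware vertex input state trimmed to the subset of elements in
 * partial_velem_mask. The full mask reuses the baked state directly; partial
 * masks are built on demand and cached in zstate->masks, keyed by the mask.
 */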
const struct zink_vertex_elements_hw_state *
zink_vertex_state_mask(struct pipe_vertex_state *vstate, uint32_t partial_velem_mask, bool have_EXT_vertex_input_dynamic_state)
{
   struct zink_vertex_state *zstate = (struct zink_vertex_state *)vstate;

   if (partial_velem_mask == vstate->input.full_velem_mask)
      return &zstate->velems.hw_state;
   struct set_entry *he = _mesa_set_search_pre_hashed(&zstate->masks, partial_velem_mask, (void*)(uintptr_t)partial_velem_mask);
   if (he)
      return he->key;

   struct zink_vertex_elements_hw_state *hw_state = rzalloc(zstate->masks.table, struct zink_vertex_elements_hw_state);
   unsigned i = 0;
   if (have_EXT_vertex_input_dynamic_state) {
      u_foreach_bit(elem, vstate->input.full_velem_mask & partial_velem_mask) {
         unsigned idx = util_bitcount(vstate->input.full_velem_mask & BITFIELD_MASK(elem));
         hw_state->dynattribs[i] = zstate->velems.hw_state.dynattribs[idx];
         hw_state->dynattribs[i].location = i;
         i++;
      }
      memcpy(hw_state->dynbindings, zstate->velems.hw_state.dynbindings,
             zstate->velems.hw_state.num_bindings * sizeof(VkVertexInputBindingDescription2EXT));
   } else {
   }
   hw_state->num_attribs = i;
   hw_state->num_bindings = zstate->velems.hw_state.num_bindings;
   _mesa_set_add_pre_hashed(&zstate->masks, partial_velem_mask, hw_state);
   return hw_state;
}

struct pipe_vertex_state *
zink_cache_create_vertex_state(struct pipe_screen *pscreen,
                               struct pipe_vertex_buffer *buffer,
                               const struct pipe_vertex_element *elements,
                               unsigned num_elements,
                               struct pipe_resource *indexbuf,
                               uint32_t full_velem_mask)
{
   struct zink_screen *screen = zink_screen(pscreen);

   return util_vertex_state_cache_get(pscreen, buffer, elements, num_elements, indexbuf,
                                      full_velem_mask, &screen->vertex_state_cache);
}

void
zink_cache_vertex_state_destroy(struct pipe_screen *pscreen, struct pipe_vertex_state *vstate)
{
   struct zink_screen *screen = zink_screen(pscreen);

   util_vertex_state_destroy(pscreen, &screen->vertex_state_cache, vstate);
}

void
zink_context_state_init(struct pipe_context *pctx)
{
   pctx->create_vertex_elements_state = zink_create_vertex_elements_state;
   pctx->bind_vertex_elements_state = zink_bind_vertex_elements_state;
   pctx->delete_vertex_elements_state = zink_delete_vertex_elements_state;

   pctx->create_blend_state = zink_create_blend_state;
   pctx->bind_blend_state = zink_bind_blend_state;
   pctx->delete_blend_state = zink_delete_blend_state;

   pctx->create_depth_stencil_alpha_state = zink_create_depth_stencil_alpha_state;
   pctx->bind_depth_stencil_alpha_state = zink_bind_depth_stencil_alpha_state;
   pctx->delete_depth_stencil_alpha_state = zink_delete_depth_stencil_alpha_state;

   pctx->create_rasterizer_state = zink_create_rasterizer_state;
   pctx->bind_rasterizer_state = zink_bind_rasterizer_state;
   pctx->delete_rasterizer_state = zink_delete_rasterizer_state;
}