#include "zink_compiler.h"
#include "zink_context.h"
#include "zink_program.h"
#include "zink_query.h"
#include "zink_resource.h"
#include "zink_screen.h"
#include "zink_state.h"

#include "indices/u_primconvert.h"
#include "util/hash_table.h"
#include "util/u_debug.h"
#include "util/u_helpers.h"
#include "util/u_inlines.h"
#include "util/u_prim.h"
#include "util/u_prim_restart.h"

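/* Carve one descriptor set for this program out of the batch's descriptor
 * pool. The caller is responsible for ensuring enough descriptors remain in
 * the pool (see the flush-and-retry in zink_draw_vbo); returns
 * VK_NULL_HANDLE on allocation failure.
 */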
static VkDescriptorSet
allocate_descriptor_set(struct zink_screen *screen,
                        struct zink_batch *batch,
                        struct zink_gfx_program *prog)
{
   assert(batch->descs_left >= prog->num_descriptors);
   VkDescriptorSetAllocateInfo dsai;
   memset((void *)&dsai, 0, sizeof(dsai));
   dsai.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
   dsai.pNext = NULL;
   dsai.descriptorPool = batch->descpool;
   dsai.descriptorSetCount = 1;
   dsai.pSetLayouts = &prog->dsl;

   VkDescriptorSet desc_set;
   if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
      debug_printf("ZINK: failed to allocate descriptor set :/\n");
      return VK_NULL_HANDLE;
   }

   batch->descs_left -= prog->num_descriptors;
   return desc_set;
}

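/* Emits the counter-buffer barrier required between pausing and resuming
 * transform feedback, so counter values written at pause are visible to the
 * resume. ctx->xfb_barrier is presumably set elsewhere in the driver when a
 * pause/resume occurs; it is cleared here once the barrier is recorded.
 */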
static void
zink_emit_xfb_counter_barrier(struct zink_context *ctx)
{
   /* Between the pause and resume there needs to be a memory barrier for the counter buffers
    * with a source access of VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT
    * at pipeline stage VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT
    * to a destination access of VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT
    * at pipeline stage VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT.
    *
    * - from VK_EXT_transform_feedback spec
    */
   VkBufferMemoryBarrier barriers[PIPE_MAX_SO_OUTPUTS] = {};
   unsigned barrier_count = 0;

   for (unsigned i = 0; i < ctx->num_so_targets; i++) {
      struct zink_so_target *t = zink_so_target(ctx->so_targets[i]);
      if (t->counter_buffer_valid) {
         /* pack barriers densely so barrier_count matches the array contents
          * even when earlier targets have no valid counter buffer
          */
         barriers[barrier_count].sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
         barriers[barrier_count].srcAccessMask = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT;
         barriers[barrier_count].dstAccessMask = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT;
         barriers[barrier_count].buffer = zink_resource(t->counter_buffer)->buffer;
         barriers[barrier_count].size = VK_WHOLE_SIZE;
         barrier_count++;
      }
   }
   struct zink_batch *batch = zink_batch_no_rp(ctx);
   vkCmdPipelineBarrier(batch->cmdbuf,
      VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
      VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
      0,
      0, NULL,
      barrier_count, barriers,
      0, NULL
   );
   ctx->xfb_barrier = false;
}

static void
zink_emit_xfb_vertex_input_barrier(struct zink_context *ctx, struct zink_resource *res)
{
   /* A pipeline barrier is required between using the buffers as
    * transform feedback buffers and vertex buffers to
    * ensure all writes to the transform feedback buffers are visible
    * when the data is read as vertex attributes.
    * The source access is VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT
    * and the destination access is VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
    * for the pipeline stages VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT
    * and VK_PIPELINE_STAGE_VERTEX_INPUT_BIT respectively.
    *
    * - 20.3.1. Drawing Transform Feedback
    */
   VkBufferMemoryBarrier barriers[1] = {};
   barriers[0].sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
   barriers[0].srcAccessMask = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT;
   barriers[0].dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
   barriers[0].buffer = res->buffer;
   barriers[0].size = VK_WHOLE_SIZE;
   struct zink_batch *batch = zink_batch_no_rp(ctx);
   zink_batch_reference_resource_rw(batch, res, false);
   vkCmdPipelineBarrier(batch->cmdbuf,
      VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
      VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
      0,
      0, NULL,
      ARRAY_SIZE(barriers), barriers,
      0, NULL
   );
   res->needs_xfb_barrier = false;
}

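/* Binds the currently set stream-output buffers as transform feedback
 * buffers on the active batch and clears the dirty flag.
 */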
static void
zink_emit_stream_output_targets(struct pipe_context *pctx)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_batch *batch = zink_curr_batch(ctx);
   VkBuffer buffers[PIPE_MAX_SO_OUTPUTS];
   VkDeviceSize buffer_offsets[PIPE_MAX_SO_OUTPUTS];
   VkDeviceSize buffer_sizes[PIPE_MAX_SO_OUTPUTS];

   for (unsigned i = 0; i < ctx->num_so_targets; i++) {
      struct zink_so_target *t = (struct zink_so_target *)ctx->so_targets[i];
      buffers[i] = zink_resource(t->base.buffer)->buffer;
      zink_batch_reference_resource_rw(batch, zink_resource(t->base.buffer), true);
      buffer_offsets[i] = t->base.buffer_offset;
      buffer_sizes[i] = t->base.buffer_size;
   }

   screen->vk_CmdBindTransformFeedbackBuffersEXT(batch->cmdbuf, 0, ctx->num_so_targets,
                                                 buffers, buffer_offsets,
                                                 buffer_sizes);
   ctx->dirty_so_targets = false;
}

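/* Translates gallium vertex-buffer bindings into vkCmdBindVertexBuffers,
 * falling back to the context's dummy buffer for bindings with no resource
 * attached, since every bound slot needs a valid VkBuffer here.
 */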
static void
zink_bind_vertex_buffers(struct zink_batch *batch, struct zink_context *ctx)
{
   VkBuffer buffers[PIPE_MAX_ATTRIBS];
   VkDeviceSize buffer_offsets[PIPE_MAX_ATTRIBS];
   const struct zink_vertex_elements_state *elems = ctx->element_state;
   for (unsigned i = 0; i < elems->hw_state.num_bindings; i++) {
      struct pipe_vertex_buffer *vb = ctx->buffers + ctx->element_state->binding_map[i];
      assert(vb);
      if (vb->buffer.resource) {
         struct zink_resource *res = zink_resource(vb->buffer.resource);
         buffers[i] = res->buffer;
         buffer_offsets[i] = vb->buffer_offset;
         zink_batch_reference_resource_rw(batch, res, false);
      } else {
         buffers[i] = zink_resource(ctx->dummy_buffer)->buffer;
         buffer_offsets[i] = 0;
      }
   }

   if (elems->hw_state.num_bindings > 0)
      vkCmdBindVertexBuffers(batch->cmdbuf, 0,
                             elems->hw_state.num_bindings,
                             buffers, buffer_offsets);
}

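/* Looks up (or creates and caches) the zink_gfx_program for the currently
 * bound shader stages; the program cache is keyed on the gfx_stages array.
 */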
static struct zink_gfx_program *
get_gfx_program(struct zink_context *ctx)
{
   if (ctx->dirty_shader_stages) {
      struct hash_entry *entry = _mesa_hash_table_search(ctx->program_cache,
                                                         ctx->gfx_stages);
      if (!entry) {
         struct zink_gfx_program *prog;
         prog = zink_create_gfx_program(ctx, ctx->gfx_stages);
         if (!prog)
            return NULL;
         entry = _mesa_hash_table_insert(ctx->program_cache, prog->shaders, prog);
         if (!entry)
            return NULL;
      }
      ctx->curr_program = entry->data;
      ctx->dirty_shader_stages = 0;
   }

   assert(ctx->curr_program);
   return ctx->curr_program;
}

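/* vkCmdSetLineWidth only matters when lines are actually rasterized: line
 * primitives, or triangles drawn in line (wireframe) polygon mode.
 */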
static bool
line_width_needed(enum pipe_prim_type reduced_prim,
                  VkPolygonMode polygon_mode)
{
   switch (reduced_prim) {
   case PIPE_PRIM_POINTS:
      return false;

   case PIPE_PRIM_LINES:
      return true;

   case PIPE_PRIM_TRIANGLES:
      return polygon_mode == VK_POLYGON_MODE_LINE;

   default:
      unreachable("unexpected reduced prim");
   }
}

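/* Vulkan only defines primitive restart for strip/fan topologies; list
 * topologies take the CPU fallback in zink_draw_vbo below.
 */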
static inline bool
restart_supported(enum pipe_prim_type mode)
{
   return mode == PIPE_PRIM_LINE_STRIP ||
          mode == PIPE_PRIM_TRIANGLE_STRIP ||
          mode == PIPE_PRIM_TRIANGLE_FAN;
}

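/* pipe_context::draw_vbo hook: translates a gallium draw into Vulkan
 * commands, handling primitive/restart fallbacks, descriptor updates,
 * dynamic state, and transform feedback before emitting the draw itself.
 */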
void
zink_draw_vbo(struct pipe_context *pctx,
              const struct pipe_draw_info *dinfo)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_rasterizer_state *rast_state = ctx->rast_state;
   struct zink_depth_stencil_alpha_state *dsa_state = ctx->dsa_state;
   struct zink_so_target *so_target = zink_so_target(dinfo->count_from_stream_output);
   VkBuffer counter_buffers[PIPE_MAX_SO_OUTPUTS];
   VkDeviceSize counter_buffer_offsets[PIPE_MAX_SO_OUTPUTS] = {};
   bool need_index_buffer_unref = false;

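   /* Vulkan can't express primitive restart on list topologies, nor quads,
    * polygons, line loops, or (without driver support) triangle fans, so
    * these draws are rewritten on the CPU: u_prim_restart redraws without
    * restart, u_primconvert converts the primitive type.
    */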
   if (dinfo->primitive_restart && !restart_supported(dinfo->mode)) {
      util_draw_vbo_without_prim_restart(pctx, dinfo);
      return;
   }
   if (dinfo->mode == PIPE_PRIM_QUADS ||
       dinfo->mode == PIPE_PRIM_QUAD_STRIP ||
       dinfo->mode == PIPE_PRIM_POLYGON ||
       (dinfo->mode == PIPE_PRIM_TRIANGLE_FAN && !screen->have_triangle_fans) ||
       dinfo->mode == PIPE_PRIM_LINE_LOOP) {
      if (!u_trim_pipe_prim(dinfo->mode, (unsigned *)&dinfo->count))
         return;

      util_primconvert_save_rasterizer_state(ctx->primconvert, &rast_state->base);
      util_primconvert_draw_vbo(ctx->primconvert, dinfo);
      return;
   }

   struct zink_gfx_program *gfx_program = get_gfx_program(ctx);
   if (!gfx_program)
      return;

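   /* primitive restart state is baked into the pipeline, so toggling it
    * invalidates the cached pipeline-state hash
    */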
   if (ctx->gfx_pipeline_state.primitive_restart != !!dinfo->primitive_restart)
      ctx->gfx_pipeline_state.hash = 0;
   ctx->gfx_pipeline_state.primitive_restart = !!dinfo->primitive_restart;

   VkPipeline pipeline = zink_get_gfx_pipeline(screen, gfx_program,
                                               &ctx->gfx_pipeline_state,
                                               dinfo->mode);

   enum pipe_prim_type reduced_prim = u_reduced_prim(dinfo->mode);

   bool depth_bias = false;
   switch (reduced_prim) {
   case PIPE_PRIM_POINTS:
      depth_bias = rast_state->offset_point;
      break;

   case PIPE_PRIM_LINES:
      depth_bias = rast_state->offset_line;
      break;

   case PIPE_PRIM_TRIANGLES:
      depth_bias = rast_state->offset_tri;
      break;

   default:
      unreachable("unexpected reduced prim");
   }

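   /* normalize the index buffer before binding: rewrite nonstandard restart
    * indices, widen uint8 indices when VK_EXT_index_type_uint8 is missing,
    * and upload user-pointer indices into a real buffer
    */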
   unsigned index_offset = 0;
   struct pipe_resource *index_buffer = NULL;
   if (dinfo->index_size > 0) {
      uint32_t restart_index = util_prim_restart_index_from_size(dinfo->index_size);
      if ((dinfo->primitive_restart && (dinfo->restart_index != restart_index)) ||
          (!screen->info.have_EXT_index_type_uint8 && dinfo->index_size == 1)) {
         util_translate_prim_restart_ib(pctx, dinfo, &index_buffer);
         need_index_buffer_unref = true;
      } else {
         if (dinfo->has_user_indices) {
            if (!util_upload_index_buffer(pctx, dinfo, &index_buffer, &index_offset, 4)) {
               debug_printf("util_upload_index_buffer() failed\n");
               return;
            }
         } else
            index_buffer = dinfo->index.resource;
      }
   }

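   /* gather one VkWriteDescriptorSet per shader binding into worst-case-sized
    * scratch arrays; the writes are flushed with vkUpdateDescriptorSets once
    * a descriptor set has actually been allocated further down
    */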
   VkWriteDescriptorSet wds[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS + PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   struct zink_resource *write_desc_resources[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS + PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   VkDescriptorBufferInfo buffer_infos[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS];
   VkDescriptorImageInfo image_infos[PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   VkBufferView buffer_view[] = {VK_NULL_HANDLE};
   int num_wds = 0, num_buffer_info = 0, num_image_info = 0;

   struct zink_resource *transitions[PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   int num_transitions = 0;

   for (int i = 0; i < ARRAY_SIZE(ctx->gfx_stages); i++) {
      struct zink_shader *shader = ctx->gfx_stages[i];
      if (!shader)
         continue;

      if (ctx->num_so_targets &&
          (i == PIPE_SHADER_GEOMETRY ||
           (i == PIPE_SHADER_TESS_EVAL && !ctx->gfx_stages[PIPE_SHADER_GEOMETRY]) ||
           (i == PIPE_SHADER_VERTEX && !ctx->gfx_stages[PIPE_SHADER_GEOMETRY] && !ctx->gfx_stages[PIPE_SHADER_TESS_EVAL]))) {
         /* only the last pre-rasterization stage drives streamout */
         for (unsigned so = 0; so < ctx->num_so_targets; so++) {
            struct zink_so_target *t = zink_so_target(ctx->so_targets[so]);
            t->stride = shader->streamout.so_info.stride[so] * sizeof(uint32_t);
         }
      }

      for (int j = 0; j < shader->num_bindings; j++) {
         int index = shader->bindings[j].index;
         if (shader->bindings[j].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
            assert(ctx->ubos[i][index].buffer_size <= screen->info.props.limits.maxUniformBufferRange);
            struct zink_resource *res = zink_resource(ctx->ubos[i][index].buffer);
            assert(!res || ctx->ubos[i][index].buffer_size > 0);
            assert(!res || ctx->ubos[i][index].buffer);
            write_desc_resources[num_wds] = res;
            buffer_infos[num_buffer_info].buffer = res ? res->buffer :
                                                   (screen->info.rb2_feats.nullDescriptor ?
                                                    VK_NULL_HANDLE :
                                                    zink_resource(ctx->dummy_buffer)->buffer);
            buffer_infos[num_buffer_info].offset = res ? ctx->ubos[i][index].buffer_offset : 0;
            buffer_infos[num_buffer_info].range  = res ? ctx->ubos[i][index].buffer_size : VK_WHOLE_SIZE;
            wds[num_wds].pBufferInfo = buffer_infos + num_buffer_info;
            ++num_buffer_info;
         } else {
            struct pipe_sampler_view *psampler_view = ctx->image_views[i][index];
            struct zink_sampler_view *sampler_view = zink_sampler_view(psampler_view);

            struct zink_resource *res = psampler_view ? zink_resource(psampler_view->texture) : NULL;
            write_desc_resources[num_wds] = res;
            if (!res) {
               /* if we're hitting this assert often, we can probably just throw a junk buffer in since
                * the results of this codepath are undefined in ARB_texture_buffer_object spec
                */
               assert(screen->info.rb2_feats.nullDescriptor);
               if (shader->bindings[j].type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER)
                  wds[num_wds].pTexelBufferView = &buffer_view[0];
               else {
                  image_infos[num_image_info].imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
                  image_infos[num_image_info].imageView = VK_NULL_HANDLE;
                  image_infos[num_image_info].sampler = ctx->samplers[i][index];
                  wds[num_wds].pImageInfo = image_infos + num_image_info;
                  ++num_image_info;
               }
            } else if (res->base.target == PIPE_BUFFER)
               wds[num_wds].pTexelBufferView = &sampler_view->buffer_view;
            else {
               VkImageLayout layout = res->layout;
               if (layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL &&
                   layout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL &&
                   layout != VK_IMAGE_LAYOUT_GENERAL) {
                  transitions[num_transitions++] = res;
                  layout = VK_IMAGE_LAYOUT_GENERAL;
               }
               image_infos[num_image_info].imageLayout = layout;
               image_infos[num_image_info].imageView = sampler_view->image_view;
               image_infos[num_image_info].sampler = ctx->samplers[i][index];
               wds[num_wds].pImageInfo = image_infos + num_image_info;
               ++num_image_info;
            }
         }

         wds[num_wds].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
         wds[num_wds].pNext = NULL;
         wds[num_wds].dstBinding = shader->bindings[j].binding;
         wds[num_wds].dstArrayElement = 0;
         wds[num_wds].descriptorCount = 1;
         wds[num_wds].descriptorType = shader->bindings[j].type;
         ++num_wds;
      }
   }

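   /* barriers can't be recorded inside a render pass, so image layout
    * transitions and the xfb barriers below go on a no-render-pass batch
    * before the draw's render pass begins
    */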
   struct zink_batch *batch;
   if (num_transitions > 0) {
      batch = zink_batch_no_rp(ctx);

      for (int i = 0; i < num_transitions; ++i)
         zink_resource_barrier(batch->cmdbuf, transitions[i],
                               transitions[i]->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   if (ctx->xfb_barrier)
      zink_emit_xfb_counter_barrier(ctx);

   if (ctx->dirty_so_targets)
      zink_emit_stream_output_targets(pctx);

   if (so_target && zink_resource(so_target->base.buffer)->needs_xfb_barrier)
      zink_emit_xfb_vertex_input_barrier(ctx, zink_resource(so_target->base.buffer));

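   /* get a render-pass batch for the draw; if its descriptor pool can't fit
    * this program's descriptors, flush and retry with a fresh batch
    */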
   batch = zink_batch_rp(ctx);

   if (batch->descs_left < gfx_program->num_descriptors) {
      ctx->base.flush(&ctx->base, NULL, 0);
      batch = zink_batch_rp(ctx);
      assert(batch->descs_left >= gfx_program->num_descriptors);
   }
   zink_batch_reference_program(batch, ctx->curr_program);

   VkDescriptorSet desc_set = allocate_descriptor_set(screen, batch,
                                                      gfx_program);
   assert(desc_set != VK_NULL_HANDLE);

   for (int i = 0; i < ARRAY_SIZE(ctx->gfx_stages); i++) {
      struct zink_shader *shader = ctx->gfx_stages[i];
      if (!shader)
         continue;

      for (int j = 0; j < shader->num_bindings; j++) {
         int index = shader->bindings[j].index;
         if (shader->bindings[j].type != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
            struct zink_sampler_view *sampler_view = zink_sampler_view(ctx->image_views[i][index]);
            if (sampler_view)
               zink_batch_reference_sampler_view(batch, sampler_view);
         }
      }
   }

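   /* viewport, scissor, line width, depth bias, blend constants and stencil
    * reference are dynamic state in zink's pipelines, so they are re-emitted
    * for every draw
    */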
   vkCmdSetViewport(batch->cmdbuf, 0, ctx->gfx_pipeline_state.num_viewports, ctx->viewports);
   if (ctx->rast_state->base.scissor)
      vkCmdSetScissor(batch->cmdbuf, 0, ctx->gfx_pipeline_state.num_viewports, ctx->scissors);
   else if (ctx->fb_state.width && ctx->fb_state.height) {
      VkRect2D fb_scissor[ctx->gfx_pipeline_state.num_viewports];
      for (unsigned i = 0; i < ctx->gfx_pipeline_state.num_viewports; i++) {
         fb_scissor[i].offset.x = fb_scissor[i].offset.y = 0;
         fb_scissor[i].extent.width = ctx->fb_state.width;
         fb_scissor[i].extent.height = ctx->fb_state.height;
      }
      vkCmdSetScissor(batch->cmdbuf, 0, ctx->gfx_pipeline_state.num_viewports, fb_scissor);
   }

   if (line_width_needed(reduced_prim, rast_state->hw_state.polygon_mode)) {
      if (screen->info.feats.features.wideLines || ctx->line_width == 1.0f)
         vkCmdSetLineWidth(batch->cmdbuf, ctx->line_width);
      else
         debug_printf("BUG: wide lines not supported, needs fallback!\n");
   }

   if (dsa_state->base.stencil[0].enabled) {
      if (dsa_state->base.stencil[1].enabled) {
         vkCmdSetStencilReference(batch->cmdbuf, VK_STENCIL_FACE_FRONT_BIT,
                                  ctx->stencil_ref.ref_value[0]);
         vkCmdSetStencilReference(batch->cmdbuf, VK_STENCIL_FACE_BACK_BIT,
                                  ctx->stencil_ref.ref_value[1]);
      } else
         vkCmdSetStencilReference(batch->cmdbuf,
                                  VK_STENCIL_FACE_FRONT_AND_BACK,
                                  ctx->stencil_ref.ref_value[0]);
   }

   if (depth_bias)
      vkCmdSetDepthBias(batch->cmdbuf, rast_state->offset_units, rast_state->offset_clamp, rast_state->offset_scale);
   else
      vkCmdSetDepthBias(batch->cmdbuf, 0.0f, 0.0f, 0.0f);

   if (ctx->gfx_pipeline_state.blend_state->need_blend_constants)
      vkCmdSetBlendConstants(batch->cmdbuf, ctx->blend_constants);

   if (num_wds > 0) {
      for (int i = 0; i < num_wds; ++i) {
         wds[i].dstSet = desc_set;
         if (write_desc_resources[i])
            zink_batch_reference_resource_rw(batch, write_desc_resources[i], false);
      }
      vkUpdateDescriptorSets(screen->dev, num_wds, wds, 0, NULL);
   }

   vkCmdBindPipeline(batch->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
   vkCmdBindDescriptorSets(batch->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS,
                           gfx_program->layout, 0, 1, &desc_set, 0, NULL);
   zink_bind_vertex_buffers(batch, ctx);

   zink_query_update_gs_states(ctx);

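   /* resume transform feedback from wherever the counter buffers left off;
    * a VK_NULL_HANDLE counter buffer makes that slot start from offset 0
    */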
   if (ctx->num_so_targets) {
      for (unsigned i = 0; i < ctx->num_so_targets; i++) {
         struct zink_so_target *t = zink_so_target(ctx->so_targets[i]);
         struct zink_resource *res = zink_resource(t->counter_buffer);
         if (t->counter_buffer_valid) {
            zink_batch_reference_resource_rw(batch, res, true);
            counter_buffers[i] = res->buffer;
            counter_buffer_offsets[i] = t->counter_buffer_offset;
         } else
            counter_buffers[i] = VK_NULL_HANDLE;
      }
      screen->vk_CmdBeginTransformFeedbackEXT(batch->cmdbuf, 0, ctx->num_so_targets, counter_buffers, counter_buffer_offsets);
   }

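   /* emit the draw itself: indexed vs. non-indexed, direct vs. indirect, and
    * vkCmdDrawIndirectByteCountEXT for draws whose vertex count comes from a
    * stream-output target (when the implementation supports such draws)
    */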
   if (dinfo->index_size > 0) {
      VkIndexType index_type;
      unsigned index_size = dinfo->index_size;
      if (need_index_buffer_unref)
         /* index buffer will have been promoted from uint8 to uint16 in this case */
         index_size = MAX2(index_size, 2);
      switch (index_size) {
      case 1:
         assert(screen->info.have_EXT_index_type_uint8);
         index_type = VK_INDEX_TYPE_UINT8_EXT;
         break;
      case 2:
         index_type = VK_INDEX_TYPE_UINT16;
         break;
      case 4:
         index_type = VK_INDEX_TYPE_UINT32;
         break;
      default:
         unreachable("unknown index size!");
      }
      struct zink_resource *res = zink_resource(index_buffer);
      vkCmdBindIndexBuffer(batch->cmdbuf, res->buffer, index_offset, index_type);
      zink_batch_reference_resource_rw(batch, res, false);
      if (dinfo->indirect) {
         struct zink_resource *indirect = zink_resource(dinfo->indirect->buffer);
         zink_batch_reference_resource_rw(batch, indirect, false);
         vkCmdDrawIndexedIndirect(batch->cmdbuf, indirect->buffer, dinfo->indirect->offset, dinfo->indirect->draw_count, dinfo->indirect->stride);
      } else
         vkCmdDrawIndexed(batch->cmdbuf,
            dinfo->count, dinfo->instance_count,
            need_index_buffer_unref ? 0 : dinfo->start, dinfo->index_bias, dinfo->start_instance);
   } else {
      if (so_target && screen->info.tf_props.transformFeedbackDraw) {
         zink_batch_reference_resource_rw(batch, zink_resource(so_target->counter_buffer), true);
         screen->vk_CmdDrawIndirectByteCountEXT(batch->cmdbuf, dinfo->instance_count, dinfo->start_instance,
                                                zink_resource(so_target->counter_buffer)->buffer, so_target->counter_buffer_offset, 0,
                                                MIN2(so_target->stride, screen->info.tf_props.maxTransformFeedbackBufferDataStride));
      } else if (dinfo->indirect) {
         struct zink_resource *indirect = zink_resource(dinfo->indirect->buffer);
         zink_batch_reference_resource_rw(batch, indirect, false);
         vkCmdDrawIndirect(batch->cmdbuf, indirect->buffer, dinfo->indirect->offset, dinfo->indirect->draw_count, dinfo->indirect->stride);
      } else
         vkCmdDraw(batch->cmdbuf, dinfo->count, dinfo->instance_count, dinfo->start, dinfo->start_instance);
   }

   if (dinfo->index_size > 0 && (dinfo->has_user_indices || need_index_buffer_unref))
      pipe_resource_reference(&index_buffer, NULL);

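   /* end transform feedback: mark the counter buffers valid so a later draw
    * can resume from the recorded offsets, and flag the data buffers as
    * needing a barrier before being read as vertex input
    */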
   if (ctx->num_so_targets) {
      for (unsigned i = 0; i < ctx->num_so_targets; i++) {
         struct zink_so_target *t = zink_so_target(ctx->so_targets[i]);
         counter_buffers[i] = zink_resource(t->counter_buffer)->buffer;
         counter_buffer_offsets[i] = t->counter_buffer_offset;
         t->counter_buffer_valid = true;
         zink_resource(ctx->so_targets[i]->buffer)->needs_xfb_barrier = true;
      }
      screen->vk_CmdEndTransformFeedbackEXT(batch->cmdbuf, 0, ctx->num_so_targets, counter_buffers, counter_buffer_offsets);
   }
}