/*
 * Copyright © 2016 Intel Corporation
 *
 * SPDX-License-Identifier: MIT
 */

#include <assert.h>
#include <stdbool.h>

#include "radv_meta.h"
#include "sid.h"

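/* Builds the compute shader used for the HTILE expand path: each invocation
 * loads one texel through a compressed view of the image (binding 0) and
 * stores it back through an uncompressed view (binding 1), rewriting the
 * depth/stencil data in fully expanded form.
 */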
static nir_shader *
build_expand_depth_stencil_compute_shader(struct radv_device *dev)
{
   const struct glsl_type *img_type = glsl_image_type(GLSL_SAMPLER_DIM_2D, false, GLSL_TYPE_FLOAT);

   nir_builder b = radv_meta_init_shader(dev, MESA_SHADER_COMPUTE, "expand_depth_stencil_compute");

   /* We need at least 8/8/1 to cover an entire HTILE block in a single workgroup. */
   b.shader->info.workgroup_size[0] = 8;
   b.shader->info.workgroup_size[1] = 8;
   nir_variable *input_img = nir_variable_create(b.shader, nir_var_image, img_type, "in_img");
   input_img->data.descriptor_set = 0;
   input_img->data.binding = 0;

   nir_variable *output_img = nir_variable_create(b.shader, nir_var_image, img_type, "out_img");
   output_img->data.descriptor_set = 0;
   output_img->data.binding = 1;

   nir_def *invoc_id = nir_load_local_invocation_id(&b);
   nir_def *wg_id = nir_load_workgroup_id(&b);
   nir_def *block_size = nir_imm_ivec4(&b, b.shader->info.workgroup_size[0], b.shader->info.workgroup_size[1],
                                       b.shader->info.workgroup_size[2], 0);

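   /* Global texel coordinate = workgroup id * workgroup size + local invocation id. */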
   nir_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);

   nir_def *data = nir_image_deref_load(&b, 4, 32, &nir_build_deref_var(&b, input_img)->def, global_id,
                                        nir_undef(&b, 1, 32), nir_imm_int(&b, 0), .image_dim = GLSL_SAMPLER_DIM_2D);

   /* We need a SCOPE_DEVICE memory scope because ACO would otherwise avoid
    * emitting a vmcnt(0), expecting the L1 cache to keep memory operations
    * in-order within the same workgroup. The vmcnt(0) seems to be necessary
    * here, however.
    */
   nir_barrier(&b, .execution_scope = SCOPE_WORKGROUP, .memory_scope = SCOPE_DEVICE,
               .memory_semantics = NIR_MEMORY_ACQ_REL, .memory_modes = nir_var_mem_ssbo);

   nir_image_deref_store(&b, &nir_build_deref_var(&b, output_img)->def, global_id, nir_undef(&b, 1, 32), data,
                         nir_imm_int(&b, 0), .image_dim = GLSL_SAMPLER_DIM_2D);
   return b.shader;
}

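/* Cache key for the graphics HTILE expand pipeline; one pipeline is created per sample count. */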
struct radv_htile_expand_key {
   enum radv_meta_object_key_type type;
   uint32_t samples;
};

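/* Returns the graphics pipeline used to expand HTILE (creating and caching it on
 * first use): a rect-list draw with a no-op fragment shader, with depth and stencil
 * compression disabled so that drawing rewrites the data in decompressed form.
 */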
static VkResult
get_pipeline_gfx(struct radv_device *device, struct radv_image *image, VkPipeline *pipeline_out,
                 VkPipelineLayout *layout_out)
{
   const uint32_t samples = image->vk.samples;
   struct radv_htile_expand_key key;
   VkResult result;

   result = radv_meta_get_noop_pipeline_layout(device, layout_out);
   if (result != VK_SUCCESS)
      return result;

   memset(&key, 0, sizeof(key));
   key.type = RADV_META_OBJECT_KEY_HTILE_EXPAND_GFX;
   key.samples = samples;

   VkPipeline pipeline_from_cache = vk_meta_lookup_pipeline(&device->meta_state.device, &key, sizeof(key));
   if (pipeline_from_cache != VK_NULL_HANDLE) {
      *pipeline_out = pipeline_from_cache;
      return VK_SUCCESS;
   }

   nir_shader *vs_module = radv_meta_build_nir_vs_generate_vertices(device);
   nir_shader *fs_module = radv_meta_build_nir_fs_noop(device);

   const VkPipelineSampleLocationsStateCreateInfoEXT sample_locs_create_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
      .sampleLocationsEnable = false,
   };

   const VkGraphicsPipelineCreateInfoRADV radv_info = {
      .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO_RADV,
      .depth_compress_disable = true,
      .stencil_compress_disable = true,
   };

   const VkGraphicsPipelineCreateInfo pipeline_create_info = {
      .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
      .pNext = &radv_info,
      .stageCount = 2,
      .pStages =
         (VkPipelineShaderStageCreateInfo[]){
            {
               .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
               .stage = VK_SHADER_STAGE_VERTEX_BIT,
               .module = vk_shader_module_handle_from_nir(vs_module),
               .pName = "main",
            },
            {
               .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
               .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
               .module = vk_shader_module_handle_from_nir(fs_module),
               .pName = "main",
            },
         },
      .pVertexInputState =
         &(VkPipelineVertexInputStateCreateInfo){
            .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
            .vertexBindingDescriptionCount = 0,
            .vertexAttributeDescriptionCount = 0,
         },
      .pInputAssemblyState =
         &(VkPipelineInputAssemblyStateCreateInfo){
            .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
            .topology = VK_PRIMITIVE_TOPOLOGY_META_RECT_LIST_MESA,
            .primitiveRestartEnable = false,
         },
      .pViewportState =
         &(VkPipelineViewportStateCreateInfo){
            .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
            .viewportCount = 1,
            .scissorCount = 1,
         },
      .pRasterizationState =
         &(VkPipelineRasterizationStateCreateInfo){
            .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
            .depthClampEnable = false,
            .rasterizerDiscardEnable = false,
            .polygonMode = VK_POLYGON_MODE_FILL,
            .cullMode = VK_CULL_MODE_NONE,
            .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
         },
      .pMultisampleState =
         &(VkPipelineMultisampleStateCreateInfo){
            .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
            .pNext = &sample_locs_create_info,
            .rasterizationSamples = samples,
            .sampleShadingEnable = false,
            .pSampleMask = NULL,
            .alphaToCoverageEnable = false,
            .alphaToOneEnable = false,
         },
      .pColorBlendState =
         &(VkPipelineColorBlendStateCreateInfo){
            .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
            .logicOpEnable = false,
            .attachmentCount = 0,
            .pAttachments = NULL,
         },
      .pDepthStencilState =
         &(VkPipelineDepthStencilStateCreateInfo){
            .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
            .depthTestEnable = false,
            .depthWriteEnable = false,
            .depthBoundsTestEnable = false,
            .stencilTestEnable = false,
         },
      .pDynamicState =
         &(VkPipelineDynamicStateCreateInfo){
            .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
            .dynamicStateCount = 3,
            .pDynamicStates =
               (VkDynamicState[]){
                  VK_DYNAMIC_STATE_VIEWPORT,
                  VK_DYNAMIC_STATE_SCISSOR,
                  VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
               },
         },
      .layout = *layout_out,
   };

   struct vk_meta_rendering_info render = {
      .depth_attachment_format = VK_FORMAT_D32_SFLOAT_S8_UINT,
      .stencil_attachment_format = VK_FORMAT_D32_SFLOAT_S8_UINT,
   };

   result = vk_meta_create_graphics_pipeline(&device->vk, &device->meta_state.device, &pipeline_create_info, &render,
                                             &key, sizeof(key), pipeline_out);

   ralloc_free(vs_module);
   ralloc_free(fs_module);

   return result;
}

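/* Draws a full-screen rectangle over a single mip level/layer of the depth/stencil
 * image with dynamic rendering; the draw performs the expand for that subresource.
 */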
static void
radv_process_depth_image_layer(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
                               const VkImageSubresourceRange *range, int level, int layer)
{
   struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
   struct radv_image_view iview;
   uint32_t width, height;

   width = u_minify(image->vk.extent.width, range->baseMipLevel + level);
   height = u_minify(image->vk.extent.height, range->baseMipLevel + level);

   radv_image_view_init(&iview, device,
                        &(VkImageViewCreateInfo){
                           .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                           .image = radv_image_to_handle(image),
                           .viewType = radv_meta_get_view_type(image),
                           .format = image->vk.format,
                           .subresourceRange =
                              {
                                 .aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
                                 .baseMipLevel = range->baseMipLevel + level,
                                 .levelCount = 1,
                                 .baseArrayLayer = range->baseArrayLayer + layer,
                                 .layerCount = 1,
                              },
                        },
                        NULL);

   const VkRenderingAttachmentInfo depth_att = {
      .sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
      .imageView = radv_image_view_to_handle(&iview),
      .imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
      .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
      .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
   };

   const VkRenderingAttachmentInfo stencil_att = {
      .sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
      .imageView = radv_image_view_to_handle(&iview),
      .imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
      .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
      .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
   };

   const VkRenderingInfo rendering_info = {
      .sType = VK_STRUCTURE_TYPE_RENDERING_INFO,
      .flags = VK_RENDERING_INPUT_ATTACHMENT_NO_CONCURRENT_WRITES_BIT_MESA,
      .renderArea = {.offset = {0, 0}, .extent = {width, height}},
      .layerCount = 1,
      .pDepthAttachment = &depth_att,
      .pStencilAttachment = &stencil_att,
   };

   radv_CmdBeginRendering(radv_cmd_buffer_to_handle(cmd_buffer), &rendering_info);

   radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);

   radv_CmdEndRendering(radv_cmd_buffer_to_handle(cmd_buffer));

   radv_image_view_finish(&iview);
}

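/* Graphics path for the HTILE expand: binds the decompress pipeline, programs the
 * sample locations if provided, and draws every mip level/layer in the range that
 * has HTILE enabled.
 */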
static void
radv_process_depth_stencil(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
                           const VkImageSubresourceRange *subresourceRange,
                           struct radv_sample_locations_state *sample_locs)
{
   struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
   struct radv_meta_saved_state saved_state;
   VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
   VkPipelineLayout layout;
   VkPipeline pipeline;
   VkResult result;

   result = get_pipeline_gfx(device, image, &pipeline, &layout);
   if (result != VK_SUCCESS) {
      vk_command_buffer_set_error(&cmd_buffer->vk, result);
      return;
   }

   radv_meta_save(&saved_state, cmd_buffer, RADV_META_SAVE_GRAPHICS_PIPELINE | RADV_META_SAVE_RENDER);

   radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);

   if (sample_locs) {
      assert(image->vk.create_flags & VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT);

      /* Set the sample locations specified during explicit or automatic
       * layout transitions; otherwise the depth decompress pass uses the
       * default HW locations.
       */
      radv_CmdSetSampleLocationsEXT(cmd_buffer_h, &(VkSampleLocationsInfoEXT){
                                                     .sType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
                                                     .sampleLocationsPerPixel = sample_locs->per_pixel,
                                                     .sampleLocationGridSize = sample_locs->grid_size,
                                                     .sampleLocationsCount = sample_locs->count,
                                                     .pSampleLocations = sample_locs->locations,
                                                  });
   }

   for (uint32_t l = 0; l < vk_image_subresource_level_count(&image->vk, subresourceRange); ++l) {

      /* Do not decompress levels without HTILE. */
      if (!radv_htile_enabled(image, subresourceRange->baseMipLevel + l))
         continue;

      uint32_t width = u_minify(image->vk.extent.width, subresourceRange->baseMipLevel + l);
      uint32_t height = u_minify(image->vk.extent.height, subresourceRange->baseMipLevel + l);

      radv_CmdSetViewport(
         cmd_buffer_h, 0, 1,
         &(VkViewport){.x = 0, .y = 0, .width = width, .height = height, .minDepth = 0.0f, .maxDepth = 1.0f});

      radv_CmdSetScissor(cmd_buffer_h, 0, 1,
                         &(VkRect2D){
                            .offset = {0, 0},
                            .extent = {width, height},
                         });

      for (uint32_t s = 0; s < vk_image_subresource_layer_count(&image->vk, subresourceRange); s++) {
         radv_process_depth_image_layer(cmd_buffer, image, subresourceRange, l, s);
      }
   }

   radv_meta_restore(&saved_state, cmd_buffer);
}

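/* Returns the compute pipeline for the HTILE expand path (creating and caching it
 * on first use), with a push descriptor layout of two storage images: the
 * compressed source view and the uncompressed destination view.
 */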
static VkResult
get_pipeline_cs(struct radv_device *device, VkPipeline *pipeline_out, VkPipelineLayout *layout_out)
{
   enum radv_meta_object_key_type key = RADV_META_OBJECT_KEY_HTILE_EXPAND_CS;
   VkResult result;

   const VkDescriptorSetLayoutBinding bindings[] = {
      {
         .binding = 0,
         .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
         .descriptorCount = 1,
         .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
      },
      {
         .binding = 1,
         .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
         .descriptorCount = 1,
         .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
      },
   };

   const VkDescriptorSetLayoutCreateInfo desc_info = {
      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
      .flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT,
      .bindingCount = 2,
      .pBindings = bindings,
   };

   result = vk_meta_get_pipeline_layout(&device->vk, &device->meta_state.device, &desc_info, NULL, &key, sizeof(key),
                                        layout_out);
   if (result != VK_SUCCESS)
      return result;

   VkPipeline pipeline_from_cache = vk_meta_lookup_pipeline(&device->meta_state.device, &key, sizeof(key));
   if (pipeline_from_cache != VK_NULL_HANDLE) {
      *pipeline_out = pipeline_from_cache;
      return VK_SUCCESS;
   }

   nir_shader *cs = build_expand_depth_stencil_compute_shader(device);

   const VkPipelineShaderStageCreateInfo stage_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
      .stage = VK_SHADER_STAGE_COMPUTE_BIT,
      .module = vk_shader_module_handle_from_nir(cs),
      .pName = "main",
      .pSpecializationInfo = NULL,
   };

   const VkComputePipelineCreateInfo pipeline_info = {
      .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
      .stage = stage_info,
      .flags = 0,
      .layout = *layout_out,
   };

   result = vk_meta_create_compute_pipeline(&device->vk, &device->meta_state.device, &pipeline_info, &key, sizeof(key),
                                            pipeline_out);

   ralloc_free(cs);
   return result;
}

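/* Compute path for the HTILE expand, used on non-general queues. Only valid for
 * TC-compatible HTILE images: each subresource is copied onto itself through a
 * compressed source view and an uncompressed destination view, and the HTILE
 * metadata is then re-initialized to the expanded state.
 */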
static void
radv_expand_depth_stencil_compute(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
                                  const VkImageSubresourceRange *subresourceRange)
{
   struct radv_device *device = radv_cmd_buffer_device(cmd_buffer);
   struct radv_meta_saved_state saved_state;
   struct radv_image_view load_iview = {0};
   struct radv_image_view store_iview = {0};
   VkPipelineLayout layout;
   VkPipeline pipeline;
   VkResult result;

   assert(radv_image_is_tc_compat_htile(image));

   result = get_pipeline_cs(device, &pipeline, &layout);
   if (result != VK_SUCCESS) {
      vk_command_buffer_set_error(&cmd_buffer->vk, result);
      return;
   }

   radv_meta_save(&saved_state, cmd_buffer, RADV_META_SAVE_DESCRIPTORS | RADV_META_SAVE_COMPUTE_PIPELINE);

   radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_COMPUTE, pipeline);

   for (uint32_t l = 0; l < vk_image_subresource_level_count(&image->vk, subresourceRange); l++) {
      uint32_t width, height;

      /* Do not decompress levels without HTILE. */
      if (!radv_htile_enabled(image, subresourceRange->baseMipLevel + l))
         continue;

      width = u_minify(image->vk.extent.width, subresourceRange->baseMipLevel + l);
      height = u_minify(image->vk.extent.height, subresourceRange->baseMipLevel + l);

      for (uint32_t s = 0; s < vk_image_subresource_layer_count(&image->vk, subresourceRange); s++) {
         radv_image_view_init(&load_iview, device,
                              &(VkImageViewCreateInfo){
                                 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                                 .image = radv_image_to_handle(image),
                                 .viewType = VK_IMAGE_VIEW_TYPE_2D,
                                 .format = image->vk.format,
                                 .subresourceRange = {.aspectMask = subresourceRange->aspectMask,
                                                      .baseMipLevel = subresourceRange->baseMipLevel + l,
                                                      .levelCount = 1,
                                                      .baseArrayLayer = subresourceRange->baseArrayLayer + s,
                                                      .layerCount = 1},
                              },
                              &(struct radv_image_view_extra_create_info){.enable_compression = true});
         radv_image_view_init(&store_iview, device,
                              &(VkImageViewCreateInfo){
                                 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                                 .image = radv_image_to_handle(image),
                                 .viewType = VK_IMAGE_VIEW_TYPE_2D,
                                 .format = image->vk.format,
                                 .subresourceRange = {.aspectMask = subresourceRange->aspectMask,
                                                      .baseMipLevel = subresourceRange->baseMipLevel + l,
                                                      .levelCount = 1,
                                                      .baseArrayLayer = subresourceRange->baseArrayLayer + s,
                                                      .layerCount = 1},
                              },
                              &(struct radv_image_view_extra_create_info){.disable_compression = true});

         radv_meta_push_descriptor_set(
            cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, layout, 0, 2,
            (VkWriteDescriptorSet[]){{.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                      .dstBinding = 0,
                                      .dstArrayElement = 0,
                                      .descriptorCount = 1,
                                      .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
                                      .pImageInfo =
                                         (VkDescriptorImageInfo[]){
                                            {
                                               .sampler = VK_NULL_HANDLE,
                                               .imageView = radv_image_view_to_handle(&load_iview),
                                               .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
                                            },
                                         }},
                                     {.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                      .dstBinding = 1,
                                      .dstArrayElement = 0,
                                      .descriptorCount = 1,
                                      .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
                                      .pImageInfo = (VkDescriptorImageInfo[]){
                                         {
                                            .sampler = VK_NULL_HANDLE,
                                            .imageView = radv_image_view_to_handle(&store_iview),
                                            .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
                                         },
                                      }}});

         radv_unaligned_dispatch(cmd_buffer, width, height, 1);

         radv_image_view_finish(&load_iview);
         radv_image_view_finish(&store_iview);
      }
   }

   radv_meta_restore(&saved_state, cmd_buffer);

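   /* Make sure the compute copy is finished and its writes are visible before the
    * HTILE metadata is re-initialized below. */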
   cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_CS_PARTIAL_FLUSH | RADV_CMD_FLAG_INV_VCACHE |
                                   radv_src_access_flush(cmd_buffer, VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
                                                         VK_ACCESS_2_SHADER_WRITE_BIT, 0, image, subresourceRange);

   /* Initialize the HTILE metadata as "fully expanded". */
   uint32_t htile_value = radv_get_htile_initial_value(device, image);

   cmd_buffer->state.flush_bits |= radv_clear_htile(cmd_buffer, image, subresourceRange, htile_value, false);
}

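/* Entry point for the HTILE expand during depth/stencil layout transitions: uses
 * the graphics path on the general queue and the compute path otherwise.
 */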
void
radv_expand_depth_stencil(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
                          const VkImageSubresourceRange *subresourceRange,
                          struct radv_sample_locations_state *sample_locs)
{
   struct radv_barrier_data barrier = {0};

   barrier.layout_transitions.depth_stencil_expand = 1;
   radv_describe_layout_transition(cmd_buffer, &barrier);

   if (cmd_buffer->qf == RADV_QUEUE_GENERAL) {
      radv_process_depth_stencil(cmd_buffer, image, subresourceRange, sample_locs);
   } else {
      radv_expand_depth_stencil_compute(cmd_buffer, image, subresourceRange);
   }
}