1 /*
2 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
3 * SPDX-License-Identifier: MIT
4 */
5 #include "nvk_buffer.h"
6 #include "nvk_cmd_buffer.h"
7 #include "nvk_descriptor_set.h"
8 #include "nvk_device.h"
9 #include "nvk_entrypoints.h"
10 #include "nvk_image.h"
11 #include "nvk_physical_device.h"
12
13 #include "nv_push_cl9097.h"
14 #include "nv_push_clb197.h"
15
16 static VkResult
nvk_cmd_bind_map_buffer(struct vk_command_buffer * vk_cmd,struct vk_meta_device * meta,VkBuffer _buffer,void ** map_out)17 nvk_cmd_bind_map_buffer(struct vk_command_buffer *vk_cmd,
18 struct vk_meta_device *meta,
19 VkBuffer _buffer, void **map_out)
20 {
21 struct nvk_cmd_buffer *cmd =
22 container_of(vk_cmd, struct nvk_cmd_buffer, vk);
23 VK_FROM_HANDLE(nvk_buffer, buffer, _buffer);
24 VkResult result;
25
26 uint64_t addr;
27 assert(buffer->vk.size < UINT_MAX);
28 result = nvk_cmd_buffer_upload_alloc(cmd, buffer->vk.size, 16,
29 &addr, map_out);
30 if (unlikely(result != VK_SUCCESS))
31 return result;
32
33 buffer->addr = addr;
34
35 return VK_SUCCESS;
36 }
37
38 VkResult
nvk_device_init_meta(struct nvk_device * dev)39 nvk_device_init_meta(struct nvk_device *dev)
40 {
41 struct nvk_physical_device *pdev = nvk_device_physical(dev);
42
43 VkResult result = vk_meta_device_init(&dev->vk, &dev->meta);
44 if (result != VK_SUCCESS)
45 return result;
46
47 dev->meta.use_gs_for_layer = pdev->info.cls_eng3d < MAXWELL_B;
48 dev->meta.use_rect_list_pipeline = true;
49 dev->meta.cmd_bind_map_buffer = nvk_cmd_bind_map_buffer;
50 dev->meta.max_bind_map_buffer_size_B = 64 * 1024; /* TODO */
51
52 return VK_SUCCESS;
53 }
54
55 void
nvk_device_finish_meta(struct nvk_device * dev)56 nvk_device_finish_meta(struct nvk_device *dev)
57 {
58 vk_meta_device_finish(&dev->vk, &dev->meta);
59 }
60
/* All graphics state saved by nvk_meta_begin() and restored by
 * nvk_meta_end() around a vk_meta operation.
 */
struct nvk_meta_save {
   /* Backing storage referenced by dynamic.vi and
    * dynamic.ms.sample_locations (see the asserts in nvk_meta_end()).
    */
   struct vk_vertex_input_state _dynamic_vi;
   struct vk_sample_locations_state _dynamic_sl;
   struct vk_dynamic_graphics_state dynamic;

   /* Bound graphics shaders, one slot per stage.  The compute slot is
    * skipped on restore since meta never touches it.
    */
   struct nvk_shader *shaders[MESA_SHADER_MESH + 1];

   /* Vertex buffer 0 binding; the only VB meta binds */
   struct nvk_addr_range vb0;

   /* Descriptor set 0 binding, its root-table address, and a copy of the
    * push descriptor contents (only meaningful when desc0.push != NULL).
    */
   struct nvk_descriptor_set_binding desc0;
   struct nvk_buffer_address desc0_set_addr;
   struct nvk_push_descriptor_set push_desc0;

   /* Dynamic buffer start index for every set, since re-binding set 0 can
    * disturb all of them.
    */
   uint8_t set_dynamic_buffer_start[NVK_MAX_SETS];

   /* Push constant data */
   uint8_t push[NVK_MAX_PUSH_SIZE];
};
73
/*
 * Save all graphics state that a vk_meta operation may clobber so that
 * nvk_meta_end() can restore it, and turn off the HW statistics counters
 * for the duration of the meta operation.
 */
static void
nvk_meta_begin(struct nvk_cmd_buffer *cmd,
               struct nvk_meta_save *save)
{
   const struct nvk_descriptor_state *desc = &cmd->state.gfx.descriptors;

   /* The dynamic state struct holds pointers into the command buffer's
    * _dynamic_vi/_dynamic_sl backing storage, so save those as well.
    */
   save->dynamic = cmd->vk.dynamic_graphics_state;
   save->_dynamic_vi = cmd->state.gfx._dynamic_vi;
   save->_dynamic_sl = cmd->state.gfx._dynamic_sl;

   STATIC_ASSERT(sizeof(cmd->state.gfx.shaders) == sizeof(save->shaders));
   memcpy(save->shaders, cmd->state.gfx.shaders, sizeof(save->shaders));

   /* Meta only binds vertex buffer 0 */
   save->vb0 = cmd->state.gfx.vb0;

   /* Meta only uses descriptor set 0.  Save the binding, its root-table
    * address, and (when bound) a deep copy of the push descriptor set.
    */
   save->desc0 = desc->sets[0];
   nvk_descriptor_state_get_root(desc, sets[0], &save->desc0_set_addr);
   if (desc->sets[0].push != NULL)
      save->push_desc0 = *desc->sets[0].push;

   /* Re-binding set 0 can disturb the dynamic buffer start indices of
    * every set, so save all of them along with the push constant block.
    */
   nvk_descriptor_state_get_root_array(desc, set_dynamic_buffer_start,
                                       0, NVK_MAX_SETS,
                                       save->set_dynamic_buffer_start);
   nvk_descriptor_state_get_root_array(desc, push, 0, NVK_MAX_PUSH_SIZE,
                                       save->push);

   /* Disable every statistics counter, presumably so meta draws don't
    * contribute to active pipeline-statistics/XFB queries — nvk_meta_end()
    * re-enables them all.
    */
   struct nv_push *p = nvk_cmd_buffer_push(cmd, 2);
   P_IMMD(p, NV9097, SET_STATISTICS_COUNTER, {
      .da_vertices_generated_enable = false,
      .da_primitives_generated_enable = false,
      .vs_invocations_enable = false,
      .gs_invocations_enable = false,
      .gs_primitives_generated_enable = false,
      .streaming_primitives_succeeded_enable = false,
      .streaming_primitives_needed_enable = false,
      .clipper_invocations_enable = false,
      .clipper_primitives_generated_enable = false,
      .ps_invocations_enable = false,
      .ti_invocations_enable = false,
      .ts_invocations_enable = false,
      .ts_primitives_generated_enable = false,
      .total_streaming_primitives_needed_succeeded_enable = false,
      .vtg_primitives_out_enable = false,
   });
}
119
120 static void
nvk_meta_init_render(struct nvk_cmd_buffer * cmd,struct vk_meta_rendering_info * info)121 nvk_meta_init_render(struct nvk_cmd_buffer *cmd,
122 struct vk_meta_rendering_info *info)
123 {
124 const struct nvk_rendering_state *render = &cmd->state.gfx.render;
125
126 *info = (struct vk_meta_rendering_info) {
127 .view_mask = render->view_mask,
128 .color_attachment_count = render->color_att_count,
129 .depth_attachment_format = render->depth_att.vk_format,
130 .stencil_attachment_format = render->stencil_att.vk_format,
131 };
132 for (uint32_t a = 0; a < render->color_att_count; a++) {
133 info->color_attachment_formats[a] = render->color_att[a].vk_format;
134 info->color_attachment_write_masks[a] =
135 VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
136 VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
137 }
138 }
139
/*
 * Restore the graphics state saved by nvk_meta_begin() and re-enable the
 * HW statistics counters.
 */
static void
nvk_meta_end(struct nvk_cmd_buffer *cmd,
             struct nvk_meta_save *save)
{
   struct nvk_descriptor_state *desc = &cmd->state.gfx.descriptors;

   /* Re-bind descriptor set 0 according to how it was bound before */
   switch (save->desc0.type) {
   case NVK_DESCRIPTOR_SET_TYPE_NONE:
      desc->sets[0].type = NVK_DESCRIPTOR_SET_TYPE_NONE;
      break;

   case NVK_DESCRIPTOR_SET_TYPE_SET: {
      /* Real descriptor set: re-derive the root address from the set */
      desc->sets[0].type = NVK_DESCRIPTOR_SET_TYPE_SET;
      desc->sets[0].set = save->desc0.set;
      struct nvk_buffer_address addr = nvk_descriptor_set_addr(save->desc0.set);
      nvk_descriptor_state_set_root(cmd, desc, sets[0], addr);
      break;
   }

   case NVK_DESCRIPTOR_SET_TYPE_PUSH:
      /* Push descriptors: restore the saved copy and mark set 0 dirty so
       * it gets re-uploaded.
       */
      desc->sets[0].type = NVK_DESCRIPTOR_SET_TYPE_PUSH;
      desc->sets[0].set = NULL;
      *desc->sets[0].push = save->push_desc0;
      desc->push_dirty |= BITFIELD_BIT(0);
      break;

   case NVK_DESCRIPTOR_SET_TYPE_BUFFER:
      /* Buffer binding: restore the saved root-table address directly */
      desc->sets[0].type = NVK_DESCRIPTOR_SET_TYPE_BUFFER;
      desc->sets[0].set = NULL;
      nvk_descriptor_state_set_root(cmd, desc, sets[0], save->desc0_set_addr);
      break;

   default:
      unreachable("Unknown descriptor set type");
   }
   nvk_cmd_dirty_cbufs_for_descriptors(cmd, ~0, 0, 1);

   /* Restore set_dynamic_buffer_start because meta binding set 0 can
    * disturb all dynamic buffers starts for all sets.
    */
   nvk_descriptor_state_set_root_array(cmd, desc, set_dynamic_buffer_start,
                                       0, NVK_MAX_SETS,
                                       save->set_dynamic_buffer_start);

   /* Restore the dynamic state */
   assert(save->dynamic.vi == &cmd->state.gfx._dynamic_vi);
   assert(save->dynamic.ms.sample_locations == &cmd->state.gfx._dynamic_sl);
   cmd->vk.dynamic_graphics_state = save->dynamic;
   cmd->state.gfx._dynamic_vi = save->_dynamic_vi;
   cmd->state.gfx._dynamic_sl = save->_dynamic_sl;
   /* Mark everything that has ever been set as dirty so it all gets
    * re-emitted with the restored values.
    */
   memcpy(cmd->vk.dynamic_graphics_state.dirty,
          cmd->vk.dynamic_graphics_state.set,
          sizeof(cmd->vk.dynamic_graphics_state.set));

   /* Re-bind the saved graphics shaders; compute is untouched by meta */
   for (uint32_t stage = 0; stage < ARRAY_SIZE(save->shaders); stage++) {
      if (stage == MESA_SHADER_COMPUTE)
         continue;

      nvk_cmd_bind_graphics_shader(cmd, stage, save->shaders[stage]);
   }

   nvk_cmd_bind_vertex_buffer(cmd, 0, save->vb0);

   nvk_descriptor_state_set_root_array(cmd, desc, push, 0, sizeof(save->push),
                                       save->push);

   /* Re-enable every statistics counter disabled in nvk_meta_begin() */
   struct nv_push *p = nvk_cmd_buffer_push(cmd, 2);
   P_IMMD(p, NV9097, SET_STATISTICS_COUNTER, {
      .da_vertices_generated_enable = true,
      .da_primitives_generated_enable = true,
      .vs_invocations_enable = true,
      .gs_invocations_enable = true,
      .gs_primitives_generated_enable = true,
      .streaming_primitives_succeeded_enable = true,
      .streaming_primitives_needed_enable = true,
      .clipper_invocations_enable = true,
      .clipper_primitives_generated_enable = true,
      .ps_invocations_enable = true,
      .ti_invocations_enable = true,
      .ts_invocations_enable = true,
      .ts_primitives_generated_enable = true,
      .total_streaming_primitives_needed_succeeded_enable = true,
      .vtg_primitives_out_enable = true,
   });
}
225
226 VKAPI_ATTR void VKAPI_CALL
nvk_CmdBlitImage2(VkCommandBuffer commandBuffer,const VkBlitImageInfo2 * pBlitImageInfo)227 nvk_CmdBlitImage2(VkCommandBuffer commandBuffer,
228 const VkBlitImageInfo2 *pBlitImageInfo)
229 {
230 VK_FROM_HANDLE(nvk_cmd_buffer, cmd, commandBuffer);
231 struct nvk_device *dev = nvk_cmd_buffer_device(cmd);
232
233 struct nvk_meta_save save;
234 nvk_meta_begin(cmd, &save);
235
236 vk_meta_blit_image2(&cmd->vk, &dev->meta, pBlitImageInfo);
237
238 nvk_meta_end(cmd, &save);
239 }
240
241 VKAPI_ATTR void VKAPI_CALL
nvk_CmdResolveImage2(VkCommandBuffer commandBuffer,const VkResolveImageInfo2 * pResolveImageInfo)242 nvk_CmdResolveImage2(VkCommandBuffer commandBuffer,
243 const VkResolveImageInfo2 *pResolveImageInfo)
244 {
245 VK_FROM_HANDLE(nvk_cmd_buffer, cmd, commandBuffer);
246 struct nvk_device *dev = nvk_cmd_buffer_device(cmd);
247
248 struct nvk_meta_save save;
249 nvk_meta_begin(cmd, &save);
250
251 vk_meta_resolve_image2(&cmd->vk, &dev->meta, pResolveImageInfo);
252
253 nvk_meta_end(cmd, &save);
254 }
255
256 void
nvk_meta_resolve_rendering(struct nvk_cmd_buffer * cmd,const VkRenderingInfo * pRenderingInfo)257 nvk_meta_resolve_rendering(struct nvk_cmd_buffer *cmd,
258 const VkRenderingInfo *pRenderingInfo)
259 {
260 struct nvk_device *dev = nvk_cmd_buffer_device(cmd);
261
262 struct nvk_meta_save save;
263 nvk_meta_begin(cmd, &save);
264
265 vk_meta_resolve_rendering(&cmd->vk, &dev->meta, pRenderingInfo);
266
267 nvk_meta_end(cmd, &save);
268 }
269