/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#ifndef NVK_CMD_BUFFER_H
#define NVK_CMD_BUFFER_H 1

#include "nvk_private.h"

#include "nv_push.h"
#include "nvk_cmd_pool.h"
#include "nvk_descriptor_set.h"

#include "util/u_dynarray.h"

#include "vk_command_buffer.h"

#include <stdio.h>

struct nvk_buffer;
struct nvk_cbuf;
struct nvk_cmd_bo;
struct nvk_cmd_pool;
struct nvk_image_view;
struct nvk_push_descriptor_set;
struct nvk_shader;
struct vk_shader;

struct nvk_sample_location {
   uint8_t x_u4:4;
   uint8_t y_u4:4;
};
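
/* Sketch (assumption): going by the _u4 naming, the 4-bit fields presumably
 * encode sub-pixel sample positions as unsigned fixed point in 1/16-pixel
 * units, so the pixel center (0.5, 0.5) would be stored as:
 *
 *    struct nvk_sample_location center = {
 *       .x_u4 = 8,   // 8/16 = 0.5
 *       .y_u4 = 8,   // 8/16 = 0.5
 *    };
 */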

/** Root descriptor table.  This gets pushed to the GPU directly. */
struct nvk_root_descriptor_table {
   uint64_t root_desc_addr;

   union {
      struct {
         uint32_t base_vertex;
         uint32_t base_instance;
         uint32_t draw_id;
         uint32_t view_index;
         struct nvk_sample_location sample_locations[8];
      } draw;
      struct {
         uint32_t base_group[3];
         uint32_t group_count[3];
      } cs;
   };

   /* Client push constants */
   uint8_t push[NVK_MAX_PUSH_SIZE];

   /* Descriptor set base addresses */
   uint64_t sets[NVK_MAX_SETS];

   /* Dynamic buffer bindings */
   struct nvk_buffer_address dynamic_buffers[NVK_MAX_DYNAMIC_BUFFERS];

   /* Start index in dynamic_buffers where each set starts */
   uint8_t set_dynamic_buffer_start[NVK_MAX_SETS];
   /* Enforce 0x100 alignment, as required pre-Pascal */
   uint8_t __padding[0x18];
};

/* Helper macro for computing root descriptor byte offsets */
#define nvk_root_descriptor_offset(member) \
   offsetof(struct nvk_root_descriptor_table, member)
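
/* Sketch: since the macro is just offsetof() on the table above, the byte
 * offset of any root field within the table can be computed directly,
 * e.g.:
 *
 *    uint32_t off = nvk_root_descriptor_offset(draw.base_vertex);
 *
 * which is handy when updating an individual root field in place rather
 * than re-uploading the whole table.
 */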

struct nvk_descriptor_state {
   struct nvk_root_descriptor_table root;
   uint32_t set_sizes[NVK_MAX_SETS];
   struct nvk_descriptor_set *sets[NVK_MAX_SETS];
   uint32_t sets_dirty;

   struct nvk_push_descriptor_set *push[NVK_MAX_SETS];
   uint32_t push_dirty;
};
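
/* Sketch (assumption): sets_dirty and push_dirty appear to be per-set dirty
 * bitmasks, with bit i marking set i for re-emission, e.g.:
 *
 *    desc->sets_dirty |= BITFIELD_BIT(set_idx);
 */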

struct nvk_attachment {
   VkFormat vk_format;
   struct nvk_image_view *iview;

   VkResolveModeFlagBits resolve_mode;
   struct nvk_image_view *resolve_iview;
};

struct nvk_rendering_state {
   VkRenderingFlagBits flags;

   VkRect2D area;
   uint32_t layer_count;
   uint32_t view_mask;
   uint32_t samples;

   uint32_t color_att_count;
   struct nvk_attachment color_att[NVK_MAX_RTS];
   struct nvk_attachment depth_att;
   struct nvk_attachment stencil_att;
};

struct nvk_graphics_state {
   struct nvk_rendering_state render;
   struct nvk_descriptor_state descriptors;

   uint32_t shaders_dirty;
   struct nvk_shader *shaders[MESA_SHADER_MESH + 1];

   /* Used for meta save/restore */
   struct nvk_addr_range vb0;

   /* Needed by vk_command_buffer::dynamic_graphics_state */
   struct vk_vertex_input_state _dynamic_vi;
   struct vk_sample_locations_state _dynamic_sl;
};

struct nvk_compute_state {
   struct nvk_descriptor_state descriptors;
   struct nvk_shader *shader;
};

struct nvk_cmd_push {
   void *map;
   uint64_t addr;
   uint32_t range;
   bool no_prefetch;
};

struct nvk_cmd_buffer {
   struct vk_command_buffer vk;

   struct {
      struct nvk_graphics_state gfx;
      struct nvk_compute_state cs;
   } state;

   /** List of nvk_cmd_bo
    *
    * This list exists entirely for ownership tracking.  Everything in here
    * must also be in pushes or bo_refs if it is to be referenced by this
    * command buffer.
    */
   struct list_head bos;
   struct list_head gart_bos;

   struct nvk_cmd_bo *upload_bo;
   uint32_t upload_offset;

   struct nvk_cmd_bo *cond_render_gart_bo;
   uint32_t cond_render_gart_offset;

   struct nvk_cmd_bo *push_bo;
   uint32_t *push_bo_limit;
   struct nv_push push;
   /** Array of struct nvk_cmd_push
    *
    * Each entry acts both as a BO reference and as a range in the buffer
    * to use as a pushbuf.
    */
   struct util_dynarray pushes;

   uint64_t tls_space_needed;
};

VK_DEFINE_HANDLE_CASTS(nvk_cmd_buffer, vk.base, VkCommandBuffer,
                       VK_OBJECT_TYPE_COMMAND_BUFFER)

extern const struct vk_command_buffer_ops nvk_cmd_buffer_ops;

static inline struct nvk_device *
nvk_cmd_buffer_device(struct nvk_cmd_buffer *cmd)
{
   return (struct nvk_device *)cmd->vk.base.device;
}

static inline struct nvk_cmd_pool *
nvk_cmd_buffer_pool(struct nvk_cmd_buffer *cmd)
{
   return (struct nvk_cmd_pool *)cmd->vk.pool;
}

void nvk_cmd_buffer_new_push(struct nvk_cmd_buffer *cmd);

#define NVK_CMD_BUFFER_MAX_PUSH 512

static inline struct nv_push *
nvk_cmd_buffer_push(struct nvk_cmd_buffer *cmd, uint32_t dw_count)
{
   assert(dw_count <= NVK_CMD_BUFFER_MAX_PUSH);

   /* Compare to the actual limit on our push BO */
   if (unlikely(cmd->push.end + dw_count > cmd->push_bo_limit))
      nvk_cmd_buffer_new_push(cmd);

   cmd->push.limit = cmd->push.end + dw_count;

   return &cmd->push;
}
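
/* Sketch of the typical call pattern: reserve a worst-case dword count up
 * front, then emit methods with the helpers from nv_push.h.  The exact
 * method macros vary by class; P_IMMD below is illustrative.
 *
 *    struct nv_push *p = nvk_cmd_buffer_push(cmd, 2);
 *    P_IMMD(p, NV9097, SET_SOME_STATE, value);
 *
 * Because a fresh push BO may be started when dw_count doesn't fit, the
 * returned pointer should be re-fetched for each batch of methods rather
 * than cached.
 */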

void
nvk_cmd_buffer_push_indirect(struct nvk_cmd_buffer *cmd,
                             uint64_t addr, uint32_t dw_count);

void nvk_cmd_buffer_begin_graphics(struct nvk_cmd_buffer *cmd,
                                   const VkCommandBufferBeginInfo *pBeginInfo);
void nvk_cmd_buffer_begin_compute(struct nvk_cmd_buffer *cmd,
                                  const VkCommandBufferBeginInfo *pBeginInfo);

void nvk_cmd_invalidate_graphics_state(struct nvk_cmd_buffer *cmd);
void nvk_cmd_invalidate_compute_state(struct nvk_cmd_buffer *cmd);

void nvk_cmd_bind_shaders(struct vk_command_buffer *vk_cmd,
                          uint32_t stage_count,
                          const gl_shader_stage *stages,
                          struct vk_shader **const shaders);

void nvk_cmd_bind_graphics_shader(struct nvk_cmd_buffer *cmd,
                                  const gl_shader_stage stage,
                                  struct nvk_shader *shader);

void nvk_cmd_bind_compute_shader(struct nvk_cmd_buffer *cmd,
                                 struct nvk_shader *shader);

void nvk_cmd_bind_vertex_buffer(struct nvk_cmd_buffer *cmd, uint32_t vb_idx,
                                struct nvk_addr_range addr_range);

static inline struct nvk_descriptor_state *
nvk_get_descriptors_state(struct nvk_cmd_buffer *cmd,
                          VkPipelineBindPoint bind_point)
{
   switch (bind_point) {
   case VK_PIPELINE_BIND_POINT_GRAPHICS:
      return &cmd->state.gfx.descriptors;
   case VK_PIPELINE_BIND_POINT_COMPUTE:
      return &cmd->state.cs.descriptors;
   default:
      unreachable("Unhandled bind point");
   }
}

VkResult nvk_cmd_buffer_upload_alloc(struct nvk_cmd_buffer *cmd,
                                     uint32_t size, uint32_t alignment,
                                     uint64_t *addr, void **ptr);

VkResult nvk_cmd_buffer_upload_data(struct nvk_cmd_buffer *cmd,
                                    const void *data, uint32_t size,
                                    uint32_t alignment, uint64_t *addr);
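
/* Sketch of a typical upload: copy CPU data into the command buffer's
 * upload BO and get back a GPU address that stays valid for the lifetime
 * of the command buffer.  The names here (data, size) are illustrative.
 *
 *    uint64_t addr;
 *    VkResult result =
 *       nvk_cmd_buffer_upload_data(cmd, data, size, 4, &addr);
 *    if (result != VK_SUCCESS)
 *       return;  // or record the error on the command buffer
 */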

VkResult nvk_cmd_buffer_cond_render_alloc(struct nvk_cmd_buffer *cmd,
                                          uint64_t *addr);

void nvk_cmd_flush_wait_dep(struct nvk_cmd_buffer *cmd,
                            const VkDependencyInfo *dep,
                            bool wait);

void nvk_cmd_invalidate_deps(struct nvk_cmd_buffer *cmd,
                             uint32_t dep_count,
                             const VkDependencyInfo *deps);

void
nvk_cmd_buffer_flush_push_descriptors(struct nvk_cmd_buffer *cmd,
                                      struct nvk_descriptor_state *desc);

bool
nvk_cmd_buffer_get_cbuf_descriptor(struct nvk_cmd_buffer *cmd,
                                   const struct nvk_descriptor_state *desc,
                                   const struct nvk_shader *shader,
                                   const struct nvk_cbuf *cbuf,
                                   struct nvk_buffer_address *desc_out);
uint64_t
nvk_cmd_buffer_get_cbuf_descriptor_addr(struct nvk_cmd_buffer *cmd,
                                        const struct nvk_descriptor_state *desc,
                                        const struct nvk_cbuf *cbuf);

void nvk_meta_resolve_rendering(struct nvk_cmd_buffer *cmd,
                                const VkRenderingInfo *pRenderingInfo);

void nvk_cmd_buffer_dump(struct nvk_cmd_buffer *cmd, FILE *fp);

#endif