/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#include "nvk_buffer.h"
#include "nvk_cmd_buffer.h"
#include "nvk_device.h"
#include "nvk_entrypoints.h"
#include "nvk_image.h"
#include "nvk_physical_device.h"

#include "nvk_cl9097.h"
#include "nvk_clb197.h"

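/* vk_meta callback used to back a meta VkBuffer with memory: space is carved
 * out of the command buffer's upload stream, the buffer's GPU address is
 * pointed at it, and the CPU mapping is returned so vk_meta can write the
 * contents directly.
 */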
static VkResult
nvk_cmd_bind_map_buffer(struct vk_command_buffer *vk_cmd,
                        struct vk_meta_device *meta,
                        VkBuffer _buffer, void **map_out)
{
   struct nvk_cmd_buffer *cmd =
      container_of(vk_cmd, struct nvk_cmd_buffer, vk);
   VK_FROM_HANDLE(nvk_buffer, buffer, _buffer);
   VkResult result;

   uint64_t addr;
   assert(buffer->vk.size < UINT_MAX);
   result = nvk_cmd_buffer_upload_alloc(cmd, buffer->vk.size, 16,
                                        &addr, map_out);
   if (unlikely(result != VK_SUCCESS))
      return result;

   buffer->addr = addr;

   return VK_SUCCESS;
}

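/* Sets up the common vk_meta implementation for this device.  Pre-Maxwell B
 * hardware needs a geometry shader to write the layer output, and meta
 * uploads are bounced through the command buffer's upload stream via
 * nvk_cmd_bind_map_buffer().
 */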
VkResult
nvk_device_init_meta(struct nvk_device *dev)
{
   struct nvk_physical_device *pdev = nvk_device_physical(dev);

   VkResult result = vk_meta_device_init(&dev->vk, &dev->meta);
   if (result != VK_SUCCESS)
      return result;

   dev->meta.use_gs_for_layer = pdev->info.cls_eng3d < MAXWELL_B;
   dev->meta.cmd_bind_map_buffer = nvk_cmd_bind_map_buffer;
   dev->meta.max_bind_map_buffer_size_B = 64 * 1024; /* TODO */

   return VK_SUCCESS;
}

void
nvk_device_finish_meta(struct nvk_device *dev)
{
   vk_meta_device_finish(&dev->vk, &dev->meta);
}

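/* Graphics state that vk_meta operations may clobber.  Captured by
 * nvk_meta_begin() and restored by nvk_meta_end().
 */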
struct nvk_meta_save {
   struct vk_vertex_input_state _dynamic_vi;
   struct vk_sample_locations_state _dynamic_sl;
   struct vk_dynamic_graphics_state dynamic;
   struct nvk_shader *shaders[MESA_SHADER_MESH + 1];
   struct nvk_addr_range vb0;
   struct nvk_descriptor_set *desc0;
   bool has_push_desc0;
   struct nvk_push_descriptor_set push_desc0;
   uint8_t push[128];
};

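/* Saves the graphics state a meta operation may touch: dynamic state, bound
 * shaders, vertex buffer 0, descriptor set 0, and the root push constants.
 */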
static void
nvk_meta_begin(struct nvk_cmd_buffer *cmd,
               struct nvk_meta_save *save)
{
   save->dynamic = cmd->vk.dynamic_graphics_state;
   save->_dynamic_vi = cmd->state.gfx._dynamic_vi;
   save->_dynamic_sl = cmd->state.gfx._dynamic_sl;

   STATIC_ASSERT(sizeof(cmd->state.gfx.shaders) == sizeof(save->shaders));
   memcpy(save->shaders, cmd->state.gfx.shaders, sizeof(save->shaders));

   save->vb0 = cmd->state.gfx.vb0;

   save->desc0 = cmd->state.gfx.descriptors.sets[0];
   save->has_push_desc0 = cmd->state.gfx.descriptors.push[0];
   if (save->has_push_desc0)
      save->push_desc0 = *cmd->state.gfx.descriptors.push[0];

   STATIC_ASSERT(sizeof(save->push) ==
                 sizeof(cmd->state.gfx.descriptors.root.push));
   memcpy(save->push, cmd->state.gfx.descriptors.root.push, sizeof(save->push));

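   /* Disable all pipeline statistics counters so the internal meta draws do
    * not leak into any active pipeline statistics queries.
    */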
   struct nv_push *p = nvk_cmd_buffer_push(cmd, 2);
   P_IMMD(p, NV9097, SET_STATISTICS_COUNTER, {
      .da_vertices_generated_enable = false,
      .da_primitives_generated_enable = false,
      .vs_invocations_enable = false,
      .gs_invocations_enable = false,
      .gs_primitives_generated_enable = false,
      .streaming_primitives_succeeded_enable = false,
      .streaming_primitives_needed_enable = false,
      .clipper_invocations_enable = false,
      .clipper_primitives_generated_enable = false,
      .ps_invocations_enable = false,
      .ti_invocations_enable = false,
      .ts_invocations_enable = false,
      .ts_primitives_generated_enable = false,
      .total_streaming_primitives_needed_succeeded_enable = false,
      .vtg_primitives_out_enable = false,
   });
}

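/* Fills out the vk_meta_rendering_info describing the current dynamic render
 * pass so vk_meta can create compatible pipelines.
 */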
static void
nvk_meta_init_render(struct nvk_cmd_buffer *cmd,
                     struct vk_meta_rendering_info *info)
{
   const struct nvk_rendering_state *render = &cmd->state.gfx.render;

   *info = (struct vk_meta_rendering_info) {
      .view_mask = render->view_mask,
      .color_attachment_count = render->color_att_count,
      .depth_attachment_format = render->depth_att.vk_format,
      .stencil_attachment_format = render->stencil_att.vk_format,
   };
   for (uint32_t a = 0; a < render->color_att_count; a++)
      info->color_attachment_formats[a] = render->color_att[a].vk_format;
}

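/* Restores the state saved by nvk_meta_begin() and re-enables the statistics
 * counters.
 */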
static void
nvk_meta_end(struct nvk_cmd_buffer *cmd,
             struct nvk_meta_save *save)
{
   if (save->desc0) {
      cmd->state.gfx.descriptors.sets[0] = save->desc0;
      cmd->state.gfx.descriptors.root.sets[0] = nvk_descriptor_set_addr(save->desc0);
      cmd->state.gfx.descriptors.sets_dirty |= BITFIELD_BIT(0);
      cmd->state.gfx.descriptors.push_dirty &= ~BITFIELD_BIT(0);
   } else if (save->has_push_desc0) {
      *cmd->state.gfx.descriptors.push[0] = save->push_desc0;
      cmd->state.gfx.descriptors.push_dirty |= BITFIELD_BIT(0);
   }

   /* Restore the dynamic state */
   assert(save->dynamic.vi == &cmd->state.gfx._dynamic_vi);
   assert(save->dynamic.ms.sample_locations == &cmd->state.gfx._dynamic_sl);
   cmd->vk.dynamic_graphics_state = save->dynamic;
   cmd->state.gfx._dynamic_vi = save->_dynamic_vi;
   cmd->state.gfx._dynamic_sl = save->_dynamic_sl;
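   /* Mark every piece of dynamic state that has ever been set as dirty so
    * the restored values get re-emitted on the next draw.
    */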
   memcpy(cmd->vk.dynamic_graphics_state.dirty,
          cmd->vk.dynamic_graphics_state.set,
          sizeof(cmd->vk.dynamic_graphics_state.set));
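
   /* Restore the saved graphics shaders.  Compute is skipped; this
    * save/restore only covers the graphics bind point.
    */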
   for (uint32_t stage = 0; stage < ARRAY_SIZE(save->shaders); stage++) {
      if (stage == MESA_SHADER_COMPUTE)
         continue;

      nvk_cmd_bind_graphics_shader(cmd, stage, save->shaders[stage]);
   }

   nvk_cmd_bind_vertex_buffer(cmd, 0, save->vb0);

   memcpy(cmd->state.gfx.descriptors.root.push, save->push, sizeof(save->push));

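   /* Turn the statistics counters back on now that the meta draws are done. */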
   struct nv_push *p = nvk_cmd_buffer_push(cmd, 2);
   P_IMMD(p, NV9097, SET_STATISTICS_COUNTER, {
      .da_vertices_generated_enable = true,
      .da_primitives_generated_enable = true,
      .vs_invocations_enable = true,
      .gs_invocations_enable = true,
      .gs_primitives_generated_enable = true,
      .streaming_primitives_succeeded_enable = true,
      .streaming_primitives_needed_enable = true,
      .clipper_invocations_enable = true,
      .clipper_primitives_generated_enable = true,
      .ps_invocations_enable = true,
      .ti_invocations_enable = true,
      .ts_invocations_enable = true,
      .ts_primitives_generated_enable = true,
      .total_streaming_primitives_needed_succeeded_enable = true,
      .vtg_primitives_out_enable = true,
   });
}

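/* Blits and resolves are implemented with the common vk_meta code.  Each
 * entrypoint wraps the vk_meta call in nvk_meta_begin()/nvk_meta_end() so the
 * application's 3D state survives the internal draws.
 */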
VKAPI_ATTR void VKAPI_CALL
nvk_CmdBlitImage2(VkCommandBuffer commandBuffer,
                  const VkBlitImageInfo2 *pBlitImageInfo)
{
   VK_FROM_HANDLE(nvk_cmd_buffer, cmd, commandBuffer);
   struct nvk_device *dev = nvk_cmd_buffer_device(cmd);

   struct nvk_meta_save save;
   nvk_meta_begin(cmd, &save);

   vk_meta_blit_image2(&cmd->vk, &dev->meta, pBlitImageInfo);

   nvk_meta_end(cmd, &save);
}

VKAPI_ATTR void VKAPI_CALL
nvk_CmdResolveImage2(VkCommandBuffer commandBuffer,
                     const VkResolveImageInfo2 *pResolveImageInfo)
{
   VK_FROM_HANDLE(nvk_cmd_buffer, cmd, commandBuffer);
   struct nvk_device *dev = nvk_cmd_buffer_device(cmd);

   struct nvk_meta_save save;
   nvk_meta_begin(cmd, &save);

   vk_meta_resolve_image2(&cmd->vk, &dev->meta, pResolveImageInfo);

   nvk_meta_end(cmd, &save);
}

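/* Resolves the attachments of the current render pass instance.  Unlike the
 * entrypoints above, this is called directly by the driver rather than by the
 * application.
 */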
void
nvk_meta_resolve_rendering(struct nvk_cmd_buffer *cmd,
                           const VkRenderingInfo *pRenderingInfo)
{
   struct nvk_device *dev = nvk_cmd_buffer_device(cmd);

   struct nvk_meta_save save;
   nvk_meta_begin(cmd, &save);

   vk_meta_resolve_rendering(&cmd->vk, &dev->meta, pRenderingInfo);

   nvk_meta_end(cmd, &save);
}