1 /*
2 * Copyright 2019 Google LLC
3 * SPDX-License-Identifier: MIT
4 *
5 * based in part on anv and radv which are:
6 * Copyright © 2015 Intel Corporation
7 * Copyright © 2016 Red Hat.
8 * Copyright © 2016 Bas Nieuwenhuizen
9 */
10
11 #include "vn_render_pass.h"
12
13 #include "venus-protocol/vn_protocol_driver_framebuffer.h"
14 #include "venus-protocol/vn_protocol_driver_render_pass.h"
15 #include "vk_format.h"
16
17 #include "vn_device.h"
18 #include "vn_image.h"
19
/* Count attachments whose initialLayout / finalLayout is
 * VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.  A macro rather than a function so it
 * works with both VkAttachmentDescription and VkAttachmentDescription2
 * arrays.  initial_count and final_count are uint32_t pointers and are
 * always written.
 */
#define COUNT_PRESENT_SRC(atts, att_count, initial_count, final_count)       \
   do {                                                                      \
      *initial_count = 0;                                                    \
      *final_count = 0;                                                      \
      for (uint32_t i = 0; i < att_count; i++) {                             \
         if (atts[i].initialLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)       \
            (*initial_count)++;                                              \
         if (atts[i].finalLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)         \
            (*final_count)++;                                                \
      }                                                                      \
   } while (false)
31
/* Copy atts into out_atts while rewriting any PRESENT_SRC_KHR initial/final
 * layout to VN_PRESENT_SRC_INTERNAL_LAYOUT, recording the index of each
 * rewritten attachment in pass->present_acquire_attachments (initialLayout
 * hits) and pass->present_release_attachments (finalLayout hits).  The pass
 * arrays must already be sized to the counts produced by COUNT_PRESENT_SRC
 * for the same attachment array.  Macro so it works with both
 * VkAttachmentDescription and VkAttachmentDescription2.
 */
#define REPLACE_PRESENT_SRC(pass, atts, att_count, out_atts)                 \
   do {                                                                      \
      struct vn_present_src_attachment *_acquire_atts =                      \
         pass->present_acquire_attachments;                                  \
      struct vn_present_src_attachment *_release_atts =                      \
         pass->present_release_attachments;                                  \
                                                                             \
      memcpy(out_atts, atts, sizeof(*atts) * att_count);                     \
      for (uint32_t i = 0; i < att_count; i++) {                             \
         if (out_atts[i].initialLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) { \
            out_atts[i].initialLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;      \
            _acquire_atts->index = i;                                        \
            _acquire_atts++;                                                 \
         }                                                                   \
         if (out_atts[i].finalLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {   \
            out_atts[i].finalLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;        \
            _release_atts->index = i;                                        \
            _release_atts++;                                                 \
         }                                                                   \
      }                                                                      \
   } while (false)
53
/* Initialize each vn_subpass's attachment_aspects from the create info:
 * VK_IMAGE_ASPECT_COLOR_BIT if the subpass uses any color attachment, plus
 * the aspects of the depth/stencil attachment's format if one is used.
 * Macro so it works with both VkRenderPassCreateInfo and
 * VkRenderPassCreateInfo2.  Assumes _pass->subpasses was zero-initialized.
 */
#define INIT_SUBPASSES(_pass, _pCreateInfo)                                  \
   do {                                                                      \
      for (uint32_t i = 0; i < _pCreateInfo->subpassCount; i++) {            \
         __auto_type subpass_desc = &_pCreateInfo->pSubpasses[i];            \
         struct vn_subpass *subpass = &_pass->subpasses[i];                  \
                                                                             \
         for (uint32_t j = 0; j < subpass_desc->colorAttachmentCount; j++) { \
            if (subpass_desc->pColorAttachments[j].attachment !=             \
                VK_ATTACHMENT_UNUSED) {                                      \
               subpass->attachment_aspects |= VK_IMAGE_ASPECT_COLOR_BIT;     \
               break;                                                        \
            }                                                                \
         }                                                                   \
                                                                             \
         if (subpass_desc->pDepthStencilAttachment &&                        \
             subpass_desc->pDepthStencilAttachment->attachment !=            \
                VK_ATTACHMENT_UNUSED) {                                      \
            uint32_t att =                                                   \
               subpass_desc->pDepthStencilAttachment->attachment;            \
            subpass->attachment_aspects |=                                   \
               vk_format_aspects(_pCreateInfo->pAttachments[att].format);    \
         }                                                                   \
      }                                                                      \
   } while (false)
78
79 static inline void
vn_render_pass_count_present_src(const VkRenderPassCreateInfo * create_info,uint32_t * initial_count,uint32_t * final_count)80 vn_render_pass_count_present_src(const VkRenderPassCreateInfo *create_info,
81 uint32_t *initial_count,
82 uint32_t *final_count)
83 {
84 if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
85 *initial_count = *final_count = 0;
86 return;
87 }
88 COUNT_PRESENT_SRC(create_info->pAttachments, create_info->attachmentCount,
89 initial_count, final_count);
90 }
91
92 static inline void
vn_render_pass_count_present_src2(const VkRenderPassCreateInfo2 * create_info,uint32_t * initial_count,uint32_t * final_count)93 vn_render_pass_count_present_src2(const VkRenderPassCreateInfo2 *create_info,
94 uint32_t *initial_count,
95 uint32_t *final_count)
96 {
97 if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
98 *initial_count = *final_count = 0;
99 return;
100 }
101 COUNT_PRESENT_SRC(create_info->pAttachments, create_info->attachmentCount,
102 initial_count, final_count);
103 }
104
/* Copy the create info's attachments into out_atts with PRESENT_SRC_KHR
 * layouts rewritten to VN_PRESENT_SRC_INTERNAL_LAYOUT; records rewritten
 * indices in pass's acquire/release attachment arrays.  out_atts must hold
 * create_info->attachmentCount entries.
 */
static inline void
vn_render_pass_replace_present_src(struct vn_render_pass *pass,
                                   const VkRenderPassCreateInfo *create_info,
                                   VkAttachmentDescription *out_atts)
{
   REPLACE_PRESENT_SRC(pass, create_info->pAttachments,
                       create_info->attachmentCount, out_atts);
}
113
/* VkRenderPassCreateInfo2 variant of vn_render_pass_replace_present_src. */
static inline void
vn_render_pass_replace_present_src2(struct vn_render_pass *pass,
                                    const VkRenderPassCreateInfo2 *create_info,
                                    VkAttachmentDescription2 *out_atts)
{
   REPLACE_PRESENT_SRC(pass, create_info->pAttachments,
                       create_info->attachmentCount, out_atts);
}
122
123 static void
vn_render_pass_setup_present_src_barriers(struct vn_render_pass * pass)124 vn_render_pass_setup_present_src_barriers(struct vn_render_pass *pass)
125 {
126 /* TODO parse VkSubpassDependency for more accurate barriers */
127
128 for (uint32_t i = 0; i < pass->present_acquire_count; i++) {
129 struct vn_present_src_attachment *att =
130 &pass->present_acquire_attachments[i];
131
132 att->src_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
133 att->src_access_mask = 0;
134 att->dst_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
135 att->dst_access_mask =
136 VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
137 }
138
139 for (uint32_t i = 0; i < pass->present_release_count; i++) {
140 struct vn_present_src_attachment *att =
141 &pass->present_release_attachments[i];
142
143 att->src_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
144 att->src_access_mask = VK_ACCESS_MEMORY_WRITE_BIT;
145 att->dst_stage_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
146 att->dst_access_mask = 0;
147 }
148 }
149
/* Allocate and zero-initialize a vn_render_pass together with its trailing
 * arrays — one vn_present_src_attachment per acquire/release attachment and
 * one vn_subpass per subpass — in a single multialloc.  Returns NULL on
 * allocation failure.
 */
static struct vn_render_pass *
vn_render_pass_create(struct vn_device *dev,
                      uint32_t present_acquire_count,
                      uint32_t present_release_count,
                      uint32_t subpass_count,
                      const VkAllocationCallbacks *alloc)
{
   uint32_t present_count = present_acquire_count + present_release_count;
   struct vn_render_pass *pass;
   struct vn_present_src_attachment *present_atts;
   struct vn_subpass *subpasses;

   /* One zeroed allocation covering the pass struct and both arrays. */
   VK_MULTIALLOC(ma);
   vk_multialloc_add(&ma, &pass, __typeof__(*pass), 1);
   vk_multialloc_add(&ma, &present_atts, __typeof__(*present_atts),
                     present_count);
   vk_multialloc_add(&ma, &subpasses, __typeof__(*subpasses), subpass_count);

   if (!vk_multialloc_zalloc(&ma, alloc, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT))
      return NULL;

   vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);

   pass->present_count = present_count;
   pass->present_acquire_count = present_acquire_count;
   pass->present_release_count = present_release_count;
   pass->subpass_count = subpass_count;

   /* For each array pointer, set it only if its count != 0. This allows code
    * elsewhere to intuitively use either condition, `foo_atts == NULL` or
    * `foo_count != 0`.
    */
   if (present_count)
      pass->present_attachments = present_atts;
   /* Acquire entries come first, then release entries, within the same
    * present_atts array.
    */
   if (present_acquire_count)
      pass->present_acquire_attachments = present_atts;
   if (present_release_count)
      pass->present_release_attachments =
         present_atts + present_acquire_count;
   if (subpass_count)
      pass->subpasses = subpasses;

   return pass;
}
194
195 /* render pass commands */
196
VkResult
vn_CreateRenderPass(VkDevice device,
                    const VkRenderPassCreateInfo *pCreateInfo,
                    const VkAllocationCallbacks *pAllocator,
                    VkRenderPass *pRenderPass)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   /* Count attachments acquired from / released to PRESENT_SRC_KHR so the
    * pass can be sized with matching tracking arrays.
    */
   uint32_t acquire_count;
   uint32_t release_count;
   vn_render_pass_count_present_src(pCreateInfo, &acquire_count,
                                    &release_count);

   struct vn_render_pass *pass = vn_render_pass_create(
      dev, acquire_count, release_count, pCreateInfo->subpassCount, alloc);
   if (!pass)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   INIT_SUBPASSES(pass, pCreateInfo);

   /* If any attachment uses PRESENT_SRC_KHR, build a temporary attachment
    * array with those layouts rewritten to VN_PRESENT_SRC_INTERNAL_LAYOUT
    * and repoint pCreateInfo at a local copy of the create info using it.
    */
   VkRenderPassCreateInfo local_pass_info;
   if (pass->present_count) {
      VkAttachmentDescription *temp_atts =
         vk_alloc(alloc, sizeof(*temp_atts) * pCreateInfo->attachmentCount,
                  VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!temp_atts) {
         vk_free(alloc, pass);
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      vn_render_pass_replace_present_src(pass, pCreateInfo, temp_atts);
      vn_render_pass_setup_present_src_barriers(pass);

      local_pass_info = *pCreateInfo;
      local_pass_info.pAttachments = temp_atts;
      pCreateInfo = &local_pass_info;
   }

   const struct VkRenderPassMultiviewCreateInfo *multiview_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           RENDER_PASS_MULTIVIEW_CREATE_INFO);

   /* Store the viewMask of each subpass for query feedback */
   /* NOTE(review): relies on the spec rule that a non-zero
    * multiview_info->subpassCount equals pCreateInfo->subpassCount, so the
    * pass->subpasses array cannot be overrun.
    */
   if (multiview_info) {
      for (uint32_t i = 0; i < multiview_info->subpassCount; i++)
         pass->subpasses[i].view_mask = multiview_info->pViewMasks[i];
   }

   /* Fire-and-forget: the host-side creation result is not awaited here. */
   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
   vn_async_vkCreateRenderPass(dev->primary_ring, device, pCreateInfo, NULL,
                               &pass_handle);

   /* pCreateInfo aliases local_pass_info only when temp_atts was built. */
   if (pCreateInfo == &local_pass_info)
      vk_free(alloc, (void *)local_pass_info.pAttachments);

   *pRenderPass = pass_handle;

   return VK_SUCCESS;
}
258
VkResult
vn_CreateRenderPass2(VkDevice device,
                     const VkRenderPassCreateInfo2 *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkRenderPass *pRenderPass)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   /* Count attachments acquired from / released to PRESENT_SRC_KHR so the
    * pass can be sized with matching tracking arrays.
    */
   uint32_t acquire_count;
   uint32_t release_count;
   vn_render_pass_count_present_src2(pCreateInfo, &acquire_count,
                                     &release_count);

   struct vn_render_pass *pass = vn_render_pass_create(
      dev, acquire_count, release_count, pCreateInfo->subpassCount, alloc);
   if (!pass)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   INIT_SUBPASSES(pass, pCreateInfo);

   /* If any attachment uses PRESENT_SRC_KHR, build a temporary attachment
    * array with those layouts rewritten to VN_PRESENT_SRC_INTERNAL_LAYOUT
    * and repoint pCreateInfo at a local copy of the create info using it.
    */
   VkRenderPassCreateInfo2 local_pass_info;
   if (pass->present_count) {
      VkAttachmentDescription2 *temp_atts =
         vk_alloc(alloc, sizeof(*temp_atts) * pCreateInfo->attachmentCount,
                  VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!temp_atts) {
         vk_free(alloc, pass);
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      vn_render_pass_replace_present_src2(pass, pCreateInfo, temp_atts);
      vn_render_pass_setup_present_src_barriers(pass);

      local_pass_info = *pCreateInfo;
      local_pass_info.pAttachments = temp_atts;
      pCreateInfo = &local_pass_info;
   }

   /* Store the viewMask of each subpass for query feedback */
   for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++)
      pass->subpasses[i].view_mask = pCreateInfo->pSubpasses[i].viewMask;

   /* Fire-and-forget: the host-side creation result is not awaited here. */
   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
   vn_async_vkCreateRenderPass2(dev->primary_ring, device, pCreateInfo, NULL,
                                &pass_handle);

   /* pCreateInfo aliases local_pass_info only when temp_atts was built. */
   if (pCreateInfo == &local_pass_info)
      vk_free(alloc, (void *)local_pass_info.pAttachments);

   *pRenderPass = pass_handle;

   return VK_SUCCESS;
}
314
315 void
vn_DestroyRenderPass(VkDevice device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)316 vn_DestroyRenderPass(VkDevice device,
317 VkRenderPass renderPass,
318 const VkAllocationCallbacks *pAllocator)
319 {
320 struct vn_device *dev = vn_device_from_handle(device);
321 struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
322 const VkAllocationCallbacks *alloc =
323 pAllocator ? pAllocator : &dev->base.base.alloc;
324
325 if (!pass)
326 return;
327
328 vn_async_vkDestroyRenderPass(dev->primary_ring, device, renderPass, NULL);
329
330 vn_object_base_fini(&pass->base);
331 vk_free(alloc, pass);
332 }
333
334 void
vn_GetRenderAreaGranularity(VkDevice device,VkRenderPass renderPass,VkExtent2D * pGranularity)335 vn_GetRenderAreaGranularity(VkDevice device,
336 VkRenderPass renderPass,
337 VkExtent2D *pGranularity)
338 {
339 struct vn_device *dev = vn_device_from_handle(device);
340 struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
341
342 if (!pass->granularity.width) {
343 vn_call_vkGetRenderAreaGranularity(dev->primary_ring, device,
344 renderPass, &pass->granularity);
345 }
346
347 *pGranularity = pass->granularity;
348 }
349
350 /* framebuffer commands */
351
352 VkResult
vn_CreateFramebuffer(VkDevice device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)353 vn_CreateFramebuffer(VkDevice device,
354 const VkFramebufferCreateInfo *pCreateInfo,
355 const VkAllocationCallbacks *pAllocator,
356 VkFramebuffer *pFramebuffer)
357 {
358 struct vn_device *dev = vn_device_from_handle(device);
359 const VkAllocationCallbacks *alloc =
360 pAllocator ? pAllocator : &dev->base.base.alloc;
361
362 /* Two render passes differ only in attachment image layouts are considered
363 * compatible. We must not use pCreateInfo->renderPass here.
364 */
365 const bool imageless =
366 pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT;
367 const uint32_t view_count = imageless ? 0 : pCreateInfo->attachmentCount;
368
369 struct vn_framebuffer *fb =
370 vk_zalloc(alloc, sizeof(*fb) + sizeof(*fb->image_views) * view_count,
371 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
372 if (!fb)
373 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
374
375 vn_object_base_init(&fb->base, VK_OBJECT_TYPE_FRAMEBUFFER, &dev->base);
376
377 fb->image_view_count = view_count;
378 memcpy(fb->image_views, pCreateInfo->pAttachments,
379 sizeof(*pCreateInfo->pAttachments) * view_count);
380
381 VkFramebuffer fb_handle = vn_framebuffer_to_handle(fb);
382 vn_async_vkCreateFramebuffer(dev->primary_ring, device, pCreateInfo, NULL,
383 &fb_handle);
384
385 *pFramebuffer = fb_handle;
386
387 return VK_SUCCESS;
388 }
389
390 void
vn_DestroyFramebuffer(VkDevice device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)391 vn_DestroyFramebuffer(VkDevice device,
392 VkFramebuffer framebuffer,
393 const VkAllocationCallbacks *pAllocator)
394 {
395 struct vn_device *dev = vn_device_from_handle(device);
396 struct vn_framebuffer *fb = vn_framebuffer_from_handle(framebuffer);
397 const VkAllocationCallbacks *alloc =
398 pAllocator ? pAllocator : &dev->base.base.alloc;
399
400 if (!fb)
401 return;
402
403 vn_async_vkDestroyFramebuffer(dev->primary_ring, device, framebuffer,
404 NULL);
405
406 vn_object_base_fini(&fb->base);
407 vk_free(alloc, fb);
408 }
409