/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_render_pass.h"

#include "venus-protocol/vn_protocol_driver_framebuffer.h"
#include "venus-protocol/vn_protocol_driver_render_pass.h"

#include "vn_device.h"
#include "vn_image.h"

/* Count the attachments in atts[0..att_count) whose initialLayout
 * (-> *initial_count) or finalLayout (-> *final_count) is
 * VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.  A macro so it works with both
 * VkAttachmentDescription and VkAttachmentDescription2.
 * All arguments are parenthesized; att_count is evaluated per iteration.
 */
#define COUNT_PRESENT_SRC(atts, att_count, initial_count, final_count)      \
   do {                                                                     \
      *(initial_count) = 0;                                                 \
      *(final_count) = 0;                                                   \
      for (uint32_t _i = 0; _i < (att_count); _i++) {                       \
         if ((atts)[_i].initialLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)   \
            (*(initial_count))++;                                           \
         if ((atts)[_i].finalLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)     \
            (*(final_count))++;                                             \
      }                                                                     \
   } while (false)
30
/* Copy att_count attachment descriptions from atts into out_atts, replacing
 * PRESENT_SRC_KHR initial/final layouts with VN_PRESENT_SRC_INTERNAL_LAYOUT
 * and recording the affected attachment indices into
 * pass->present_src_attachments: acquire records first, release records
 * starting at pass->acquire_count.  A macro so it works with both
 * VkAttachmentDescription and VkAttachmentDescription2.
 * All arguments are parenthesized to stay expansion-safe.
 */
#define REPLACE_PRESENT_SRC(pass, atts, att_count, out_atts)                \
   do {                                                                     \
      struct vn_present_src_attachment *_acquire_atts =                     \
         (pass)->present_src_attachments;                                   \
      struct vn_present_src_attachment *_release_atts =                     \
         _acquire_atts + (pass)->acquire_count;                             \
                                                                            \
      memcpy((out_atts), (atts), sizeof(*(atts)) * (att_count));            \
      for (uint32_t _i = 0; _i < (att_count); _i++) {                       \
         if ((out_atts)[_i].initialLayout ==                                \
             VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {                             \
            (out_atts)[_i].initialLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;  \
            _acquire_atts->acquire = true;                                  \
            _acquire_atts->index = _i;                                      \
            _acquire_atts++;                                                \
         }                                                                  \
         if ((out_atts)[_i].finalLayout ==                                  \
             VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {                             \
            (out_atts)[_i].finalLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;    \
            _release_atts->acquire = false;                                 \
            _release_atts->index = _i;                                      \
            _release_atts++;                                                \
         }                                                                  \
      }                                                                     \
   } while (false)
54
55 static void
vn_render_pass_count_present_src(const VkRenderPassCreateInfo * create_info,uint32_t * initial_count,uint32_t * final_count)56 vn_render_pass_count_present_src(const VkRenderPassCreateInfo *create_info,
57 uint32_t *initial_count,
58 uint32_t *final_count)
59 {
60 COUNT_PRESENT_SRC(create_info->pAttachments, create_info->attachmentCount,
61 initial_count, final_count);
62 }
63
64 static void
vn_render_pass_count_present_src2(const VkRenderPassCreateInfo2 * create_info,uint32_t * initial_count,uint32_t * final_count)65 vn_render_pass_count_present_src2(const VkRenderPassCreateInfo2 *create_info,
66 uint32_t *initial_count,
67 uint32_t *final_count)
68 {
69 COUNT_PRESENT_SRC(create_info->pAttachments, create_info->attachmentCount,
70 initial_count, final_count);
71 }
72
73 static void
vn_render_pass_replace_present_src(struct vn_render_pass * pass,const VkRenderPassCreateInfo * create_info,VkAttachmentDescription * out_atts)74 vn_render_pass_replace_present_src(struct vn_render_pass *pass,
75 const VkRenderPassCreateInfo *create_info,
76 VkAttachmentDescription *out_atts)
77 {
78 REPLACE_PRESENT_SRC(pass, create_info->pAttachments,
79 create_info->attachmentCount, out_atts);
80 }
81
82 static void
vn_render_pass_replace_present_src2(struct vn_render_pass * pass,const VkRenderPassCreateInfo2 * create_info,VkAttachmentDescription2 * out_atts)83 vn_render_pass_replace_present_src2(struct vn_render_pass *pass,
84 const VkRenderPassCreateInfo2 *create_info,
85 VkAttachmentDescription2 *out_atts)
86 {
87 REPLACE_PRESENT_SRC(pass, create_info->pAttachments,
88 create_info->attachmentCount, out_atts);
89 }
90
91 static void
vn_render_pass_setup_present_src_barriers(struct vn_render_pass * pass)92 vn_render_pass_setup_present_src_barriers(struct vn_render_pass *pass)
93 {
94 /* TODO parse VkSubpassDependency for more accurate barriers */
95 for (uint32_t i = 0; i < pass->present_src_count; i++) {
96 struct vn_present_src_attachment *att =
97 &pass->present_src_attachments[i];
98
99 if (att->acquire) {
100 att->src_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
101 att->src_access_mask = 0;
102
103 att->dst_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
104 att->dst_access_mask =
105 VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
106 } else {
107 att->src_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
108 att->src_access_mask = VK_ACCESS_MEMORY_WRITE_BIT;
109
110 att->dst_stage_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
111 att->dst_access_mask = 0;
112 }
113 }
114 }
115
116 static struct vn_render_pass *
vn_render_pass_create(struct vn_device * dev,uint32_t acquire_count,uint32_t release_count,const VkAllocationCallbacks * alloc)117 vn_render_pass_create(struct vn_device *dev,
118 uint32_t acquire_count,
119 uint32_t release_count,
120 const VkAllocationCallbacks *alloc)
121 {
122 const uint32_t total_count = acquire_count + release_count;
123 struct vn_render_pass *pass = vk_zalloc(
124 alloc,
125 sizeof(*pass) + sizeof(pass->present_src_attachments[0]) * total_count,
126 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
127 if (!pass)
128 return NULL;
129
130 vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);
131
132 pass->acquire_count = acquire_count;
133 pass->release_count = release_count;
134 pass->present_src_count = total_count;
135
136 return pass;
137 }
138
/* render pass commands */

141 VkResult
vn_CreateRenderPass(VkDevice device,const VkRenderPassCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)142 vn_CreateRenderPass(VkDevice device,
143 const VkRenderPassCreateInfo *pCreateInfo,
144 const VkAllocationCallbacks *pAllocator,
145 VkRenderPass *pRenderPass)
146 {
147 struct vn_device *dev = vn_device_from_handle(device);
148 const VkAllocationCallbacks *alloc =
149 pAllocator ? pAllocator : &dev->base.base.alloc;
150
151 uint32_t acquire_count;
152 uint32_t release_count;
153 vn_render_pass_count_present_src(pCreateInfo, &acquire_count,
154 &release_count);
155
156 struct vn_render_pass *pass =
157 vn_render_pass_create(dev, acquire_count, release_count, alloc);
158 if (!pass)
159 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
160
161 VkRenderPassCreateInfo local_pass_info;
162 if (pass->present_src_count) {
163 VkAttachmentDescription *temp_atts =
164 vk_alloc(alloc, sizeof(*temp_atts) * pCreateInfo->attachmentCount,
165 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
166 if (!temp_atts) {
167 vk_free(alloc, pass);
168 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
169 }
170
171 vn_render_pass_replace_present_src(pass, pCreateInfo, temp_atts);
172 vn_render_pass_setup_present_src_barriers(pass);
173
174 local_pass_info = *pCreateInfo;
175 local_pass_info.pAttachments = temp_atts;
176 pCreateInfo = &local_pass_info;
177 }
178
179 VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
180 vn_async_vkCreateRenderPass(dev->instance, device, pCreateInfo, NULL,
181 &pass_handle);
182
183 if (pCreateInfo == &local_pass_info)
184 vk_free(alloc, (void *)local_pass_info.pAttachments);
185
186 *pRenderPass = pass_handle;
187
188 return VK_SUCCESS;
189 }
190
191 VkResult
vn_CreateRenderPass2(VkDevice device,const VkRenderPassCreateInfo2 * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)192 vn_CreateRenderPass2(VkDevice device,
193 const VkRenderPassCreateInfo2 *pCreateInfo,
194 const VkAllocationCallbacks *pAllocator,
195 VkRenderPass *pRenderPass)
196 {
197 struct vn_device *dev = vn_device_from_handle(device);
198 const VkAllocationCallbacks *alloc =
199 pAllocator ? pAllocator : &dev->base.base.alloc;
200
201 uint32_t acquire_count;
202 uint32_t release_count;
203 vn_render_pass_count_present_src2(pCreateInfo, &acquire_count,
204 &release_count);
205
206 struct vn_render_pass *pass =
207 vn_render_pass_create(dev, acquire_count, release_count, alloc);
208 if (!pass)
209 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
210
211 VkRenderPassCreateInfo2 local_pass_info;
212 if (pass->present_src_count) {
213 VkAttachmentDescription2 *temp_atts =
214 vk_alloc(alloc, sizeof(*temp_atts) * pCreateInfo->attachmentCount,
215 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
216 if (!temp_atts) {
217 vk_free(alloc, pass);
218 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
219 }
220
221 vn_render_pass_replace_present_src2(pass, pCreateInfo, temp_atts);
222 vn_render_pass_setup_present_src_barriers(pass);
223
224 local_pass_info = *pCreateInfo;
225 local_pass_info.pAttachments = temp_atts;
226 pCreateInfo = &local_pass_info;
227 }
228
229 VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
230 vn_async_vkCreateRenderPass2(dev->instance, device, pCreateInfo, NULL,
231 &pass_handle);
232
233 if (pCreateInfo == &local_pass_info)
234 vk_free(alloc, (void *)local_pass_info.pAttachments);
235
236 *pRenderPass = pass_handle;
237
238 return VK_SUCCESS;
239 }
240
241 void
vn_DestroyRenderPass(VkDevice device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)242 vn_DestroyRenderPass(VkDevice device,
243 VkRenderPass renderPass,
244 const VkAllocationCallbacks *pAllocator)
245 {
246 struct vn_device *dev = vn_device_from_handle(device);
247 struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
248 const VkAllocationCallbacks *alloc =
249 pAllocator ? pAllocator : &dev->base.base.alloc;
250
251 if (!pass)
252 return;
253
254 vn_async_vkDestroyRenderPass(dev->instance, device, renderPass, NULL);
255
256 vn_object_base_fini(&pass->base);
257 vk_free(alloc, pass);
258 }
259
260 void
vn_GetRenderAreaGranularity(VkDevice device,VkRenderPass renderPass,VkExtent2D * pGranularity)261 vn_GetRenderAreaGranularity(VkDevice device,
262 VkRenderPass renderPass,
263 VkExtent2D *pGranularity)
264 {
265 struct vn_device *dev = vn_device_from_handle(device);
266 struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
267
268 if (!pass->granularity.width) {
269 vn_call_vkGetRenderAreaGranularity(dev->instance, device, renderPass,
270 &pass->granularity);
271 }
272
273 *pGranularity = pass->granularity;
274 }
275
/* framebuffer commands */

278 VkResult
vn_CreateFramebuffer(VkDevice device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)279 vn_CreateFramebuffer(VkDevice device,
280 const VkFramebufferCreateInfo *pCreateInfo,
281 const VkAllocationCallbacks *pAllocator,
282 VkFramebuffer *pFramebuffer)
283 {
284 struct vn_device *dev = vn_device_from_handle(device);
285 const VkAllocationCallbacks *alloc =
286 pAllocator ? pAllocator : &dev->base.base.alloc;
287
288 /* Two render passes differ only in attachment image layouts are considered
289 * compatible. We must not use pCreateInfo->renderPass here.
290 */
291 const bool imageless =
292 pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT;
293 const uint32_t view_count = imageless ? 0 : pCreateInfo->attachmentCount;
294
295 struct vn_framebuffer *fb =
296 vk_zalloc(alloc, sizeof(*fb) + sizeof(*fb->image_views) * view_count,
297 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
298 if (!fb)
299 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
300
301 vn_object_base_init(&fb->base, VK_OBJECT_TYPE_FRAMEBUFFER, &dev->base);
302
303 fb->image_view_count = view_count;
304 memcpy(fb->image_views, pCreateInfo->pAttachments,
305 sizeof(*pCreateInfo->pAttachments) * view_count);
306
307 VkFramebuffer fb_handle = vn_framebuffer_to_handle(fb);
308 vn_async_vkCreateFramebuffer(dev->instance, device, pCreateInfo, NULL,
309 &fb_handle);
310
311 *pFramebuffer = fb_handle;
312
313 return VK_SUCCESS;
314 }
315
316 void
vn_DestroyFramebuffer(VkDevice device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)317 vn_DestroyFramebuffer(VkDevice device,
318 VkFramebuffer framebuffer,
319 const VkAllocationCallbacks *pAllocator)
320 {
321 struct vn_device *dev = vn_device_from_handle(device);
322 struct vn_framebuffer *fb = vn_framebuffer_from_handle(framebuffer);
323 const VkAllocationCallbacks *alloc =
324 pAllocator ? pAllocator : &dev->base.base.alloc;
325
326 if (!fb)
327 return;
328
329 vn_async_vkDestroyFramebuffer(dev->instance, device, framebuffer, NULL);
330
331 vn_object_base_fini(&fb->base);
332 vk_free(alloc, fb);
333 }
334