1 /*
2 * Copyright (c) 2015-2019 The Khronos Group Inc.
3 * Copyright (c) 2015-2019 Valve Corporation
4 * Copyright (c) 2015-2019 LunarG, Inc.
5 * Copyright (c) 2015-2019 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
20 * Author: Tony Barbour <tony@LunarG.com>
21 * Author: Dave Houlton <daveh@lunarg.com>
22 */
23
24 #include "vkrenderframework.h"
25 #include "vk_format_utils.h"
26
27 #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
28 #define GET_DEVICE_PROC_ADDR(dev, entrypoint) \
29 { \
30 fp##entrypoint = (PFN_vk##entrypoint)vkGetDeviceProcAddr(dev, "vk" #entrypoint); \
31 assert(fp##entrypoint != NULL); \
32 }
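// Illustrative use of GET_DEVICE_PROC_ADDR (sketch only; assumes a valid VkDevice "dev" and an
// "fp"-prefixed pointer of the matching PFN type already declared in scope):
//   PFN_vkCreateSwapchainKHR fpCreateSwapchainKHR = nullptr;
//   GET_DEVICE_PROC_ADDR(dev, CreateSwapchainKHR);  // assigns via vkGetDeviceProcAddr(dev, "vkCreateSwapchainKHR")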
33
34 VkRenderFramework::VkRenderFramework()
35 : inst(VK_NULL_HANDLE),
36 m_device(NULL),
37 m_commandPool(VK_NULL_HANDLE),
38 m_commandBuffer(NULL),
39 m_renderPass(VK_NULL_HANDLE),
40 m_framebuffer(VK_NULL_HANDLE),
41 m_surface(VK_NULL_HANDLE),
42 m_swapchain(VK_NULL_HANDLE),
43 m_addRenderPassSelfDependency(false),
44 m_width(256.0), // default window width
45 m_height(256.0), // default window height
46 m_render_target_fmt(VK_FORMAT_R8G8B8A8_UNORM),
47 m_depth_stencil_fmt(VK_FORMAT_UNDEFINED),
48 m_clear_via_load_op(true),
49 m_depth_clear_color(1.0),
50 m_stencil_clear_color(0),
51 m_depthStencil(NULL),
52 m_CreateDebugReportCallback(VK_NULL_HANDLE),
53 m_DestroyDebugReportCallback(VK_NULL_HANDLE),
54 m_globalMsgCallback(VK_NULL_HANDLE),
55 m_devMsgCallback(VK_NULL_HANDLE) {
56 memset(&m_renderPassBeginInfo, 0, sizeof(m_renderPassBeginInfo));
57 m_renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
58
59 // clear the back buffer to dark grey
60 m_clear_color.float32[0] = 0.25f;
61 m_clear_color.float32[1] = 0.25f;
62 m_clear_color.float32[2] = 0.25f;
63 m_clear_color.float32[3] = 0.0f;
64 }
65
66 VkRenderFramework::~VkRenderFramework() { ShutdownFramework(); }
67
68 VkPhysicalDevice VkRenderFramework::gpu() {
69 EXPECT_NE((VkInstance)0, inst); // Invalid to request gpu before instance exists
70 return objs[0];
71 }
72
73 // Return true if layer name is found and spec+implementation values are >= requested values
74 bool VkRenderFramework::InstanceLayerSupported(const char *name, uint32_t spec, uint32_t implementation) {
75 uint32_t layer_count = 0;
76 std::vector<VkLayerProperties> layer_props;
77
78 VkResult res = vkEnumerateInstanceLayerProperties(&layer_count, NULL);
79 if (VK_SUCCESS != res) return false;
80 if (0 == layer_count) return false;
81
82 layer_props.resize(layer_count);
83 res = vkEnumerateInstanceLayerProperties(&layer_count, layer_props.data());
84 if (VK_SUCCESS != res) return false;
85
86 for (auto &it : layer_props) {
87 if (0 == strncmp(name, it.layerName, VK_MAX_EXTENSION_NAME_SIZE)) {
88 return ((it.specVersion >= spec) && (it.implementationVersion >= implementation));
89 }
90 }
91 return false;
92 }
93
94 // Enable the device profile layer as the last layer on the stack (overriding devsim if present); return false if it is not available
95 bool VkRenderFramework::EnableDeviceProfileLayer() {
96 if (InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) {
97 if (VkTestFramework::m_devsim_layer) {
98 assert(0 == strcmp(m_instance_layer_names.back(), "VK_LAYER_LUNARG_device_simulation"));
99 m_instance_layer_names.pop_back();
100 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
101 } else {
102 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
103 }
104 } else {
105 printf(" Did not find VK_LAYER_LUNARG_device_profile_api layer; skipped.\n");
106 return false;
107 }
108 return true;
109 }
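// Illustrative call order in a test body (names are hypothetical): the layer must be requested
// before the instance exists, so call this ahead of InitFramework():
//   if (!EnableDeviceProfileLayer()) return;  // skip the test if the layer is unavailable
//   ASSERT_NO_FATAL_FAILURE(InitFramework(myDebugReportCallback, &my_user_data));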
110
111 // Return true if extension name is found and spec value is >= requested spec value
112 bool VkRenderFramework::InstanceExtensionSupported(const char *ext_name, uint32_t spec) {
113 uint32_t ext_count = 0;
114 std::vector<VkExtensionProperties> ext_props;
115 VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &ext_count, nullptr);
116 if (VK_SUCCESS != res) return false;
117 if (0 == ext_count) return false;
118
119 ext_props.resize(ext_count);
120 res = vkEnumerateInstanceExtensionProperties(nullptr, &ext_count, ext_props.data());
121 if (VK_SUCCESS != res) return false;
122
123 for (auto &it : ext_props) {
124 if (0 == strncmp(ext_name, it.extensionName, VK_MAX_EXTENSION_NAME_SIZE)) {
125 return (it.specVersion >= spec);
126 }
127 }
128 return false;
129 }
130
131 // Return true if instance exists and extension name is in the list
132 bool VkRenderFramework::InstanceExtensionEnabled(const char *ext_name) {
133 if (!inst) return false;
134
135 bool ext_found = false;
136 for (auto ext : m_instance_extension_names) {
137 if (!strcmp(ext, ext_name)) {
138 ext_found = true;
139 break;
140 }
141 }
142 return ext_found;
143 }
144
145 // Return true if extension name is found and spec value is >= requested spec value
146 bool VkRenderFramework::DeviceExtensionSupported(VkPhysicalDevice dev, const char *layer, const char *ext_name, uint32_t spec) {
147 if (!inst) {
148 EXPECT_NE((VkInstance)0, inst); // Complain, not cool without an instance
149 return false;
150 }
151 uint32_t ext_count = 0;
152 std::vector<VkExtensionProperties> ext_props;
153 VkResult res = vkEnumerateDeviceExtensionProperties(dev, layer, &ext_count, nullptr);
154 if (VK_SUCCESS != res) return false;
155 if (0 == ext_count) return false;
156
157 ext_props.resize(ext_count);
158 res = vkEnumerateDeviceExtensionProperties(dev, layer, &ext_count, ext_props.data());
159 if (VK_SUCCESS != res) return false;
160
161 for (auto &it : ext_props) {
162 if (0 == strncmp(ext_name, it.extensionName, VK_MAX_EXTENSION_NAME_SIZE)) {
163 return (it.specVersion >= spec);
164 }
165 }
166 return false;
167 }
168
169 // Return true if device is created and extension name is found in the list
170 bool VkRenderFramework::DeviceExtensionEnabled(const char *ext_name) {
171 if (NULL == m_device) return false;
172
173 bool ext_found = false;
174 for (auto ext : m_device_extension_names) {
175 if (!strcmp(ext, ext_name)) {
176 ext_found = true;
177 break;
178 }
179 }
180 return ext_found;
181 }
182
183 // WARNING: The DevSim layer can override the properties that are tested here, making the result of
184 // this function dubious when DevSim is active.
185 bool VkRenderFramework::DeviceIsMockICD() {
186 VkPhysicalDeviceProperties props = vk_testing::PhysicalDevice(gpu()).properties();
187 if ((props.vendorID == 0xba5eba11) && (props.deviceID == 0xf005ba11) && (0 == strcmp("Vulkan Mock Device", props.deviceName))) {
188 return true;
189 }
190 return false;
191 }
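// Illustrative skip pattern for tests that exercise behavior the mock ICD cannot provide
// (sketch only; the message text is arbitrary):
//   if (DeviceIsMockICD()) {
//       printf("Test not supported by MockICD, skipping.\n");
//       return;
//   }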
192
193 // Some tests may need to be skipped if the devsim layer is in use.
194 bool VkRenderFramework::DeviceSimulation() { return m_devsim_layer; }
195
196 // Render into a RenderTarget and read the pixels back to see if the device can really draw.
197 // Note: This cannot be called from inside an initialized VkRenderFramework because frameworks cannot be "nested".
198 // It is best to call it before "Init()".
199 bool VkRenderFramework::DeviceCanDraw() {
200 InitFramework(NULL, NULL);
201 InitState(NULL, NULL, 0);
202 InitViewport();
203 InitRenderTarget();
204
205 // Draw a triangle that covers the entire viewport.
206 char const *vsSource =
207 "#version 450\n"
208 "\n"
209 "vec2 vertices[3];\n"
210 "void main() { \n"
211 " vertices[0] = vec2(-10.0, -10.0);\n"
212 " vertices[1] = vec2( 10.0, -10.0);\n"
213 " vertices[2] = vec2( 0.0, 10.0);\n"
214 " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
215 "}\n";
216 // Draw with a solid color.
217 char const *fsSource =
218 "#version 450\n"
219 "\n"
220 "layout(location=0) out vec4 color;\n"
221 "void main() {\n"
222 " color = vec4(32.0/255.0);\n"
223 "}\n";
224 VkShaderObj *vs = new VkShaderObj(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
225 VkShaderObj *fs = new VkShaderObj(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
226
227 VkPipelineObj *pipe = new VkPipelineObj(m_device);
228 pipe->AddShader(vs);
229 pipe->AddShader(fs);
230 pipe->AddDefaultColorAttachment();
231
232 VkDescriptorSetObj *descriptorSet = new VkDescriptorSetObj(m_device);
233 descriptorSet->CreateVKDescriptorSet(m_commandBuffer);
234
235 pipe->CreateVKPipeline(descriptorSet->GetPipelineLayout(), renderPass());
236
237 m_commandBuffer->begin();
238 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
239
240 vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe->handle());
241 m_commandBuffer->BindDescriptorSet(*descriptorSet);
242
243 VkViewport viewport = m_viewports[0];
244 VkRect2D scissors = m_scissors[0];
245
246 vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
247 vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissors);
248
249 vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
250
251 m_commandBuffer->EndRenderPass();
252 m_commandBuffer->end();
253
254 VkSubmitInfo submit_info = {};
255 submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
256 submit_info.commandBufferCount = 1;
257 submit_info.pCommandBuffers = &m_commandBuffer->handle();
258
259 vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
260 vkQueueWaitIdle(m_device->m_queue);
261
262 auto pixels = m_renderTargets[0]->Read();
263
264 delete descriptorSet;
265 delete pipe;
266 delete fs;
267 delete vs;
268 ShutdownFramework();
269 return pixels[0][0] == 0x20202020;
270 }
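// Illustrative use (sketch): because DeviceCanDraw() builds and tears down its own framework,
// call it before any other Init*() call in the test body:
//   if (!DeviceCanDraw()) {
//       printf("Device cannot draw, skipping test.\n");
//       return;
//   }
//   ASSERT_NO_FATAL_FAILURE(Init());  // hypothetical per-test initialization follows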
271
272 void VkRenderFramework::InitFramework(PFN_vkDebugReportCallbackEXT dbgFunction, void *userData, void *instance_pnext) {
273 // Only enable device profile layer by default if devsim is not enabled
274 if (!VkTestFramework::m_devsim_layer && InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) {
275 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
276 }
277
278 // Assert not already initialized
279 ASSERT_EQ((VkInstance)0, inst);
280
281 // Remove any unsupported layer names from list
282 for (auto layer = m_instance_layer_names.begin(); layer != m_instance_layer_names.end();) {
283 if (!InstanceLayerSupported(*layer)) {
284 ADD_FAILURE() << "InitFramework(): Requested layer " << *layer << " was not found. Disabled.";
285 layer = m_instance_layer_names.erase(layer);
286 } else {
287 ++layer;
288 }
289 }
290
291 // Remove any unsupported instance extension names from list
292 for (auto ext = m_instance_extension_names.begin(); ext != m_instance_extension_names.end();) {
293 if (!InstanceExtensionSupported(*ext)) {
294 ADD_FAILURE() << "InitFramework(): Requested extension " << *ext << " was not found. Disabled.";
295 ext = m_instance_extension_names.erase(ext);
296 } else {
297 ++ext;
298 }
299 }
300
301 VkInstanceCreateInfo instInfo = {};
302 VkResult U_ASSERT_ONLY err;
303
304 instInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
305 instInfo.pNext = instance_pnext;
306 instInfo.pApplicationInfo = &app_info;
307 instInfo.enabledLayerCount = m_instance_layer_names.size();
308 instInfo.ppEnabledLayerNames = m_instance_layer_names.data();
309 instInfo.enabledExtensionCount = m_instance_extension_names.size();
310 instInfo.ppEnabledExtensionNames = m_instance_extension_names.data();
311
312 VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
313 if (dbgFunction) {
314 // Enable create time debug messages
315 memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo));
316 dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
317 dbgCreateInfo.flags =
318 VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
319 dbgCreateInfo.pfnCallback = dbgFunction;
320 dbgCreateInfo.pUserData = userData;
321
322 dbgCreateInfo.pNext = instInfo.pNext;
323 instInfo.pNext = &dbgCreateInfo;
324 }
325
326 err = vkCreateInstance(&instInfo, NULL, &this->inst);
327 ASSERT_VK_SUCCESS(err);
328
329 err = vkEnumeratePhysicalDevices(inst, &this->gpu_count, NULL);
330 ASSERT_LE(this->gpu_count, ARRAY_SIZE(objs)) << "Too many gpus";
331 ASSERT_VK_SUCCESS(err);
332 err = vkEnumeratePhysicalDevices(inst, &this->gpu_count, objs);
333 ASSERT_VK_SUCCESS(err);
334 ASSERT_GE(this->gpu_count, (uint32_t)1) << "No GPU available";
335 if (dbgFunction) {
336 m_CreateDebugReportCallback =
337 (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(this->inst, "vkCreateDebugReportCallbackEXT");
338 ASSERT_NE(m_CreateDebugReportCallback, (PFN_vkCreateDebugReportCallbackEXT)NULL)
339 << "Did not get function pointer for CreateDebugReportCallback";
340 if (m_CreateDebugReportCallback) {
341 dbgCreateInfo.pNext = nullptr; // clean up from usage in CreateInstance above
342 err = m_CreateDebugReportCallback(this->inst, &dbgCreateInfo, NULL, &m_globalMsgCallback);
343 ASSERT_VK_SUCCESS(err);
344
345 m_DestroyDebugReportCallback =
346 (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(this->inst, "vkDestroyDebugReportCallbackEXT");
347 ASSERT_NE(m_DestroyDebugReportCallback, (PFN_vkDestroyDebugReportCallbackEXT)NULL)
348 << "Did not get function pointer for DestroyDebugReportCallback";
349 m_DebugReportMessage = (PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(this->inst, "vkDebugReportMessageEXT");
350 ASSERT_NE(m_DebugReportMessage, (PFN_vkDebugReportMessageEXT)NULL)
351 << "Did not get function pointer for DebugReportMessage";
352 }
353 }
354 }
355
356 void VkRenderFramework::ShutdownFramework() {
357 // Nothing to shut down without a VkInstance
358 if (!this->inst) return;
359
360 delete m_commandBuffer;
361 m_commandBuffer = nullptr;
362 delete m_commandPool;
363 m_commandPool = nullptr;
364 if (m_framebuffer) vkDestroyFramebuffer(device(), m_framebuffer, NULL);
365 m_framebuffer = VK_NULL_HANDLE;
366 if (m_renderPass) vkDestroyRenderPass(device(), m_renderPass, NULL);
367 m_renderPass = VK_NULL_HANDLE;
368
369 if (m_globalMsgCallback) m_DestroyDebugReportCallback(this->inst, m_globalMsgCallback, NULL);
370 m_globalMsgCallback = VK_NULL_HANDLE;
371 if (m_devMsgCallback) m_DestroyDebugReportCallback(this->inst, m_devMsgCallback, NULL);
372 m_devMsgCallback = VK_NULL_HANDLE;
373
374 m_renderTargets.clear();
375
376 delete m_depthStencil;
377 m_depthStencil = nullptr;
378
379     // destroy the logical device
380 delete m_device;
381 m_device = nullptr;
382
383 if (this->inst) vkDestroyInstance(this->inst, NULL);
384 this->inst = (VkInstance)0; // In case we want to re-initialize
385 }
386
387 void VkRenderFramework::GetPhysicalDeviceFeatures(VkPhysicalDeviceFeatures *features) {
388 if (NULL == m_device) {
389 VkDeviceObj *temp_device = new VkDeviceObj(0, objs[0], m_device_extension_names);
390 *features = temp_device->phy().features();
391 delete (temp_device);
392 } else {
393 *features = m_device->phy().features();
394 }
395 }
396
397 void VkRenderFramework::GetPhysicalDeviceProperties(VkPhysicalDeviceProperties *props) {
398 *props = vk_testing::PhysicalDevice(gpu()).properties();
399 }
400
401 void VkRenderFramework::InitState(VkPhysicalDeviceFeatures *features, void *create_device_pnext,
402 const VkCommandPoolCreateFlags flags) {
403 // Remove any unsupported device extension names from list
404 for (auto ext = m_device_extension_names.begin(); ext != m_device_extension_names.end();) {
405 if (!DeviceExtensionSupported(objs[0], nullptr, *ext)) {
406 bool found = false;
407 for (auto layer = m_instance_layer_names.begin(); layer != m_instance_layer_names.end(); ++layer) {
408 if (DeviceExtensionSupported(objs[0], *layer, *ext)) {
409 found = true;
410 break;
411 }
412 }
413 if (!found) {
414 ADD_FAILURE() << "InitState(): The requested device extension " << *ext << " was not found. Disabled.";
415 ext = m_device_extension_names.erase(ext);
416 } else {
417 ++ext;
418 }
419 } else {
420 ++ext;
421 }
422 }
423
424 m_device = new VkDeviceObj(0, objs[0], m_device_extension_names, features, create_device_pnext);
425 m_device->SetDeviceQueue();
426
427 m_depthStencil = new VkDepthStencilObj(m_device);
428
429 m_render_target_fmt = VkTestFramework::GetFormat(inst, m_device);
430
431 m_lineWidth = 1.0f;
432
433 m_depthBiasConstantFactor = 0.0f;
434 m_depthBiasClamp = 0.0f;
435 m_depthBiasSlopeFactor = 0.0f;
436
437 m_blendConstants[0] = 1.0f;
438 m_blendConstants[1] = 1.0f;
439 m_blendConstants[2] = 1.0f;
440 m_blendConstants[3] = 1.0f;
441
442 m_minDepthBounds = 0.f;
443 m_maxDepthBounds = 1.f;
444
445 m_compareMask = 0xff;
446 m_writeMask = 0xff;
447 m_reference = 0;
448
449 m_commandPool = new VkCommandPoolObj(m_device, m_device->graphics_queue_node_index_, flags);
450
451 m_commandBuffer = new VkCommandBufferObj(m_device, m_commandPool);
452 }
453
454 void VkRenderFramework::InitViewport(float width, float height) {
455 VkViewport viewport;
456 VkRect2D scissor;
457 viewport.x = 0;
458 viewport.y = 0;
459 viewport.width = 1.f * width;
460 viewport.height = 1.f * height;
461 viewport.minDepth = 0.f;
462 viewport.maxDepth = 1.f;
463 m_viewports.push_back(viewport);
464
465 scissor.extent.width = (int32_t)width;
466 scissor.extent.height = (int32_t)height;
467 scissor.offset.x = 0;
468 scissor.offset.y = 0;
469 m_scissors.push_back(scissor);
470
471 m_width = width;
472 m_height = height;
473 }
474
475 void VkRenderFramework::InitViewport() { InitViewport(m_width, m_height); }
476
477 bool VkRenderFramework::InitSurface() { return InitSurface(m_width, m_height); }
478
479 #ifdef VK_USE_PLATFORM_WIN32_KHR
480 LRESULT CALLBACK WindowProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam) {
481 return DefWindowProc(hwnd, uMsg, wParam, lParam);
482 }
483 #endif // VK_USE_PLATFORM_WIN32_KHR
484
485 bool VkRenderFramework::InitSurface(float width, float height) {
486 #if defined(VK_USE_PLATFORM_WIN32_KHR)
487 HINSTANCE window_instance = GetModuleHandle(nullptr);
488 const char class_name[] = "test";
489 WNDCLASS wc = {};
490 wc.lpfnWndProc = WindowProc;
491 wc.hInstance = window_instance;
492 wc.lpszClassName = class_name;
493 RegisterClass(&wc);
494 HWND window = CreateWindowEx(0, class_name, 0, 0, 0, 0, (int)m_width, (int)m_height, NULL, NULL, window_instance, NULL);
495 ShowWindow(window, SW_HIDE);
496
497 VkWin32SurfaceCreateInfoKHR surface_create_info = {};
498 surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
499 surface_create_info.hinstance = window_instance;
500 surface_create_info.hwnd = window;
501 VkResult err = vkCreateWin32SurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
502 if (err != VK_SUCCESS) return false;
503 #endif
504
505 #if defined(VK_USE_PLATFORM_ANDROID_KHR) && defined(VALIDATION_APK)
506 VkAndroidSurfaceCreateInfoKHR surface_create_info = {};
507 surface_create_info.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
508 surface_create_info.window = VkTestFramework::window;
509 VkResult err = vkCreateAndroidSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
510 if (err != VK_SUCCESS) return false;
511 #endif
512
513 #if defined(VK_USE_PLATFORM_XLIB_KHR)
514 Display *dpy = XOpenDisplay(NULL);
515 if (dpy) {
516 int s = DefaultScreen(dpy);
517 Window window = XCreateSimpleWindow(dpy, RootWindow(dpy, s), 0, 0, (int)m_width, (int)m_height, 1, BlackPixel(dpy, s),
518 WhitePixel(dpy, s));
519 VkXlibSurfaceCreateInfoKHR surface_create_info = {};
520 surface_create_info.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
521 surface_create_info.dpy = dpy;
522 surface_create_info.window = window;
523 VkResult err = vkCreateXlibSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
524 if (err != VK_SUCCESS) return false;
525 }
526 #endif
527
528 #if defined(VK_USE_PLATFORM_XCB_KHR)
529 if (m_surface == VK_NULL_HANDLE) {
530 xcb_connection_t *connection = xcb_connect(NULL, NULL);
531 if (connection) {
532 xcb_window_t window = xcb_generate_id(connection);
533 VkXcbSurfaceCreateInfoKHR surface_create_info = {};
534 surface_create_info.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
535 surface_create_info.connection = connection;
536 surface_create_info.window = window;
537 VkResult err = vkCreateXcbSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
538 if (err != VK_SUCCESS) return false;
539 }
540 }
541 #endif
542
543     return (m_surface != VK_NULL_HANDLE);
544 }
545
546 bool VkRenderFramework::InitSwapchain(VkImageUsageFlags imageUsage, VkSurfaceTransformFlagBitsKHR preTransform) {
547 if (InitSurface()) {
548 return InitSwapchain(m_surface, imageUsage, preTransform);
549 }
550 return false;
551 }
552
553 bool VkRenderFramework::InitSwapchain(VkSurfaceKHR &surface, VkImageUsageFlags imageUsage,
554 VkSurfaceTransformFlagBitsKHR preTransform) {
555 for (size_t i = 0; i < m_device->queue_props.size(); ++i) {
556 VkBool32 presentSupport = false;
557 vkGetPhysicalDeviceSurfaceSupportKHR(m_device->phy().handle(), i, surface, &presentSupport);
558 }
559
560 VkSurfaceCapabilitiesKHR capabilities;
561 vkGetPhysicalDeviceSurfaceCapabilitiesKHR(m_device->phy().handle(), surface, &capabilities);
562
563 uint32_t format_count;
564 vkGetPhysicalDeviceSurfaceFormatsKHR(m_device->phy().handle(), surface, &format_count, nullptr);
565 std::vector<VkSurfaceFormatKHR> formats;
566 if (format_count != 0) {
567 formats.resize(format_count);
568 vkGetPhysicalDeviceSurfaceFormatsKHR(m_device->phy().handle(), surface, &format_count, formats.data());
569 }
570
571 uint32_t present_mode_count;
572 vkGetPhysicalDeviceSurfacePresentModesKHR(m_device->phy().handle(), surface, &present_mode_count, nullptr);
573 std::vector<VkPresentModeKHR> present_modes;
574 if (present_mode_count != 0) {
575 present_modes.resize(present_mode_count);
576 vkGetPhysicalDeviceSurfacePresentModesKHR(m_device->phy().handle(), surface, &present_mode_count, present_modes.data());
577 }
578
579 VkSwapchainCreateInfoKHR swapchain_create_info = {};
580 swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
581 swapchain_create_info.pNext = 0;
582 swapchain_create_info.surface = surface;
583 swapchain_create_info.minImageCount = capabilities.minImageCount;
584 swapchain_create_info.imageFormat = formats[0].format;
585 swapchain_create_info.imageColorSpace = formats[0].colorSpace;
586 swapchain_create_info.imageExtent = {capabilities.minImageExtent.width, capabilities.minImageExtent.height};
587 swapchain_create_info.imageArrayLayers = capabilities.maxImageArrayLayers;
588 swapchain_create_info.imageUsage = imageUsage;
589 swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
590 swapchain_create_info.preTransform = preTransform;
591 #ifdef VK_USE_PLATFORM_ANDROID_KHR
592 swapchain_create_info.compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
593 #else
594 swapchain_create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
595 #endif
596 swapchain_create_info.presentMode = present_modes[0];
597 swapchain_create_info.clipped = VK_FALSE;
598 swapchain_create_info.oldSwapchain = 0;
599
600 VkResult err = vkCreateSwapchainKHR(device(), &swapchain_create_info, nullptr, &m_swapchain);
601 if (err != VK_SUCCESS) {
602 return false;
603 }
604 uint32_t imageCount = 0;
605 vkGetSwapchainImagesKHR(device(), m_swapchain, &imageCount, nullptr);
606 std::vector<VkImage> swapchainImages;
607 swapchainImages.resize(imageCount);
608 vkGetSwapchainImagesKHR(device(), m_swapchain, &imageCount, swapchainImages.data());
609 return true;
610 }
611
612 void VkRenderFramework::DestroySwapchain() {
613 if (m_swapchain != VK_NULL_HANDLE) {
614 vkDestroySwapchainKHR(device(), m_swapchain, nullptr);
615 m_swapchain = VK_NULL_HANDLE;
616 }
617 if (m_surface != VK_NULL_HANDLE) {
618 vkDestroySurfaceKHR(instance(), m_surface, nullptr);
619 m_surface = VK_NULL_HANDLE;
620 }
621 }
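// Illustrative swapchain lifetime in a test (sketch; assumes the surface and swapchain
// extensions were added to the extension name lists before InitFramework()/InitState()):
//   if (!InitSwapchain()) {
//       printf("Cannot create surface or swapchain, skipping test.\n");
//       return;
//   }
//   ... exercise m_swapchain ...
//   DestroySwapchain();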
622
623 void VkRenderFramework::InitRenderTarget() { InitRenderTarget(1); }
624
625 void VkRenderFramework::InitRenderTarget(uint32_t targets) { InitRenderTarget(targets, NULL); }
626
627 void VkRenderFramework::InitRenderTarget(VkImageView *dsBinding) { InitRenderTarget(1, dsBinding); }
628
629 void VkRenderFramework::InitRenderTarget(uint32_t targets, VkImageView *dsBinding) {
630 std::vector<VkAttachmentDescription> attachments;
631 std::vector<VkAttachmentReference> color_references;
632 std::vector<VkImageView> bindings;
633 attachments.reserve(targets + 1); // +1 for dsBinding
634 color_references.reserve(targets);
635 bindings.reserve(targets + 1); // +1 for dsBinding
636
637 VkAttachmentDescription att = {};
638 att.format = m_render_target_fmt;
639 att.samples = VK_SAMPLE_COUNT_1_BIT;
640 att.loadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
641 att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
642 att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
643 att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
644 att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
645 att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
646
647 VkAttachmentReference ref = {};
648 ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
649
650 m_renderPassClearValues.clear();
651 VkClearValue clear = {};
652 clear.color = m_clear_color;
653
654 for (uint32_t i = 0; i < targets; i++) {
655 attachments.push_back(att);
656
657 ref.attachment = i;
658 color_references.push_back(ref);
659
660 m_renderPassClearValues.push_back(clear);
661
662 std::unique_ptr<VkImageObj> img(new VkImageObj(m_device));
663
664 VkFormatProperties props;
665
666 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), m_render_target_fmt, &props);
667
668 if (props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
669 img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt,
670 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
671 VK_IMAGE_TILING_LINEAR);
672 } else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
673 img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt,
674 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
675 VK_IMAGE_TILING_OPTIMAL);
676 } else {
677 FAIL() << "Neither Linear nor Optimal allowed for render target";
678 }
679
680 bindings.push_back(img->targetView(m_render_target_fmt));
681 m_renderTargets.push_back(std::move(img));
682 }
683
684 VkSubpassDescription subpass = {};
685 subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
686 subpass.flags = 0;
687 subpass.inputAttachmentCount = 0;
688 subpass.pInputAttachments = NULL;
689 subpass.colorAttachmentCount = targets;
690 subpass.pColorAttachments = color_references.data();
691 subpass.pResolveAttachments = NULL;
692
693 VkAttachmentReference ds_reference;
694 if (dsBinding) {
695 att.format = m_depth_stencil_fmt;
696 att.loadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
698 att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
699 att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
700 att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
701 att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
702 att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
703 attachments.push_back(att);
704
705 clear.depthStencil.depth = m_depth_clear_color;
706 clear.depthStencil.stencil = m_stencil_clear_color;
707 m_renderPassClearValues.push_back(clear);
708
709 bindings.push_back(*dsBinding);
710
711 ds_reference.attachment = targets;
712 ds_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
713 subpass.pDepthStencilAttachment = &ds_reference;
714 } else {
715 subpass.pDepthStencilAttachment = NULL;
716 }
717
718 subpass.preserveAttachmentCount = 0;
719 subpass.pPreserveAttachments = NULL;
720
721 VkRenderPassCreateInfo rp_info = {};
722 rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
723 rp_info.attachmentCount = attachments.size();
724 rp_info.pAttachments = attachments.data();
725 rp_info.subpassCount = 1;
726 rp_info.pSubpasses = &subpass;
727 VkSubpassDependency subpass_dep = {};
728 if (m_addRenderPassSelfDependency) {
729 // Add a subpass self-dependency to subpass 0 of default renderPass
730 subpass_dep.srcSubpass = 0;
731 subpass_dep.dstSubpass = 0;
732 // Just using all framebuffer-space pipeline stages in order to get a reasonably large
733 // set of bits that can be used for both src & dst
734 subpass_dep.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
735 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
736 subpass_dep.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
737 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
738 // Add all of the gfx mem access bits that correlate to the fb-space pipeline stages
739 subpass_dep.srcAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
740 VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
741 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
742 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
743 subpass_dep.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
744 VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
745 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
746 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
747 // Must include dep_by_region bit when src & dst both include framebuffer-space stages
748 subpass_dep.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
749 rp_info.dependencyCount = 1;
750 rp_info.pDependencies = &subpass_dep;
751 }
752
753 vkCreateRenderPass(device(), &rp_info, NULL, &m_renderPass);
754 renderPass_info_ = rp_info; // Save away a copy for tests that need access to the render pass state
755     // Create the framebuffer using the render pass created above, the color attachments, and any
756     // depth/stencil attachment
757 VkFramebufferCreateInfo fb_info = {};
758 fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
759 fb_info.pNext = NULL;
760 fb_info.renderPass = m_renderPass;
761 fb_info.attachmentCount = bindings.size();
762 fb_info.pAttachments = bindings.data();
763 fb_info.width = (uint32_t)m_width;
764 fb_info.height = (uint32_t)m_height;
765 fb_info.layers = 1;
766
767 vkCreateFramebuffer(device(), &fb_info, NULL, &m_framebuffer);
768
769 m_renderPassBeginInfo.renderPass = m_renderPass;
770 m_renderPassBeginInfo.framebuffer = m_framebuffer;
771 m_renderPassBeginInfo.renderArea.extent.width = (int32_t)m_width;
772 m_renderPassBeginInfo.renderArea.extent.height = (int32_t)m_height;
773 m_renderPassBeginInfo.clearValueCount = m_renderPassClearValues.size();
774 m_renderPassBeginInfo.pClearValues = m_renderPassClearValues.data();
775 }
776
777 void VkRenderFramework::DestroyRenderTarget() {
778 vkDestroyRenderPass(device(), m_renderPass, nullptr);
779 m_renderPass = VK_NULL_HANDLE;
780 vkDestroyFramebuffer(device(), m_framebuffer, nullptr);
781 m_framebuffer = VK_NULL_HANDLE;
782 }
783
784 VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj) : vk_testing::Device(obj), id(id) {
785 init();
786
787 props = phy().properties();
788 queue_props = phy().queue_properties();
789 }
790
791 VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj, std::vector<const char *> &extension_names,
792 VkPhysicalDeviceFeatures *features, void *create_device_pnext)
793 : vk_testing::Device(obj), id(id) {
794 init(extension_names, features, create_device_pnext);
795
796 props = phy().properties();
797 queue_props = phy().queue_properties();
798 }
799
800 uint32_t VkDeviceObj::QueueFamilyMatching(VkQueueFlags with, VkQueueFlags without, bool all_bits) {
801 // Find a queue family with and without desired capabilities
802 for (uint32_t i = 0; i < queue_props.size(); i++) {
803 auto flags = queue_props[i].queueFlags;
804 bool matches = all_bits ? (flags & with) == with : (flags & with) != 0;
805 if (matches && ((flags & without) == 0) && (queue_props[i].queueCount > 0)) {
806 return i;
807 }
808 }
809 return UINT32_MAX;
810 }
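// Illustrative query (sketch): find a dedicated transfer queue family, i.e. one that reports
// VK_QUEUE_TRANSFER_BIT but neither graphics nor compute; UINT32_MAX means no such family exists:
//   const uint32_t transfer_only_family =
//       m_device->QueueFamilyMatching(VK_QUEUE_TRANSFER_BIT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT);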
811
812 void VkDeviceObj::SetDeviceQueue() {
813     ASSERT_FALSE(graphics_queues().empty());
814 m_queue = graphics_queues()[0]->handle();
815 }
816
817 VkQueueObj *VkDeviceObj::GetDefaultQueue() {
818 if (graphics_queues().empty()) return nullptr;
819 return graphics_queues()[0];
820 }
821
822 VkQueueObj *VkDeviceObj::GetDefaultComputeQueue() {
823 if (compute_queues().empty()) return nullptr;
824 return compute_queues()[0];
825 }
826
827 VkDescriptorSetLayoutObj::VkDescriptorSetLayoutObj(const VkDeviceObj *device,
828 const std::vector<VkDescriptorSetLayoutBinding> &descriptor_set_bindings,
829 VkDescriptorSetLayoutCreateFlags flags, void *pNext) {
830 VkDescriptorSetLayoutCreateInfo dsl_ci = {};
831 dsl_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
832 dsl_ci.pNext = pNext;
833 dsl_ci.flags = flags;
834 dsl_ci.bindingCount = static_cast<uint32_t>(descriptor_set_bindings.size());
835 dsl_ci.pBindings = descriptor_set_bindings.data();
836
837 init(*device, dsl_ci);
838 }
839
840 VkDescriptorSetObj::VkDescriptorSetObj(VkDeviceObj *device) : m_device(device), m_nextSlot(0) {}
841
842 VkDescriptorSetObj::~VkDescriptorSetObj() {
843 if (m_set) {
844 delete m_set;
845 }
846 }
847
848 int VkDescriptorSetObj::AppendDummy() {
849 /* request a descriptor but do not update it */
850 VkDescriptorSetLayoutBinding binding = {};
851 binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
852 binding.descriptorCount = 1;
853 binding.binding = m_layout_bindings.size();
854 binding.stageFlags = VK_SHADER_STAGE_ALL;
855 binding.pImmutableSamplers = NULL;
856
857 m_layout_bindings.push_back(binding);
858 m_type_counts[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] += binding.descriptorCount;
859
860 return m_nextSlot++;
861 }
862
863 int VkDescriptorSetObj::AppendBuffer(VkDescriptorType type, VkConstantBufferObj &constantBuffer) {
864 assert(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
865 type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
866 VkDescriptorSetLayoutBinding binding = {};
867 binding.descriptorType = type;
868 binding.descriptorCount = 1;
869 binding.binding = m_layout_bindings.size();
870 binding.stageFlags = VK_SHADER_STAGE_ALL;
871 binding.pImmutableSamplers = NULL;
872
873 m_layout_bindings.push_back(binding);
874 m_type_counts[type] += binding.descriptorCount;
875
876 m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0, type, 1,
877 &constantBuffer.m_descriptorBufferInfo));
878
879 return m_nextSlot++;
880 }
881
882 int VkDescriptorSetObj::AppendSamplerTexture(VkSamplerObj *sampler, VkTextureObj *texture) {
883 VkDescriptorSetLayoutBinding binding = {};
884 binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
885 binding.descriptorCount = 1;
886 binding.binding = m_layout_bindings.size();
887 binding.stageFlags = VK_SHADER_STAGE_ALL;
888 binding.pImmutableSamplers = NULL;
889
890 m_layout_bindings.push_back(binding);
891 m_type_counts[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] += binding.descriptorCount;
892 VkDescriptorImageInfo tmp = texture->DescriptorImageInfo();
893 tmp.sampler = sampler->handle();
894 m_imageSamplerDescriptors.push_back(tmp);
895
896 m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0,
897 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &tmp));
898
899 return m_nextSlot++;
900 }
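// Illustrative build order (sketch; the buffer, sampler, and texture objects are hypothetical):
// append all bindings first, then create the Vulkan objects once via CreateVKDescriptorSet():
//   VkDescriptorSetObj descriptor_set(m_device);
//   descriptor_set.AppendBuffer(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, constant_buffer);
//   descriptor_set.AppendSamplerTexture(&sampler, &texture);
//   descriptor_set.CreateVKDescriptorSet(m_commandBuffer);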
901
902 VkPipelineLayout VkDescriptorSetObj::GetPipelineLayout() const { return m_pipeline_layout.handle(); }
903
904 VkDescriptorSet VkDescriptorSetObj::GetDescriptorSetHandle() const {
905 if (m_set)
906 return m_set->handle();
907 else
908 return VK_NULL_HANDLE;
909 }
910
911 void VkDescriptorSetObj::CreateVKDescriptorSet(VkCommandBufferObj *commandBuffer) {
912 if (m_type_counts.size()) {
913 // create VkDescriptorPool
914 VkDescriptorPoolSize poolSize;
915 vector<VkDescriptorPoolSize> sizes;
916 for (auto it = m_type_counts.begin(); it != m_type_counts.end(); ++it) {
917 poolSize.descriptorCount = it->second;
918 poolSize.type = it->first;
919 sizes.push_back(poolSize);
920 }
921 VkDescriptorPoolCreateInfo pool = {};
922 pool.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
923 pool.poolSizeCount = sizes.size();
924 pool.maxSets = 1;
925 pool.pPoolSizes = sizes.data();
926 init(*m_device, pool);
927 }
928
929 // create VkDescriptorSetLayout
930 VkDescriptorSetLayoutCreateInfo layout = {};
931 layout.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
932 layout.bindingCount = m_layout_bindings.size();
933 layout.pBindings = m_layout_bindings.data();
934
935 m_layout.init(*m_device, layout);
936 vector<const vk_testing::DescriptorSetLayout *> layouts;
937 layouts.push_back(&m_layout);
938
939 // create VkPipelineLayout
940 VkPipelineLayoutCreateInfo pipeline_layout = {};
941 pipeline_layout.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
942 pipeline_layout.setLayoutCount = layouts.size();
943 pipeline_layout.pSetLayouts = NULL;
944
945 m_pipeline_layout.init(*m_device, pipeline_layout, layouts);
946
947 if (m_type_counts.size()) {
948 // create VkDescriptorSet
949 m_set = alloc_sets(*m_device, m_layout);
950
951 // build the update array
952 size_t imageSamplerCount = 0;
953 for (std::vector<VkWriteDescriptorSet>::iterator it = m_writes.begin(); it != m_writes.end(); it++) {
954 it->dstSet = m_set->handle();
955 if (it->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
956 it->pImageInfo = &m_imageSamplerDescriptors[imageSamplerCount++];
957 }
958
959 // do the updates
960 m_device->update_descriptor_sets(m_writes);
961 }
962 }
963
964 VkRenderpassObj::VkRenderpassObj(VkDeviceObj *dev) {
965 // Create a renderPass with a single color attachment
966 VkAttachmentReference attach = {};
967 attach.layout = VK_IMAGE_LAYOUT_GENERAL;
968
969 VkSubpassDescription subpass = {};
970 subpass.pColorAttachments = &attach;
971 subpass.colorAttachmentCount = 1;
972
973 VkRenderPassCreateInfo rpci = {};
974 rpci.subpassCount = 1;
975 rpci.pSubpasses = &subpass;
976 rpci.attachmentCount = 1;
977
978 VkAttachmentDescription attach_desc = {};
979 attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
980 attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
981 attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
982 attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
983
984 rpci.pAttachments = &attach_desc;
985 rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
986
987 device = dev->device();
988 vkCreateRenderPass(device, &rpci, NULL, &m_renderpass);
989 }
990
991 VkRenderpassObj::~VkRenderpassObj() { vkDestroyRenderPass(device, m_renderpass, NULL); }
992
993 VkImageObj::VkImageObj(VkDeviceObj *dev) {
994 m_device = dev;
995 m_descriptorImageInfo.imageView = VK_NULL_HANDLE;
996 m_descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
997 }
998
999 // clang-format off
1000 void VkImageObj::ImageMemoryBarrier(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect,
1001 VkFlags output_mask /*=
1002 VK_ACCESS_HOST_WRITE_BIT |
1003 VK_ACCESS_SHADER_WRITE_BIT |
1004 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
1005 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
1006 VK_MEMORY_OUTPUT_COPY_BIT*/,
1007 VkFlags input_mask /*=
1008 VK_ACCESS_HOST_READ_BIT |
1009 VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
1010 VK_ACCESS_INDEX_READ_BIT |
1011 VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
1012 VK_ACCESS_UNIFORM_READ_BIT |
1013 VK_ACCESS_SHADER_READ_BIT |
1014 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
1015 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
1016 VK_MEMORY_INPUT_COPY_BIT*/, VkImageLayout image_layout,
1017 VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages,
1018 uint32_t srcQueueFamilyIndex, uint32_t dstQueueFamilyIndex) {
1019 // clang-format on
1020 // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS
1021 const VkImageSubresourceRange subresourceRange =
1022 subresource_range(aspect, 0, /*VK_REMAINING_MIP_LEVELS*/ 1, 0, 1 /*VK_REMAINING_ARRAY_LAYERS*/);
1023 VkImageMemoryBarrier barrier;
1024 barrier = image_memory_barrier(output_mask, input_mask, Layout(), image_layout, subresourceRange, srcQueueFamilyIndex,
1025 dstQueueFamilyIndex);
1026
1027 VkImageMemoryBarrier *pmemory_barrier = &barrier;
1028
1029 // write barrier to the command buffer
1030 vkCmdPipelineBarrier(cmd_buf->handle(), src_stages, dest_stages, VK_DEPENDENCY_BY_REGION_BIT, 0, NULL, 0, NULL, 1,
1031 pmemory_barrier);
1032 }
1033
1034 void VkImageObj::SetLayout(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect, VkImageLayout image_layout) {
1035 VkFlags src_mask, dst_mask;
1036 const VkFlags all_cache_outputs = VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
1037 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT;
1038 const VkFlags all_cache_inputs = VK_ACCESS_HOST_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT | VK_ACCESS_INDEX_READ_BIT |
1039 VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
1040 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
1041 VK_ACCESS_MEMORY_READ_BIT;
1042
1043 if (image_layout == m_descriptorImageInfo.imageLayout) {
1044 return;
1045 }
1046
1047 switch (image_layout) {
1048 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
1049 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
1050 src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
1051 else
1052 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
1053 dst_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_TRANSFER_READ_BIT;
1054 break;
1055
1056 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
1057 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
1058 src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
1059 else if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
1060 src_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
1061 else
1062 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
1063 dst_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
1064 break;
1065
1066 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
1067 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
1068 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
1069 else
1070 src_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
1071 dst_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_MEMORY_READ_BIT;
1072 break;
1073
1074 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
1075 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
1076 src_mask = VK_ACCESS_TRANSFER_READ_BIT;
1077 else
1078 src_mask = 0;
1079 dst_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
1080 break;
1081
1082 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
1083 dst_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
1084 src_mask = all_cache_outputs;
1085 break;
1086
1087 default:
1088 src_mask = all_cache_outputs;
1089 dst_mask = all_cache_inputs;
1090 break;
1091 }
1092
1093 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_UNDEFINED) src_mask = 0;
1094
1095 ImageMemoryBarrier(cmd_buf, aspect, src_mask, dst_mask, image_layout);
1096 m_descriptorImageInfo.imageLayout = image_layout;
1097 }
1098
1099 void VkImageObj::SetLayout(VkImageAspectFlags aspect, VkImageLayout image_layout) {
1100 if (image_layout == m_descriptorImageInfo.imageLayout) {
1101 return;
1102 }
1103
1104 VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
1105 VkCommandBufferObj cmd_buf(m_device, &pool);
1106
1107 /* Build command buffer to set image layout in the driver */
1108 cmd_buf.begin();
1109 SetLayout(&cmd_buf, aspect, image_layout);
1110 cmd_buf.end();
1111
1112 cmd_buf.QueueCommandBuffer();
1113 }
1114
1115 bool VkImageObj::IsCompatible(const VkImageUsageFlags usages, const VkFormatFeatureFlags features) {
1116 VkFormatFeatureFlags all_feature_flags =
1117 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT |
1118 VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT |
1119 VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT |
1120 VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT |
1121 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT |
1122 VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
1123 if (m_device->IsEnabledExtension(VK_IMG_FILTER_CUBIC_EXTENSION_NAME)) {
1124 all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG;
1125 }
1126
1127 if (m_device->IsEnabledExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
1128 all_feature_flags |= VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR;
1129 }
1130
1131 if (m_device->IsEnabledExtension(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME)) {
1132 all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT;
1133 }
1134
1135 if (m_device->IsEnabledExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
1136 all_feature_flags |= VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR |
1137 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR |
1138 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR |
1139 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR |
1140 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR |
1141 VK_FORMAT_FEATURE_DISJOINT_BIT_KHR | VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR;
1142 }
1143
1144 if ((features & all_feature_flags) == 0) return false; // whole format unsupported
1145
1146 if ((usages & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) return false;
1147 if ((usages & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) return false;
1148 if ((usages & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) return false;
1149 if ((usages & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
1150 return false;
1151
1152 if (m_device->IsEnabledExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
1153 // WORKAROUND: for DevSim not reporting extended enums, and possibly some drivers too
1154 const auto all_nontransfer_feature_flags =
1155 all_feature_flags ^ (VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR);
1156 const bool transfer_probably_supported_anyway = (features & all_nontransfer_feature_flags) > 0;
1157 if (!transfer_probably_supported_anyway) {
1158 if ((usages & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR)) return false;
1159 if ((usages & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) return false;
1160 }
1161 }
1162
1163 return true;
1164 }
1165
1166 void VkImageObj::InitNoLayout(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
1167 VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
1168 const std::vector<uint32_t> *queue_families, bool memory) {
1169 VkFormatProperties image_fmt;
1170 VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
1171
1172 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), format, &image_fmt);
1173
1174 if (requested_tiling == VK_IMAGE_TILING_LINEAR) {
1175 if (IsCompatible(usage, image_fmt.linearTilingFeatures)) {
1176 tiling = VK_IMAGE_TILING_LINEAR;
1177 } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) {
1178 tiling = VK_IMAGE_TILING_OPTIMAL;
1179 } else {
1180 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage
1181 << ", supported linear features: " << image_fmt.linearTilingFeatures;
1182 }
1183 } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) {
1184 tiling = VK_IMAGE_TILING_OPTIMAL;
1185 } else if (IsCompatible(usage, image_fmt.linearTilingFeatures)) {
1186 tiling = VK_IMAGE_TILING_LINEAR;
1187 } else {
1188 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage
1189 << ", supported optimal features: " << image_fmt.optimalTilingFeatures;
1190 }
1191
1192 VkImageCreateInfo imageCreateInfo = vk_testing::Image::create_info();
1193 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1194 imageCreateInfo.format = format;
1195 imageCreateInfo.extent.width = width;
1196 imageCreateInfo.extent.height = height;
1197 imageCreateInfo.mipLevels = mipLevels;
1198 imageCreateInfo.tiling = tiling;
1199 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1200
1201 // Automatically set sharing mode etc. based on queue family information
1202 if (queue_families && (queue_families->size() > 1)) {
1203 imageCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT;
1204 imageCreateInfo.queueFamilyIndexCount = static_cast<uint32_t>(queue_families->size());
1205 imageCreateInfo.pQueueFamilyIndices = queue_families->data();
1206 }
1207
1208 Layout(imageCreateInfo.initialLayout);
1209 imageCreateInfo.usage = usage;
1210 if (memory)
1211 vk_testing::Image::init(*m_device, imageCreateInfo, reqs);
1212 else
1213 vk_testing::Image::init_no_mem(*m_device, imageCreateInfo);
1214 }
1215
1216 void VkImageObj::Init(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
1217 VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
1218 const std::vector<uint32_t> *queue_families, bool memory) {
1219 InitNoLayout(width, height, mipLevels, format, usage, requested_tiling, reqs, queue_families, memory);
1220
1221 if (!initialized() || !memory) return; // We don't have a valid handle from early stage init, and thus SetLayout will fail
1222
1223 VkImageLayout newLayout;
1224 if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
1225 newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1226 else if (usage & VK_IMAGE_USAGE_SAMPLED_BIT)
1227 newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1228 else
1229 newLayout = m_descriptorImageInfo.imageLayout;
1230
1231 VkImageAspectFlags image_aspect = 0;
1232 if (FormatIsDepthAndStencil(format)) {
1233 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
1234 } else if (FormatIsDepthOnly(format)) {
1235 image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
1236 } else if (FormatIsStencilOnly(format)) {
1237 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
1238 } else { // color
1239 image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
1240 }
1241 SetLayout(image_aspect, newLayout);
1242 }
1243
1244 void VkImageObj::init(const VkImageCreateInfo *create_info) {
1245 VkFormatProperties image_fmt;
1246 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), create_info->format, &image_fmt);
1247
1248 switch (create_info->tiling) {
1249 case VK_IMAGE_TILING_OPTIMAL:
1250 if (!IsCompatible(create_info->usage, image_fmt.optimalTilingFeatures)) {
1251 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase
1252 << create_info->usage << ", supported optimal features: " << image_fmt.optimalTilingFeatures;
1253 }
1254 break;
1255 case VK_IMAGE_TILING_LINEAR:
1256 if (!IsCompatible(create_info->usage, image_fmt.linearTilingFeatures)) {
1257 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase
1258 << create_info->usage << ", supported linear features: " << image_fmt.linearTilingFeatures;
1259 }
1260 break;
1261 default:
1262 break;
1263 }
1264 Layout(create_info->initialLayout);
1265
1266 vk_testing::Image::init(*m_device, *create_info, 0);
1267
1268 VkImageAspectFlags image_aspect = 0;
1269 if (FormatIsDepthAndStencil(create_info->format)) {
1270 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
1271 } else if (FormatIsDepthOnly(create_info->format)) {
1272 image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
1273 } else if (FormatIsStencilOnly(create_info->format)) {
1274 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
1275 } else { // color
1276 image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
1277 }
1278 SetLayout(image_aspect, VK_IMAGE_LAYOUT_GENERAL);
1279 }
1280
1281 VkResult VkImageObj::CopyImage(VkImageObj &src_image) {
1282 VkImageLayout src_image_layout, dest_image_layout;
1283
1284 VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
1285 VkCommandBufferObj cmd_buf(m_device, &pool);
1286
1287 /* Build command buffer to copy staging texture to usable texture */
1288 cmd_buf.begin();
1289
1290 /* TODO: Can we determine image aspect from image object? */
1291 src_image_layout = src_image.Layout();
1292 src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
1293
1294 dest_image_layout = (this->Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : this->Layout();
1295 this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1296
1297 VkImageCopy copy_region = {};
1298 copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1299 copy_region.srcSubresource.baseArrayLayer = 0;
1300 copy_region.srcSubresource.mipLevel = 0;
1301 copy_region.srcSubresource.layerCount = 1;
1302 copy_region.srcOffset.x = 0;
1303 copy_region.srcOffset.y = 0;
1304 copy_region.srcOffset.z = 0;
1305 copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1306 copy_region.dstSubresource.baseArrayLayer = 0;
1307 copy_region.dstSubresource.mipLevel = 0;
1308 copy_region.dstSubresource.layerCount = 1;
1309 copy_region.dstOffset.x = 0;
1310 copy_region.dstOffset.y = 0;
1311 copy_region.dstOffset.z = 0;
1312 copy_region.extent = src_image.extent();
1313
1314 vkCmdCopyImage(cmd_buf.handle(), src_image.handle(), src_image.Layout(), handle(), Layout(), 1, ©_region);
1315
1316 src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout);
1317
1318 this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout);
1319
1320 cmd_buf.end();
1321
1322 cmd_buf.QueueCommandBuffer();
1323
1324 return VK_SUCCESS;
1325 }

// Same as CopyImage, but in the opposite direction
VkResult VkImageObj::CopyImageOut(VkImageObj &dst_image) {
    VkImageLayout src_image_layout, dest_image_layout;

    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
    VkCommandBufferObj cmd_buf(m_device, &pool);

    cmd_buf.begin();

    src_image_layout = this->Layout();
    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

    dest_image_layout = (dst_image.Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : dst_image.Layout();
    dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

    VkImageCopy copy_region = {};
    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.srcSubresource.baseArrayLayer = 0;
    copy_region.srcSubresource.mipLevel = 0;
    copy_region.srcSubresource.layerCount = 1;
    copy_region.srcOffset.x = 0;
    copy_region.srcOffset.y = 0;
    copy_region.srcOffset.z = 0;
    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.dstSubresource.baseArrayLayer = 0;
    copy_region.dstSubresource.mipLevel = 0;
    copy_region.dstSubresource.layerCount = 1;
    copy_region.dstOffset.x = 0;
    copy_region.dstOffset.y = 0;
    copy_region.dstOffset.z = 0;
    copy_region.extent = dst_image.extent();

    vkCmdCopyImage(cmd_buf.handle(), handle(), Layout(), dst_image.handle(), dst_image.Layout(), 1, &copy_region);

    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout);

    dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout);

    cmd_buf.end();

    cmd_buf.QueueCommandBuffer();

    return VK_SUCCESS;
}

// Return 16x16 pixel block
std::array<std::array<uint32_t, 16>, 16> VkImageObj::Read() {
    VkImageObj stagingImage(m_device);
    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    stagingImage.Init(16, 16, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                      VK_IMAGE_TILING_LINEAR, reqs);
    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0));
    CopyImageOut(stagingImage);
    void *data = stagingImage.MapMemory();
    std::array<std::array<uint32_t, 16>, 16> m = {};
    if (data) {
        for (uint32_t y = 0; y < stagingImage.extent().height; y++) {
            uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
            for (uint32_t x = 0; x < stagingImage.extent().width; x++) m[y][x] = row[x];
        }
    }
    stagingImage.UnmapMemory();
    return m;
}
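
// Usage sketch (illustrative only): a test can read back the top-left 16x16
// texels of a transfer-src-capable image and check a value, e.g.:
//
//     std::array<std::array<uint32_t, 16>, 16> pixels = image.Read();
//     EXPECT_EQ(0xffff0000, pixels[0][0]);  // expected B8G8R8A8 texel value is test-specific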

VkTextureObj::VkTextureObj(VkDeviceObj *device, uint32_t *colors) : VkImageObj(device) {
    m_device = device;
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    uint32_t tex_colors[2] = {0xffff0000, 0xff00ff00};
    void *data;
    uint32_t x, y;
    VkImageObj stagingImage(device);
    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    stagingImage.Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                      VK_IMAGE_TILING_LINEAR, reqs);
    VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0));

    if (colors == NULL) colors = tex_colors;

    VkImageViewCreateInfo view = {};
    view.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    view.pNext = NULL;
    view.image = VK_NULL_HANDLE;
    view.viewType = VK_IMAGE_VIEW_TYPE_2D;
    view.format = tex_format;
    view.components.r = VK_COMPONENT_SWIZZLE_R;
    view.components.g = VK_COMPONENT_SWIZZLE_G;
    view.components.b = VK_COMPONENT_SWIZZLE_B;
    view.components.a = VK_COMPONENT_SWIZZLE_A;
    view.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    view.subresourceRange.baseMipLevel = 0;
    view.subresourceRange.levelCount = 1;
    view.subresourceRange.baseArrayLayer = 0;
    view.subresourceRange.layerCount = 1;

    /* create image */
    Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    /* create image view */
    view.image = handle();
    m_textureView.init(*m_device, view);
    m_descriptorImageInfo.imageView = m_textureView.handle();

    data = stagingImage.MapMemory();

    for (y = 0; y < extent().height; y++) {
        uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
        for (x = 0; x < extent().width; x++) row[x] = colors[(x & 1) ^ (y & 1)];
    }
    stagingImage.UnmapMemory();
    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
    VkImageObj::CopyImage(stagingImage);
}
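
// Usage sketch (illustrative only): the default two-color checkerboard can be
// overridden by passing a custom color pair, assuming a valid VkDeviceObj:
//
//     uint32_t my_colors[2] = {0xff0000ff, 0xffffffff};  // B8G8R8A8 texel values
//     VkTextureObj texture(m_device, my_colors);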

VkSamplerObj::VkSamplerObj(VkDeviceObj *device) {
    m_device = device;

    VkSamplerCreateInfo samplerCreateInfo;
    memset(&samplerCreateInfo, 0, sizeof(samplerCreateInfo));
    samplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerCreateInfo.magFilter = VK_FILTER_NEAREST;
    samplerCreateInfo.minFilter = VK_FILTER_NEAREST;
    samplerCreateInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
    samplerCreateInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
    samplerCreateInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
    samplerCreateInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
    samplerCreateInfo.mipLodBias = 0.0;
    samplerCreateInfo.anisotropyEnable = VK_FALSE;
    samplerCreateInfo.maxAnisotropy = 1;
    samplerCreateInfo.compareOp = VK_COMPARE_OP_NEVER;
    samplerCreateInfo.minLod = 0.0;
    samplerCreateInfo.maxLod = 0.0;
    samplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
    samplerCreateInfo.unnormalizedCoordinates = VK_FALSE;

    init(*m_device, samplerCreateInfo);
}
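
// Usage sketch (illustrative only): tests typically pair a sampler with a
// texture through a descriptor set; assuming the VkDescriptorSetObj helper
// declared in vkrenderframework.h, this might look like:
//
//     VkSamplerObj sampler(m_device);
//     VkTextureObj texture(m_device);
//     VkDescriptorSetObj descriptorSet(m_device);
//     descriptorSet.AppendSamplerTexture(&sampler, &texture);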

/*
 * Basic ConstantBuffer constructor. Then use create methods to fill in the
 * details.
 */
VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkBufferUsageFlags usage) {
    m_device = device;

    memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo));

    // Special case for usages outside of original limits of framework
    if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) != usage) {
        init_no_mem(*m_device, create_info(0, usage));
    }
}

VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkDeviceSize allocationSize, const void *data,
                                         VkBufferUsageFlags usage) {
    m_device = device;

    memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo));

    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) == usage) {
        init_as_src_and_dst(*m_device, allocationSize, reqs);
    } else {
        init(*m_device, create_info(allocationSize, usage), reqs);
    }

    void *pData = memory().map();
    memcpy(pData, data, static_cast<size_t>(allocationSize));
    memory().unmap();

    /*
     * Constant buffers are going to be used as vertex input buffers
     * or as shader uniform buffers. So, we'll create the shader buffer
     * descriptor here so it's ready if needed.
     */
    this->m_descriptorBufferInfo.buffer = handle();
    this->m_descriptorBufferInfo.offset = 0;
    this->m_descriptorBufferInfo.range = allocationSize;
}
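
// Usage sketch (illustrative only): creating a host-visible vertex buffer from
// an in-memory array, assuming a small Vertex struct defined by the test:
//
//     struct Vertex { float x, y, z, w; };
//     const Vertex vb_data[3] = {{-1, -1, 0, 1}, {1, -1, 0, 1}, {0, 1, 0, 1}};
//     VkConstantBufferObj vertex_buffer(m_device, sizeof(vb_data), vb_data,
//                                       VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);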

VkPipelineShaderStageCreateInfo const &VkShaderObj::GetStageCreateInfo() const { return m_stage_info; }

VkShaderObj::VkShaderObj(VkDeviceObj *device, const char *shader_code, VkShaderStageFlagBits stage, VkRenderFramework *framework,
                         char const *name, bool debug, VkSpecializationInfo *specInfo) {
    VkResult U_ASSERT_ONLY err = VK_SUCCESS;
    std::vector<unsigned int> spv;
    VkShaderModuleCreateInfo moduleCreateInfo;

    m_device = device;
    m_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    m_stage_info.pNext = nullptr;
    m_stage_info.flags = 0;
    m_stage_info.stage = stage;
    m_stage_info.module = VK_NULL_HANDLE;
    m_stage_info.pName = name;
    m_stage_info.pSpecializationInfo = specInfo;

    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;

    framework->GLSLtoSPV(stage, shader_code, spv, debug);
    moduleCreateInfo.pCode = spv.data();
    moduleCreateInfo.codeSize = spv.size() * sizeof(unsigned int);
    moduleCreateInfo.flags = 0;

    err = init_try(*m_device, moduleCreateInfo);
    m_stage_info.module = handle();
    assert(VK_SUCCESS == err);
}

VkShaderObj::VkShaderObj(VkDeviceObj *device, const std::string spv_source, VkShaderStageFlagBits stage,
                         VkRenderFramework *framework, char const *name, VkSpecializationInfo *specInfo) {
    VkResult U_ASSERT_ONLY err = VK_SUCCESS;
    std::vector<unsigned int> spv;
    VkShaderModuleCreateInfo moduleCreateInfo;

    m_device = device;
    m_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    m_stage_info.pNext = nullptr;
    m_stage_info.flags = 0;
    m_stage_info.stage = stage;
    m_stage_info.module = VK_NULL_HANDLE;
    m_stage_info.pName = name;
    m_stage_info.pSpecializationInfo = specInfo;

    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;

    framework->ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
    moduleCreateInfo.pCode = spv.data();
    moduleCreateInfo.codeSize = spv.size() * sizeof(unsigned int);
    moduleCreateInfo.flags = 0;

    err = init_try(*m_device, moduleCreateInfo);
    m_stage_info.module = handle();
    assert(VK_SUCCESS == err);
}
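
// Usage sketch (illustrative only): building a shader stage object from GLSL
// source inside a test derived from VkRenderFramework (the entry point name
// and debug flag are assumed to default in the header):
//
//     char const *vsSource =
//         "#version 450\n"
//         "void main() { gl_Position = vec4(0.0); }\n";
//     VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);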

VkPipelineLayoutObj::VkPipelineLayoutObj(VkDeviceObj *device,
                                         const std::vector<const VkDescriptorSetLayoutObj *> &descriptor_layouts,
                                         const std::vector<VkPushConstantRange> &push_constant_ranges) {
    VkPipelineLayoutCreateInfo pl_ci = {};
    pl_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pl_ci.pushConstantRangeCount = static_cast<uint32_t>(push_constant_ranges.size());
    pl_ci.pPushConstantRanges = push_constant_ranges.data();

    auto descriptor_layouts_unwrapped = MakeTestbindingHandles<const vk_testing::DescriptorSetLayout>(descriptor_layouts);

    init(*device, pl_ci, descriptor_layouts_unwrapped);
}

void VkPipelineLayoutObj::Reset() { *this = VkPipelineLayoutObj(); }

VkPipelineObj::VkPipelineObj(VkDeviceObj *device) {
    m_device = device;

    m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    m_vi_state.pNext = nullptr;
    m_vi_state.flags = 0;
    m_vi_state.vertexBindingDescriptionCount = 0;
    m_vi_state.pVertexBindingDescriptions = nullptr;
    m_vi_state.vertexAttributeDescriptionCount = 0;
    m_vi_state.pVertexAttributeDescriptions = nullptr;

    m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    m_ia_state.pNext = nullptr;
    m_ia_state.flags = 0;
    m_ia_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    m_ia_state.primitiveRestartEnable = VK_FALSE;

    m_te_state = nullptr;

    m_vp_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    m_vp_state.pNext = VK_NULL_HANDLE;
    m_vp_state.flags = 0;
    m_vp_state.viewportCount = 1;
    m_vp_state.scissorCount = 1;
    m_vp_state.pViewports = nullptr;
    m_vp_state.pScissors = nullptr;

    m_rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    m_rs_state.pNext = &m_line_state;
    m_rs_state.flags = 0;
    m_rs_state.depthClampEnable = VK_FALSE;
    m_rs_state.rasterizerDiscardEnable = VK_FALSE;
    m_rs_state.polygonMode = VK_POLYGON_MODE_FILL;
    m_rs_state.cullMode = VK_CULL_MODE_BACK_BIT;
    m_rs_state.frontFace = VK_FRONT_FACE_CLOCKWISE;
    m_rs_state.depthBiasEnable = VK_FALSE;
    m_rs_state.depthBiasConstantFactor = 0.0f;
    m_rs_state.depthBiasClamp = 0.0f;
    m_rs_state.depthBiasSlopeFactor = 0.0f;
    m_rs_state.lineWidth = 1.0f;

    m_line_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
    m_line_state.pNext = nullptr;
    m_line_state.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
    m_line_state.stippledLineEnable = VK_FALSE;
    m_line_state.lineStippleFactor = 0;
    m_line_state.lineStipplePattern = 0;

    m_ms_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    m_ms_state.pNext = nullptr;
    m_ms_state.flags = 0;
    m_ms_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    m_ms_state.sampleShadingEnable = VK_FALSE;
    m_ms_state.minSampleShading = 0.0f;
    m_ms_state.pSampleMask = nullptr;
    m_ms_state.alphaToCoverageEnable = VK_FALSE;
    m_ms_state.alphaToOneEnable = VK_FALSE;

    m_ds_state = nullptr;

    memset(&m_cb_state, 0, sizeof(m_cb_state));
    m_cb_state.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    m_cb_state.blendConstants[0] = 1.0f;
    m_cb_state.blendConstants[1] = 1.0f;
    m_cb_state.blendConstants[2] = 1.0f;
    m_cb_state.blendConstants[3] = 1.0f;

    memset(&m_pd_state, 0, sizeof(m_pd_state));
}

void VkPipelineObj::AddShader(VkShaderObj *shader) { m_shaderStages.push_back(shader->GetStageCreateInfo()); }

void VkPipelineObj::AddShader(VkPipelineShaderStageCreateInfo const &createInfo) { m_shaderStages.push_back(createInfo); }

void VkPipelineObj::AddVertexInputAttribs(VkVertexInputAttributeDescription *vi_attrib, uint32_t count) {
    m_vi_state.pVertexAttributeDescriptions = vi_attrib;
    m_vi_state.vertexAttributeDescriptionCount = count;
}

void VkPipelineObj::AddVertexInputBindings(VkVertexInputBindingDescription *vi_binding, uint32_t count) {
    m_vi_state.pVertexBindingDescriptions = vi_binding;
    m_vi_state.vertexBindingDescriptionCount = count;
}

void VkPipelineObj::AddColorAttachment(uint32_t binding, const VkPipelineColorBlendAttachmentState &att) {
    if (binding + 1 > m_colorAttachments.size()) {
        m_colorAttachments.resize(binding + 1);
    }
    m_colorAttachments[binding] = att;
}

void VkPipelineObj::SetDepthStencil(const VkPipelineDepthStencilStateCreateInfo *ds_state) { m_ds_state = ds_state; }

void VkPipelineObj::SetViewport(const vector<VkViewport> viewports) {
    m_viewports = viewports;
    // If we explicitly set a null viewport, pass it through to the create info,
    // but preserve viewportCount because it mustn't change
    if (m_viewports.size() == 0) {
        m_vp_state.pViewports = nullptr;
    }
}

void VkPipelineObj::SetScissor(const vector<VkRect2D> scissors) {
    m_scissors = scissors;
    // If we explicitly set a null scissor, pass it through to the create info,
    // but preserve scissorCount because it mustn't change
    if (m_scissors.size() == 0) {
        m_vp_state.pScissors = nullptr;
    }
}

void VkPipelineObj::MakeDynamic(VkDynamicState state) {
    /* Only add a state once */
    for (auto it = m_dynamic_state_enables.begin(); it != m_dynamic_state_enables.end(); it++) {
        if ((*it) == state) return;
    }
    m_dynamic_state_enables.push_back(state);
}

void VkPipelineObj::SetMSAA(const VkPipelineMultisampleStateCreateInfo *ms_state) { m_ms_state = *ms_state; }

void VkPipelineObj::SetInputAssembly(const VkPipelineInputAssemblyStateCreateInfo *ia_state) { m_ia_state = *ia_state; }

void VkPipelineObj::SetRasterization(const VkPipelineRasterizationStateCreateInfo *rs_state) {
    m_rs_state = *rs_state;
    m_rs_state.pNext = &m_line_state;
}

void VkPipelineObj::SetTessellation(const VkPipelineTessellationStateCreateInfo *te_state) { m_te_state = te_state; }

void VkPipelineObj::SetLineState(const VkPipelineRasterizationLineStateCreateInfoEXT *line_state) { m_line_state = *line_state; }

void VkPipelineObj::InitGraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo *gp_ci) {
    gp_ci->stageCount = m_shaderStages.size();
    gp_ci->pStages = m_shaderStages.size() ? m_shaderStages.data() : nullptr;

    m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    gp_ci->pVertexInputState = &m_vi_state;

    m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    gp_ci->pInputAssemblyState = &m_ia_state;

    gp_ci->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    gp_ci->pNext = NULL;
    gp_ci->flags = 0;

    m_cb_state.attachmentCount = m_colorAttachments.size();
    m_cb_state.pAttachments = m_colorAttachments.data();

    if (m_viewports.size() > 0) {
        m_vp_state.viewportCount = m_viewports.size();
        m_vp_state.pViewports = m_viewports.data();
    } else {
        MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
    }

    if (m_scissors.size() > 0) {
        m_vp_state.scissorCount = m_scissors.size();
        m_vp_state.pScissors = m_scissors.data();
    } else {
        MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
    }

    memset(&m_pd_state, 0, sizeof(m_pd_state));
    if (m_dynamic_state_enables.size() > 0) {
        m_pd_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
        m_pd_state.dynamicStateCount = m_dynamic_state_enables.size();
        m_pd_state.pDynamicStates = m_dynamic_state_enables.data();
        gp_ci->pDynamicState = &m_pd_state;
    }

    gp_ci->subpass = 0;
    gp_ci->pViewportState = &m_vp_state;
    gp_ci->pRasterizationState = &m_rs_state;
    gp_ci->pMultisampleState = &m_ms_state;
    gp_ci->pDepthStencilState = m_ds_state;
    gp_ci->pColorBlendState = &m_cb_state;
    gp_ci->pTessellationState = m_te_state;
}

VkResult VkPipelineObj::CreateVKPipeline(VkPipelineLayout layout, VkRenderPass render_pass, VkGraphicsPipelineCreateInfo *gp_ci) {
    VkGraphicsPipelineCreateInfo info = {};

    // if not given a CreateInfo, create and initialize a local one.
    if (gp_ci == nullptr) {
        gp_ci = &info;
        InitGraphicsPipelineCreateInfo(gp_ci);
    }

    gp_ci->layout = layout;
    gp_ci->renderPass = render_pass;

    return init_try(*m_device, *gp_ci);
}
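
// Usage sketch (illustrative only): assembling a minimal graphics pipeline in
// a test, assuming vs/fs are VkShaderObj instances and render_pass is a
// VkRenderPass created earlier by the test:
//
//     VkPipelineObj pipe(m_device);
//     pipe.AddShader(&vs);
//     pipe.AddShader(&fs);
//     pipe.AddColorAttachment(0, VkPipelineColorBlendAttachmentState{});
//     VkPipelineLayoutObj pipeline_layout(m_device, {}, {});
//     ASSERT_VK_SUCCESS(pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass));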

VkCommandBufferObj::VkCommandBufferObj(VkDeviceObj *device, VkCommandPoolObj *pool, VkCommandBufferLevel level, VkQueueObj *queue) {
    m_device = device;
    if (queue) {
        m_queue = queue;
    } else {
        m_queue = m_device->GetDefaultQueue();
    }
    assert(m_queue);

    auto create_info = vk_testing::CommandBuffer::create_info(pool->handle());
    create_info.level = level;
    init(*device, create_info);
}

void VkCommandBufferObj::PipelineBarrier(VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages,
                                         VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
                                         const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                                         const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                                         const VkImageMemoryBarrier *pImageMemoryBarriers) {
    vkCmdPipelineBarrier(handle(), src_stages, dest_stages, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
                         bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
}
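
// Usage sketch (illustrative only): issuing a single image memory barrier
// through this thin wrapper, assuming a recording command buffer cmd_buf and a
// VkImageObj image currently in TRANSFER_DST_OPTIMAL layout:
//
//     VkImageMemoryBarrier barrier = {};
//     barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
//     barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
//     barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
//     barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
//     barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
//     barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
//     barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
//     barrier.image = image.handle();
//     barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
//     cmd_buf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0,
//                             0, nullptr, 0, nullptr, 1, &barrier);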

void VkCommandBufferObj::ClearAllBuffers(const vector<std::unique_ptr<VkImageObj>> &color_objs, VkClearColorValue clear_color,
                                         VkDepthStencilObj *depth_stencil_obj, float depth_clear_value,
                                         uint32_t stencil_clear_value) {
    // whatever we want to do, we do it to the whole buffer
    VkImageSubresourceRange subrange = {};
    // subrange.aspectMask to be set later
    subrange.baseMipLevel = 0;
    // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS
    subrange.levelCount = 1;  // VK_REMAINING_MIP_LEVELS;
    subrange.baseArrayLayer = 0;
    // TODO: Mesa crashing with VK_REMAINING_ARRAY_LAYERS
    subrange.layerCount = 1;  // VK_REMAINING_ARRAY_LAYERS;

    const VkImageLayout clear_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;

    for (const auto &color_obj : color_objs) {
        subrange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        color_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED);
        color_obj->SetLayout(this, subrange.aspectMask, clear_layout);
        ClearColorImage(color_obj->image(), clear_layout, &clear_color, 1, &subrange);
    }

    if (depth_stencil_obj && depth_stencil_obj->Initialized()) {
        subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
        if (FormatIsDepthOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
        if (FormatIsStencilOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;

        depth_stencil_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED);
        depth_stencil_obj->SetLayout(this, subrange.aspectMask, clear_layout);

        VkClearDepthStencilValue clear_value = {depth_clear_value, stencil_clear_value};
        ClearDepthStencilImage(depth_stencil_obj->handle(), clear_layout, &clear_value, 1, &subrange);
    }
}

void VkCommandBufferObj::FillBuffer(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize fill_size, uint32_t data) {
    vkCmdFillBuffer(handle(), buffer, offset, fill_size, data);
}

void VkCommandBufferObj::UpdateBuffer(VkBuffer buffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    vkCmdUpdateBuffer(handle(), buffer, dstOffset, dataSize, pData);
}

void VkCommandBufferObj::CopyImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                   uint32_t regionCount, const VkImageCopy *pRegions) {
    vkCmdCopyImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}

void VkCommandBufferObj::ResolveImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                      VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions) {
    vkCmdResolveImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}

void VkCommandBufferObj::ClearColorImage(VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                         uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    vkCmdClearColorImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}

void VkCommandBufferObj::ClearDepthStencilImage(VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pColor,
                                                uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    vkCmdClearDepthStencilImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}

void VkCommandBufferObj::BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer) {
    BuildAccelerationStructure(as, scratchBuffer, VK_NULL_HANDLE);
}

void VkCommandBufferObj::BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer, VkBuffer instanceData) {
    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
        (PFN_vkCmdBuildAccelerationStructureNV)vkGetDeviceProcAddr(as->dev(), "vkCmdBuildAccelerationStructureNV");
    assert(vkCmdBuildAccelerationStructureNV != nullptr);

    vkCmdBuildAccelerationStructureNV(handle(), &as->info(), instanceData, 0, VK_FALSE, as->handle(), VK_NULL_HANDLE, scratchBuffer,
                                      0);
}

void VkCommandBufferObj::PrepareAttachments(const vector<std::unique_ptr<VkImageObj>> &color_atts,
                                            VkDepthStencilObj *depth_stencil_att) {
    for (const auto &color_att : color_atts) {
        color_att->SetLayout(this, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
    }

    if (depth_stencil_att && depth_stencil_att->Initialized()) {
        VkImageAspectFlags aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
        if (FormatIsDepthOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
        if (FormatIsStencilOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_STENCIL_BIT;

        depth_stencil_att->SetLayout(this, aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
    }
}

void VkCommandBufferObj::BeginRenderPass(const VkRenderPassBeginInfo &info) {
    vkCmdBeginRenderPass(handle(), &info, VK_SUBPASS_CONTENTS_INLINE);
}

void VkCommandBufferObj::EndRenderPass() { vkCmdEndRenderPass(handle()); }

void VkCommandBufferObj::SetViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports) {
    vkCmdSetViewport(handle(), firstViewport, viewportCount, pViewports);
}

void VkCommandBufferObj::SetStencilReference(VkStencilFaceFlags faceMask, uint32_t reference) {
    vkCmdSetStencilReference(handle(), faceMask, reference);
}

void VkCommandBufferObj::DrawIndexed(uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                     uint32_t firstInstance) {
    vkCmdDrawIndexed(handle(), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}

void VkCommandBufferObj::Draw(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
    vkCmdDraw(handle(), vertexCount, instanceCount, firstVertex, firstInstance);
}

void VkCommandBufferObj::QueueCommandBuffer(bool checkSuccess) {
    VkFenceObj nullFence;
    QueueCommandBuffer(nullFence, checkSuccess);
}

void VkCommandBufferObj::QueueCommandBuffer(const VkFenceObj &fence, bool checkSuccess) {
    VkResult err = VK_SUCCESS;

    err = m_queue->submit(*this, fence, checkSuccess);
    if (checkSuccess) {
        ASSERT_VK_SUCCESS(err);
    }

    err = m_queue->wait();
    if (checkSuccess) {
        ASSERT_VK_SUCCESS(err);
    }

    // TODO: Determine if we really want this serialization here
    // Wait for work to finish before cleaning up.
    vkDeviceWaitIdle(m_device->device());
}
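
// Usage sketch (illustrative only): the typical record/submit flow a test uses
// with this wrapper, assuming a device and its graphics queue family index:
//
//     VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
//     VkCommandBufferObj cmd(m_device, &pool);
//     cmd.begin();
//     cmd.FillBuffer(buffer.handle(), 0, 4096, 0xdeadbeef);  // buffer is a VkBufferObj owned by the test
//     cmd.end();
//     cmd.QueueCommandBuffer();  // submits, then waits for the device to idle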

void VkCommandBufferObj::BindDescriptorSet(VkDescriptorSetObj &descriptorSet) {
    VkDescriptorSet set_obj = descriptorSet.GetDescriptorSetHandle();

    // bind pipeline, vertex buffer (descriptor set) and WVP (dynamic buffer view)
    if (set_obj) {
        vkCmdBindDescriptorSets(handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, descriptorSet.GetPipelineLayout(), 0, 1, &set_obj, 0,
                                NULL);
    }
}

void VkCommandBufferObj::BindIndexBuffer(VkBufferObj *indexBuffer, VkDeviceSize offset, VkIndexType indexType) {
    vkCmdBindIndexBuffer(handle(), indexBuffer->handle(), offset, indexType);
}

void VkCommandBufferObj::BindVertexBuffer(VkConstantBufferObj *vertexBuffer, VkDeviceSize offset, uint32_t binding) {
    vkCmdBindVertexBuffers(handle(), binding, 1, &vertexBuffer->handle(), &offset);
}

VkCommandPoolObj::VkCommandPoolObj(VkDeviceObj *device, uint32_t queue_family_index, VkCommandPoolCreateFlags flags) {
    init(*device, vk_testing::CommandPool::create_info(queue_family_index, flags));
}

bool VkDepthStencilObj::Initialized() { return m_initialized; }

VkDepthStencilObj::VkDepthStencilObj(VkDeviceObj *device) : VkImageObj(device) { m_initialized = false; }

VkImageView *VkDepthStencilObj::BindInfo() { return &m_attachmentBindInfo; }

VkFormat VkDepthStencilObj::Format() const { return this->m_depth_stencil_fmt; }

void VkDepthStencilObj::Init(VkDeviceObj *device, int32_t width, int32_t height, VkFormat format, VkImageUsageFlags usage) {
    VkImageViewCreateInfo view_info = {};

    m_device = device;
    m_initialized = true;
    m_depth_stencil_fmt = format;

    /* create image */
    VkImageObj::Init(width, height, 1, m_depth_stencil_fmt, usage, VK_IMAGE_TILING_OPTIMAL);

    VkImageAspectFlags aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
    if (FormatIsDepthOnly(format))
        aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
    else if (FormatIsStencilOnly(format))
        aspect = VK_IMAGE_ASPECT_STENCIL_BIT;

    SetLayout(aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);

    view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    view_info.pNext = NULL;
    view_info.image = VK_NULL_HANDLE;
    view_info.subresourceRange.aspectMask = aspect;
    view_info.subresourceRange.baseMipLevel = 0;
    view_info.subresourceRange.levelCount = 1;
    view_info.subresourceRange.baseArrayLayer = 0;
    view_info.subresourceRange.layerCount = 1;
    view_info.flags = 0;
    view_info.format = m_depth_stencil_fmt;
    view_info.image = handle();
    view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
    m_imageView.init(*m_device, view_info);

    m_attachmentBindInfo = m_imageView.handle();
}
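
// Usage sketch (illustrative only): setting up a depth attachment for a test's
// render target, assuming the chosen depth format has already been verified as
// supported on the device:
//
//     VkDepthStencilObj depth(m_device);
//     depth.Init(m_device, 256, 256, VK_FORMAT_D24_UNORM_S8_UINT,
//                VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT);
//     VkImageView *bind_info = depth.BindInfo();  // handed to framebuffer creation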