1 /*
2 * Copyright (c) 2015-2019 The Khronos Group Inc.
3 * Copyright (c) 2015-2019 Valve Corporation
4 * Copyright (c) 2015-2019 LunarG, Inc.
5 * Copyright (c) 2015-2019 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
20 * Author: Tony Barbour <tony@LunarG.com>
21 * Author: Dave Houlton <daveh@lunarg.com>
22 */
23
24 #include "vkrenderframework.h"
25 #include "vk_format_utils.h"
26
27 #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
28 #define GET_DEVICE_PROC_ADDR(dev, entrypoint) \
29 { \
30 fp##entrypoint = (PFN_vk##entrypoint)vkGetDeviceProcAddr(dev, "vk" #entrypoint); \
31 assert(fp##entrypoint != NULL); \
32 }
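// Illustrative expansion of GET_DEVICE_PROC_ADDR (a sketch; fpTrimCommandPool and command_pool are
// hypothetical caller-side names, and the caller must already declare a PFN_vkTrimCommandPool member):
//     GET_DEVICE_PROC_ADDR(device(), TrimCommandPool);   // looks up "vkTrimCommandPool", asserts non-NULL
//     fpTrimCommandPool(device(), command_pool, 0);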
33
34 VkRenderFramework::VkRenderFramework()
35 : inst(VK_NULL_HANDLE),
36 m_device(NULL),
37 m_commandPool(VK_NULL_HANDLE),
38 m_commandBuffer(NULL),
39 m_renderPass(VK_NULL_HANDLE),
40 m_framebuffer(VK_NULL_HANDLE),
41 m_addRenderPassSelfDependency(false),
42 m_width(256.0), // default window width
43 m_height(256.0), // default window height
44 m_render_target_fmt(VK_FORMAT_R8G8B8A8_UNORM),
45 m_depth_stencil_fmt(VK_FORMAT_UNDEFINED),
46 m_clear_via_load_op(true),
47 m_depth_clear_color(1.0),
48 m_stencil_clear_color(0),
49 m_depthStencil(NULL),
50 m_CreateDebugReportCallback(VK_NULL_HANDLE),
51 m_DestroyDebugReportCallback(VK_NULL_HANDLE),
52 m_globalMsgCallback(VK_NULL_HANDLE),
53 m_devMsgCallback(VK_NULL_HANDLE) {
54 memset(&m_renderPassBeginInfo, 0, sizeof(m_renderPassBeginInfo));
55 m_renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
56
57 // clear the back buffer to dark grey
58 m_clear_color.float32[0] = 0.25f;
59 m_clear_color.float32[1] = 0.25f;
60 m_clear_color.float32[2] = 0.25f;
61 m_clear_color.float32[3] = 0.0f;
62 }
63
64 VkRenderFramework::~VkRenderFramework() {}
65
66 VkPhysicalDevice VkRenderFramework::gpu() {
67 EXPECT_NE((VkInstance)0, inst); // Invalid to request gpu before instance exists
68 return objs[0];
69 }
70
71 // Return true if layer name is found and spec+implementation values are >= requested values
72 bool VkRenderFramework::InstanceLayerSupported(const char *name, uint32_t spec, uint32_t implementation) {
73 uint32_t layer_count = 0;
74 std::vector<VkLayerProperties> layer_props;
75
76 VkResult res = vkEnumerateInstanceLayerProperties(&layer_count, NULL);
77 if (VK_SUCCESS != res) return false;
78 if (0 == layer_count) return false;
79
80 layer_props.resize(layer_count);
81 res = vkEnumerateInstanceLayerProperties(&layer_count, layer_props.data());
82 if (VK_SUCCESS != res) return false;
83
84 for (auto &it : layer_props) {
85 if (0 == strncmp(name, it.layerName, VK_MAX_EXTENSION_NAME_SIZE)) {
86 return ((it.specVersion >= spec) && (it.implementationVersion >= implementation));
87 }
88 }
89 return false;
90 }
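// Usage sketch: tests typically guard optional layers with this check before adding them to the
// instance layer list, e.g.
//     if (InstanceLayerSupported("VK_LAYER_LUNARG_device_simulation"))
//         m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_simulation");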
91
92 // Enable the device profile layer as the last layer on the stack, overriding devsim if present; return false if unavailable
93 bool VkRenderFramework::EnableDeviceProfileLayer() {
94 if (InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) {
95 if (VkTestFramework::m_devsim_layer) {
96 assert(0 == strcmp(m_instance_layer_names.back(), "VK_LAYER_LUNARG_device_simulation"));
97 m_instance_layer_names.pop_back();
98 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
99 } else {
100 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
101 }
102 } else {
103 printf(" Did not find VK_LAYER_LUNARG_device_profile_api layer; skipped.\n");
104 return false;
105 }
106 return true;
107 }
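// Usage sketch: because this only edits m_instance_layer_names, it must run before InitFramework();
// a test that depends on the profile layer's limit overrides would typically do:
//     if (!EnableDeviceProfileLayer()) return;  // layer missing, skip the test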
108
109 // Return true if extension name is found and spec value is >= requested spec value
110 bool VkRenderFramework::InstanceExtensionSupported(const char *ext_name, uint32_t spec) {
111 uint32_t ext_count = 0;
112 std::vector<VkExtensionProperties> ext_props;
113 VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &ext_count, nullptr);
114 if (VK_SUCCESS != res) return false;
115 if (0 == ext_count) return false;
116
117 ext_props.resize(ext_count);
118 res = vkEnumerateInstanceExtensionProperties(nullptr, &ext_count, ext_props.data());
119 if (VK_SUCCESS != res) return false;
120
121 for (auto &it : ext_props) {
122 if (0 == strncmp(ext_name, it.extensionName, VK_MAX_EXTENSION_NAME_SIZE)) {
123 return (it.specVersion >= spec);
124 }
125 }
126 return false;
127 }
128
129 // Return true if instance exists and extension name is in the list
130 bool VkRenderFramework::InstanceExtensionEnabled(const char *ext_name) {
131 if (!inst) return false;
132
133 bool ext_found = false;
134 for (auto ext : m_instance_extension_names) {
135 if (!strcmp(ext, ext_name)) {
136 ext_found = true;
137 break;
138 }
139 }
140 return ext_found;
141 }
142
143 // Return true if extension name is found and spec value is >= requested spec value
144 bool VkRenderFramework::DeviceExtensionSupported(VkPhysicalDevice dev, const char *layer, const char *ext_name, uint32_t spec) {
145 if (!inst) {
146 EXPECT_NE((VkInstance)0, inst); // Complain, not cool without an instance
147 return false;
148 }
149 uint32_t ext_count = 0;
150 std::vector<VkExtensionProperties> ext_props;
151 VkResult res = vkEnumerateDeviceExtensionProperties(dev, layer, &ext_count, nullptr);
152 if (VK_SUCCESS != res) return false;
153 if (0 == ext_count) return false;
154
155 ext_props.resize(ext_count);
156 res = vkEnumerateDeviceExtensionProperties(dev, layer, &ext_count, ext_props.data());
157 if (VK_SUCCESS != res) return false;
158
159 for (auto &it : ext_props) {
160 if (0 == strncmp(ext_name, it.extensionName, VK_MAX_EXTENSION_NAME_SIZE)) {
161 return (it.specVersion >= spec);
162 }
163 }
164 return false;
165 }
166
167 // Return true if device is created and extension name is found in the list
168 bool VkRenderFramework::DeviceExtensionEnabled(const char *ext_name) {
169 if (NULL == m_device) return false;
170
171 bool ext_found = false;
172 for (auto ext : m_device_extension_names) {
173 if (!strcmp(ext, ext_name)) {
174 ext_found = true;
175 break;
176 }
177 }
178 return ext_found;
179 }
180
181 // WARNING: The DevSim layer can override the properties that are tested here, making the result of
182 // this function dubious when DevSim is active.
183 bool VkRenderFramework::DeviceIsMockICD() {
184 VkPhysicalDeviceProperties props = vk_testing::PhysicalDevice(gpu()).properties();
185 if ((props.vendorID == 0xba5eba11) && (props.deviceID == 0xf005ba11) && (0 == strcmp("Vulkan Mock Device", props.deviceName))) {
186 return true;
187 }
188 return false;
189 }
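// Usage sketch (illustrative): skip work that needs a real GPU when running on the mock ICD.
//     if (DeviceIsMockICD()) {
//         printf("Test not supported by MockICD, skipping.\n");
//         return;
//     }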
190
191 // Render into a RenderTarget and read the pixels back to see if the device can really draw.
192 // Note: This cannot be called from inside an initialized VkRenderFramework because frameworks cannot be "nested".
193 // It is best to call it before "Init()".
194 bool VkRenderFramework::DeviceCanDraw() {
195 InitFramework(NULL, NULL);
196 InitState(NULL, NULL, 0);
197 InitViewport();
198 InitRenderTarget();
199
200 // Draw a triangle that covers the entire viewport.
201 char const *vsSource =
202 "#version 450\n"
203 "\n"
204 "vec2 vertices[3];\n"
205 "void main() { \n"
206 " vertices[0] = vec2(-10.0, -10.0);\n"
207 " vertices[1] = vec2( 10.0, -10.0);\n"
208 " vertices[2] = vec2( 0.0, 10.0);\n"
209 " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
210 "}\n";
211 // Draw with a solid color.
212 char const *fsSource =
213 "#version 450\n"
214 "\n"
215 "layout(location=0) out vec4 color;\n"
216 "void main() {\n"
217 " color = vec4(32.0/255.0);\n"
218 "}\n";
219 VkShaderObj *vs = new VkShaderObj(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
220 VkShaderObj *fs = new VkShaderObj(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
221
222 VkPipelineObj *pipe = new VkPipelineObj(m_device);
223 pipe->AddShader(vs);
224 pipe->AddShader(fs);
225 pipe->AddDefaultColorAttachment();
226
227 VkDescriptorSetObj *descriptorSet = new VkDescriptorSetObj(m_device);
228 descriptorSet->CreateVKDescriptorSet(m_commandBuffer);
229
230 pipe->CreateVKPipeline(descriptorSet->GetPipelineLayout(), renderPass());
231
232 m_commandBuffer->begin();
233 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
234
235 vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe->handle());
236 m_commandBuffer->BindDescriptorSet(*descriptorSet);
237
238 VkViewport viewport = m_viewports[0];
239 VkRect2D scissors = m_scissors[0];
240
241 vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
242 vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissors);
243
244 vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
245
246 m_commandBuffer->EndRenderPass();
247 m_commandBuffer->end();
248
249 VkSubmitInfo submit_info = {};
250 submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
251 submit_info.commandBufferCount = 1;
252 submit_info.pCommandBuffers = &m_commandBuffer->handle();
253
254 vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
255 vkQueueWaitIdle(m_device->m_queue);
256
257 auto pixels = m_renderTargets[0]->Read();
258
259 delete descriptorSet;
260 delete pipe;
261 delete fs;
262 delete vs;
263 ShutdownFramework();
264 return pixels[0][0] == 0x20202020;
265 }
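// Usage sketch: probe drawing capability once, before the test's own Init*() calls, since this
// function creates and tears down a complete framework internally.
//     if (!DeviceCanDraw()) { printf("Device can't draw, skipping test.\n"); return; }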
266
267 void VkRenderFramework::InitFramework(PFN_vkDebugReportCallbackEXT dbgFunction, void *userData, void *instance_pnext) {
268 // Only enable device profile layer by default if devsim is not enabled
269 if (!VkTestFramework::m_devsim_layer && InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) {
270 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
271 }
272
273 // Assert not already initialized
274 ASSERT_EQ((VkInstance)0, inst);
275
276 // Remove any unsupported layer names from list
277 for (auto layer = m_instance_layer_names.begin(); layer != m_instance_layer_names.end();) {
278 if (!InstanceLayerSupported(*layer)) {
279 ADD_FAILURE() << "InitFramework(): Requested layer " << *layer << " was not found. Disabled.";
280 layer = m_instance_layer_names.erase(layer);
281 } else {
282 ++layer;
283 }
284 }
285
286 // Remove any unsupported instance extension names from list
287 for (auto ext = m_instance_extension_names.begin(); ext != m_instance_extension_names.end();) {
288 if (!InstanceExtensionSupported(*ext)) {
289 ADD_FAILURE() << "InitFramework(): Requested extension " << *ext << " was not found. Disabled.";
290 ext = m_instance_extension_names.erase(ext);
291 } else {
292 ++ext;
293 }
294 }
295
296 VkInstanceCreateInfo instInfo = {};
297 VkResult U_ASSERT_ONLY err;
298
299 instInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
300 instInfo.pNext = instance_pnext;
301 instInfo.pApplicationInfo = &app_info;
302 instInfo.enabledLayerCount = m_instance_layer_names.size();
303 instInfo.ppEnabledLayerNames = m_instance_layer_names.data();
304 instInfo.enabledExtensionCount = m_instance_extension_names.size();
305 instInfo.ppEnabledExtensionNames = m_instance_extension_names.data();
306
307 VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
308 if (dbgFunction) {
309 // Enable create time debug messages
310 memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo));
311 dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
312 dbgCreateInfo.flags =
313 VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
314 dbgCreateInfo.pfnCallback = dbgFunction;
315 dbgCreateInfo.pUserData = userData;
316
317 dbgCreateInfo.pNext = instInfo.pNext;
318 instInfo.pNext = &dbgCreateInfo;
319 }
320
321 err = vkCreateInstance(&instInfo, NULL, &this->inst);
322 ASSERT_VK_SUCCESS(err);
323
324 err = vkEnumeratePhysicalDevices(inst, &this->gpu_count, NULL);
325 ASSERT_LE(this->gpu_count, ARRAY_SIZE(objs)) << "Too many gpus";
326 ASSERT_VK_SUCCESS(err);
327 err = vkEnumeratePhysicalDevices(inst, &this->gpu_count, objs);
328 ASSERT_VK_SUCCESS(err);
329 ASSERT_GE(this->gpu_count, (uint32_t)1) << "No GPU available";
330 if (dbgFunction) {
331 m_CreateDebugReportCallback =
332 (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(this->inst, "vkCreateDebugReportCallbackEXT");
333 ASSERT_NE(m_CreateDebugReportCallback, (PFN_vkCreateDebugReportCallbackEXT)NULL)
334 << "Did not get function pointer for CreateDebugReportCallback";
335 if (m_CreateDebugReportCallback) {
336 dbgCreateInfo.pNext = nullptr; // clean up from usage in CreateInstance above
337 err = m_CreateDebugReportCallback(this->inst, &dbgCreateInfo, NULL, &m_globalMsgCallback);
338 ASSERT_VK_SUCCESS(err);
339
340 m_DestroyDebugReportCallback =
341 (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(this->inst, "vkDestroyDebugReportCallbackEXT");
342 ASSERT_NE(m_DestroyDebugReportCallback, (PFN_vkDestroyDebugReportCallbackEXT)NULL)
343 << "Did not get function pointer for DestroyDebugReportCallback";
344 m_DebugReportMessage = (PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(this->inst, "vkDebugReportMessageEXT");
345 ASSERT_NE(m_DebugReportMessage, (PFN_vkDebugReportMessageEXT)NULL)
346 << "Did not get function pointer for DebugReportMessage";
347 }
348 }
349 }
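// A minimal bring-up sequence (sketch), matching what DeviceCanDraw() above does:
//     InitFramework(NULL, NULL);  // instance, physical device enumeration, debug callback
//     InitState(NULL, NULL, 0);   // logical device, command pool and command buffer
//     InitViewport();             // default 256x256 viewport/scissor
//     InitRenderTarget();         // single color attachment render pass + framebuffer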
350
351 void VkRenderFramework::ShutdownFramework() {
352 // Nothing to shut down without a VkInstance
353 if (!this->inst) return;
354
355 delete m_commandBuffer;
356 m_commandBuffer = nullptr;
357 delete m_commandPool;
358 m_commandPool = nullptr;
359 if (m_framebuffer) vkDestroyFramebuffer(device(), m_framebuffer, NULL);
360 m_framebuffer = VK_NULL_HANDLE;
361 if (m_renderPass) vkDestroyRenderPass(device(), m_renderPass, NULL);
362 m_renderPass = VK_NULL_HANDLE;
363
364 if (m_globalMsgCallback) m_DestroyDebugReportCallback(this->inst, m_globalMsgCallback, NULL);
365 m_globalMsgCallback = VK_NULL_HANDLE;
366 if (m_devMsgCallback) m_DestroyDebugReportCallback(this->inst, m_devMsgCallback, NULL);
367 m_devMsgCallback = VK_NULL_HANDLE;
368
369 m_renderTargets.clear();
370
371 delete m_depthStencil;
372 m_depthStencil = nullptr;
373
374 // reset the driver
375 delete m_device;
376 m_device = nullptr;
377
378 if (this->inst) vkDestroyInstance(this->inst, NULL);
379 this->inst = (VkInstance)0; // In case we want to re-initialize
380 }
381
382 void VkRenderFramework::GetPhysicalDeviceFeatures(VkPhysicalDeviceFeatures *features) {
383 if (NULL == m_device) {
384 VkDeviceObj *temp_device = new VkDeviceObj(0, objs[0], m_device_extension_names);
385 *features = temp_device->phy().features();
386 delete (temp_device);
387 } else {
388 *features = m_device->phy().features();
389 }
390 }
391
392 void VkRenderFramework::GetPhysicalDeviceProperties(VkPhysicalDeviceProperties *props) {
393 *props = vk_testing::PhysicalDevice(gpu()).properties();
394 }
395
396 void VkRenderFramework::InitState(VkPhysicalDeviceFeatures *features, void *create_device_pnext,
397 const VkCommandPoolCreateFlags flags) {
398 // Remove any unsupported device extension names from list
399 for (auto ext = m_device_extension_names.begin(); ext != m_device_extension_names.end();) {
400 if (!DeviceExtensionSupported(objs[0], nullptr, *ext)) {
401 bool found = false;
402 for (auto layer = m_instance_layer_names.begin(); layer != m_instance_layer_names.end(); ++layer) {
403 if (DeviceExtensionSupported(objs[0], *layer, *ext)) {
404 found = true;
405 break;
406 }
407 }
408 if (!found) {
409 ADD_FAILURE() << "InitState(): The requested device extension " << *ext << " was not found. Disabled.";
410 ext = m_device_extension_names.erase(ext);
411 } else {
412 ++ext;
413 }
414 } else {
415 ++ext;
416 }
417 }
418
419 m_device = new VkDeviceObj(0, objs[0], m_device_extension_names, features, create_device_pnext);
420 m_device->SetDeviceQueue();
421
422 m_depthStencil = new VkDepthStencilObj(m_device);
423
424 m_render_target_fmt = VkTestFramework::GetFormat(inst, m_device);
425
426 m_lineWidth = 1.0f;
427
428 m_depthBiasConstantFactor = 0.0f;
429 m_depthBiasClamp = 0.0f;
430 m_depthBiasSlopeFactor = 0.0f;
431
432 m_blendConstants[0] = 1.0f;
433 m_blendConstants[1] = 1.0f;
434 m_blendConstants[2] = 1.0f;
435 m_blendConstants[3] = 1.0f;
436
437 m_minDepthBounds = 0.f;
438 m_maxDepthBounds = 1.f;
439
440 m_compareMask = 0xff;
441 m_writeMask = 0xff;
442 m_reference = 0;
443
444 m_commandPool = new VkCommandPoolObj(m_device, m_device->graphics_queue_node_index_, flags);
445
446 m_commandBuffer = new VkCommandBufferObj(m_device, m_commandPool);
447 }
448
449 void VkRenderFramework::InitViewport(float width, float height) {
450 VkViewport viewport;
451 VkRect2D scissor;
452 viewport.x = 0;
453 viewport.y = 0;
454 viewport.width = 1.f * width;
455 viewport.height = 1.f * height;
456 viewport.minDepth = 0.f;
457 viewport.maxDepth = 1.f;
458 m_viewports.push_back(viewport);
459
460 scissor.extent.width = (int32_t)width;
461 scissor.extent.height = (int32_t)height;
462 scissor.offset.x = 0;
463 scissor.offset.y = 0;
464 m_scissors.push_back(scissor);
465
466 m_width = width;
467 m_height = height;
468 }
469
470 void VkRenderFramework::InitViewport() { InitViewport(m_width, m_height); }
471 void VkRenderFramework::InitRenderTarget() { InitRenderTarget(1); }
472
473 void VkRenderFramework::InitRenderTarget(uint32_t targets) { InitRenderTarget(targets, NULL); }
474
475 void VkRenderFramework::InitRenderTarget(VkImageView *dsBinding) { InitRenderTarget(1, dsBinding); }
476
477 void VkRenderFramework::InitRenderTarget(uint32_t targets, VkImageView *dsBinding) {
478 std::vector<VkAttachmentDescription> attachments;
479 std::vector<VkAttachmentReference> color_references;
480 std::vector<VkImageView> bindings;
481 attachments.reserve(targets + 1); // +1 for dsBinding
482 color_references.reserve(targets);
483 bindings.reserve(targets + 1); // +1 for dsBinding
484
485 VkAttachmentDescription att = {};
486 att.format = m_render_target_fmt;
487 att.samples = VK_SAMPLE_COUNT_1_BIT;
488 att.loadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
489 att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
490 att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
491 att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
492 att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
493 att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
494
495 VkAttachmentReference ref = {};
496 ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
497
498 m_renderPassClearValues.clear();
499 VkClearValue clear = {};
500 clear.color = m_clear_color;
501
502 for (uint32_t i = 0; i < targets; i++) {
503 attachments.push_back(att);
504
505 ref.attachment = i;
506 color_references.push_back(ref);
507
508 m_renderPassClearValues.push_back(clear);
509
510 std::unique_ptr<VkImageObj> img(new VkImageObj(m_device));
511
512 VkFormatProperties props;
513
514 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), m_render_target_fmt, &props);
515
516 if (props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
517 img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt,
518 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
519 VK_IMAGE_TILING_LINEAR);
520 } else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
521 img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt,
522 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
523 VK_IMAGE_TILING_OPTIMAL);
524 } else {
525 FAIL() << "Neither Linear nor Optimal allowed for render target";
526 }
527
528 bindings.push_back(img->targetView(m_render_target_fmt));
529 m_renderTargets.push_back(std::move(img));
530 }
531
532 VkSubpassDescription subpass = {};
533 subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
534 subpass.flags = 0;
535 subpass.inputAttachmentCount = 0;
536 subpass.pInputAttachments = NULL;
537 subpass.colorAttachmentCount = targets;
538 subpass.pColorAttachments = color_references.data();
539 subpass.pResolveAttachments = NULL;
540
541 VkAttachmentReference ds_reference;
542 if (dsBinding) {
543 att.format = m_depth_stencil_fmt;
544         att.loadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
546 att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
547 att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
548 att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
549 att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
550 att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
551 attachments.push_back(att);
552
553 clear.depthStencil.depth = m_depth_clear_color;
554 clear.depthStencil.stencil = m_stencil_clear_color;
555 m_renderPassClearValues.push_back(clear);
556
557 bindings.push_back(*dsBinding);
558
559 ds_reference.attachment = targets;
560 ds_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
561 subpass.pDepthStencilAttachment = &ds_reference;
562 } else {
563 subpass.pDepthStencilAttachment = NULL;
564 }
565
566 subpass.preserveAttachmentCount = 0;
567 subpass.pPreserveAttachments = NULL;
568
569 VkRenderPassCreateInfo rp_info = {};
570 rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
571 rp_info.attachmentCount = attachments.size();
572 rp_info.pAttachments = attachments.data();
573 rp_info.subpassCount = 1;
574 rp_info.pSubpasses = &subpass;
575 VkSubpassDependency subpass_dep = {};
576 if (m_addRenderPassSelfDependency) {
577 // Add a subpass self-dependency to subpass 0 of default renderPass
578 subpass_dep.srcSubpass = 0;
579 subpass_dep.dstSubpass = 0;
580 // Just using all framebuffer-space pipeline stages in order to get a reasonably large
581 // set of bits that can be used for both src & dst
582 subpass_dep.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
583 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
584 subpass_dep.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
585 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
586 // Add all of the gfx mem access bits that correlate to the fb-space pipeline stages
587 subpass_dep.srcAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
588 VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
589 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
590 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
591 subpass_dep.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
592 VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
593 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
594 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
595 // Must include dep_by_region bit when src & dst both include framebuffer-space stages
596 subpass_dep.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
597 rp_info.dependencyCount = 1;
598 rp_info.pDependencies = &subpass_dep;
599 }
600
601 vkCreateRenderPass(device(), &rp_info, NULL, &m_renderPass);
602 renderPass_info_ = rp_info; // Save away a copy for tests that need access to the render pass state
603     // Create the framebuffer using the render pass created above, binding the color attachment
604     // views and any depth/stencil attachment view
605 VkFramebufferCreateInfo fb_info = {};
606 fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
607 fb_info.pNext = NULL;
608 fb_info.renderPass = m_renderPass;
609 fb_info.attachmentCount = bindings.size();
610 fb_info.pAttachments = bindings.data();
611 fb_info.width = (uint32_t)m_width;
612 fb_info.height = (uint32_t)m_height;
613 fb_info.layers = 1;
614
615 vkCreateFramebuffer(device(), &fb_info, NULL, &m_framebuffer);
616
617 m_renderPassBeginInfo.renderPass = m_renderPass;
618 m_renderPassBeginInfo.framebuffer = m_framebuffer;
619 m_renderPassBeginInfo.renderArea.extent.width = (int32_t)m_width;
620 m_renderPassBeginInfo.renderArea.extent.height = (int32_t)m_height;
621 m_renderPassBeginInfo.clearValueCount = m_renderPassClearValues.size();
622 m_renderPassBeginInfo.pClearValues = m_renderPassClearValues.data();
623 }
624
625 void VkRenderFramework::DestroyRenderTarget() {
626 vkDestroyRenderPass(device(), m_renderPass, nullptr);
627 m_renderPass = VK_NULL_HANDLE;
628 vkDestroyFramebuffer(device(), m_framebuffer, nullptr);
629 m_framebuffer = VK_NULL_HANDLE;
630 }
631
632 VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj) : vk_testing::Device(obj), id(id) {
633 init();
634
635 props = phy().properties();
636 queue_props = phy().queue_properties();
637 }
638
639 VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj, std::vector<const char *> &extension_names,
640 VkPhysicalDeviceFeatures *features, void *create_device_pnext)
641 : vk_testing::Device(obj), id(id) {
642 init(extension_names, features, create_device_pnext);
643
644 props = phy().properties();
645 queue_props = phy().queue_properties();
646 }
647
648 uint32_t VkDeviceObj::QueueFamilyMatching(VkQueueFlags with, VkQueueFlags without, bool all_bits) {
649 // Find a queue family with and without desired capabilities
650 for (uint32_t i = 0; i < queue_props.size(); i++) {
651 auto flags = queue_props[i].queueFlags;
652 bool matches = all_bits ? (flags & with) == with : (flags & with) != 0;
653 if (matches && ((flags & without) == 0) && (queue_props[i].queueCount > 0)) {
654 return i;
655 }
656 }
657 return UINT32_MAX;
658 }
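// Illustrative queries (sketch): a graphics-capable family, or a transfer-only family with neither
// graphics nor compute bits; both return UINT32_MAX when no family matches.
//     uint32_t gfx_family = QueueFamilyMatching(VK_QUEUE_GRAPHICS_BIT, 0);
//     uint32_t transfer_only = QueueFamilyMatching(VK_QUEUE_TRANSFER_BIT,
//                                                  VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT);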
659
660 void VkDeviceObj::SetDeviceQueue() {
661     ASSERT_FALSE(graphics_queues().empty());
662 m_queue = graphics_queues()[0]->handle();
663 }
664
665 VkQueueObj *VkDeviceObj::GetDefaultQueue() {
666 if (graphics_queues().empty()) return nullptr;
667 return graphics_queues()[0];
668 }
669 VkDescriptorSetLayoutObj::VkDescriptorSetLayoutObj(const VkDeviceObj *device,
670 const std::vector<VkDescriptorSetLayoutBinding> &descriptor_set_bindings,
671 VkDescriptorSetLayoutCreateFlags flags) {
672 VkDescriptorSetLayoutCreateInfo dsl_ci = {};
673 dsl_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
674 dsl_ci.flags = flags;
675 dsl_ci.bindingCount = static_cast<uint32_t>(descriptor_set_bindings.size());
676 dsl_ci.pBindings = descriptor_set_bindings.data();
677
678 init(*device, dsl_ci);
679 }
680
681 VkDescriptorSetObj::VkDescriptorSetObj(VkDeviceObj *device) : m_device(device), m_nextSlot(0) {}
682
683 VkDescriptorSetObj::~VkDescriptorSetObj() {
684 if (m_set) {
685 delete m_set;
686 }
687 }
688
689 int VkDescriptorSetObj::AppendDummy() {
690 /* request a descriptor but do not update it */
691 VkDescriptorSetLayoutBinding binding = {};
692 binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
693 binding.descriptorCount = 1;
694 binding.binding = m_layout_bindings.size();
695 binding.stageFlags = VK_SHADER_STAGE_ALL;
696 binding.pImmutableSamplers = NULL;
697
698 m_layout_bindings.push_back(binding);
699 m_type_counts[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] += binding.descriptorCount;
700
701 return m_nextSlot++;
702 }
703
704 int VkDescriptorSetObj::AppendBuffer(VkDescriptorType type, VkConstantBufferObj &constantBuffer) {
705 assert(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
706 type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
707 VkDescriptorSetLayoutBinding binding = {};
708 binding.descriptorType = type;
709 binding.descriptorCount = 1;
710 binding.binding = m_layout_bindings.size();
711 binding.stageFlags = VK_SHADER_STAGE_ALL;
712 binding.pImmutableSamplers = NULL;
713
714 m_layout_bindings.push_back(binding);
715 m_type_counts[type] += binding.descriptorCount;
716
717 m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0, type, 1,
718 &constantBuffer.m_descriptorBufferInfo));
719
720 return m_nextSlot++;
721 }
722
723 int VkDescriptorSetObj::AppendSamplerTexture(VkSamplerObj *sampler, VkTextureObj *texture) {
724 VkDescriptorSetLayoutBinding binding = {};
725 binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
726 binding.descriptorCount = 1;
727 binding.binding = m_layout_bindings.size();
728 binding.stageFlags = VK_SHADER_STAGE_ALL;
729 binding.pImmutableSamplers = NULL;
730
731 m_layout_bindings.push_back(binding);
732 m_type_counts[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] += binding.descriptorCount;
733 VkDescriptorImageInfo tmp = texture->DescriptorImageInfo();
734 tmp.sampler = sampler->handle();
735 m_imageSamplerDescriptors.push_back(tmp);
736
737 m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0,
738 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &tmp));
739
740 return m_nextSlot++;
741 }
742
743 VkPipelineLayout VkDescriptorSetObj::GetPipelineLayout() const { return m_pipeline_layout.handle(); }
744
745 VkDescriptorSet VkDescriptorSetObj::GetDescriptorSetHandle() const {
746 if (m_set)
747 return m_set->handle();
748 else
749 return VK_NULL_HANDLE;
750 }
751
752 void VkDescriptorSetObj::CreateVKDescriptorSet(VkCommandBufferObj *commandBuffer) {
753 if (m_type_counts.size()) {
754 // create VkDescriptorPool
755 VkDescriptorPoolSize poolSize;
756 vector<VkDescriptorPoolSize> sizes;
757 for (auto it = m_type_counts.begin(); it != m_type_counts.end(); ++it) {
758 poolSize.descriptorCount = it->second;
759 poolSize.type = it->first;
760 sizes.push_back(poolSize);
761 }
762 VkDescriptorPoolCreateInfo pool = {};
763 pool.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
764 pool.poolSizeCount = sizes.size();
765 pool.maxSets = 1;
766 pool.pPoolSizes = sizes.data();
767 init(*m_device, pool);
768 }
769
770 // create VkDescriptorSetLayout
771 VkDescriptorSetLayoutCreateInfo layout = {};
772 layout.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
773 layout.bindingCount = m_layout_bindings.size();
774 layout.pBindings = m_layout_bindings.data();
775
776 m_layout.init(*m_device, layout);
777 vector<const vk_testing::DescriptorSetLayout *> layouts;
778 layouts.push_back(&m_layout);
779
780 // create VkPipelineLayout
781 VkPipelineLayoutCreateInfo pipeline_layout = {};
782 pipeline_layout.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
783 pipeline_layout.setLayoutCount = layouts.size();
784 pipeline_layout.pSetLayouts = NULL;
785
786 m_pipeline_layout.init(*m_device, pipeline_layout, layouts);
787
788 if (m_type_counts.size()) {
789 // create VkDescriptorSet
790 m_set = alloc_sets(*m_device, m_layout);
791
792 // build the update array
793 size_t imageSamplerCount = 0;
794 for (std::vector<VkWriteDescriptorSet>::iterator it = m_writes.begin(); it != m_writes.end(); it++) {
795 it->dstSet = m_set->handle();
796 if (it->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
797 it->pImageInfo = &m_imageSamplerDescriptors[imageSamplerCount++];
798 }
799
800 // do the updates
801 m_device->update_descriptor_sets(m_writes);
802 }
803 }
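// Usage sketch, mirroring DeviceCanDraw() above (the uniform buffer is illustrative):
//     VkDescriptorSetObj descriptorSet(m_device);
//     descriptorSet.AppendBuffer(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, uniform_buffer);
//     descriptorSet.CreateVKDescriptorSet(m_commandBuffer);  // pool + layout + set + descriptor writes
//     pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());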
804
805 VkRenderpassObj::VkRenderpassObj(VkDeviceObj *dev) {
806 // Create a renderPass with a single color attachment
807 VkAttachmentReference attach = {};
808 attach.layout = VK_IMAGE_LAYOUT_GENERAL;
809
810 VkSubpassDescription subpass = {};
811 subpass.pColorAttachments = &attach;
812 subpass.colorAttachmentCount = 1;
813
814 VkRenderPassCreateInfo rpci = {};
815 rpci.subpassCount = 1;
816 rpci.pSubpasses = &subpass;
817 rpci.attachmentCount = 1;
818
819 VkAttachmentDescription attach_desc = {};
820 attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
821 attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
822 attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
823 attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
824
825 rpci.pAttachments = &attach_desc;
826 rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
827
828 device = dev->device();
829 vkCreateRenderPass(device, &rpci, NULL, &m_renderpass);
830 }
831
832 VkRenderpassObj::~VkRenderpassObj() { vkDestroyRenderPass(device, m_renderpass, NULL); }
833
834 VkImageObj::VkImageObj(VkDeviceObj *dev) {
835 m_device = dev;
836 m_descriptorImageInfo.imageView = VK_NULL_HANDLE;
837 m_descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
838 }
839
840 // clang-format off
841 void VkImageObj::ImageMemoryBarrier(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect,
842 VkFlags output_mask /*=
843 VK_ACCESS_HOST_WRITE_BIT |
844 VK_ACCESS_SHADER_WRITE_BIT |
845 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
846 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
847 VK_MEMORY_OUTPUT_COPY_BIT*/,
848 VkFlags input_mask /*=
849 VK_ACCESS_HOST_READ_BIT |
850 VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
851 VK_ACCESS_INDEX_READ_BIT |
852 VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
853 VK_ACCESS_UNIFORM_READ_BIT |
854 VK_ACCESS_SHADER_READ_BIT |
855 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
856 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
857 VK_MEMORY_INPUT_COPY_BIT*/, VkImageLayout image_layout) {
858 // clang-format on
859 // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS
860 const VkImageSubresourceRange subresourceRange =
861 subresource_range(aspect, 0, /*VK_REMAINING_MIP_LEVELS*/ 1, 0, 1 /*VK_REMAINING_ARRAY_LAYERS*/);
862 VkImageMemoryBarrier barrier;
863 barrier = image_memory_barrier(output_mask, input_mask, Layout(), image_layout, subresourceRange);
864
865 VkImageMemoryBarrier *pmemory_barrier = &barrier;
866
867 VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
868 VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
869
870 // write barrier to the command buffer
871 vkCmdPipelineBarrier(cmd_buf->handle(), src_stages, dest_stages, 0, 0, NULL, 0, NULL, 1, pmemory_barrier);
872 }
873
874 void VkImageObj::SetLayout(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect, VkImageLayout image_layout) {
875 VkFlags src_mask, dst_mask;
876 const VkFlags all_cache_outputs = VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
877 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT;
878 const VkFlags all_cache_inputs = VK_ACCESS_HOST_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT | VK_ACCESS_INDEX_READ_BIT |
879 VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
880 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
881 VK_ACCESS_MEMORY_READ_BIT;
882
883 if (image_layout == m_descriptorImageInfo.imageLayout) {
884 return;
885 }
886
887 switch (image_layout) {
888 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
889 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
890 src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
891 else
892 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
893 dst_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_TRANSFER_READ_BIT;
894 break;
895
896 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
897 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
898 src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
899 else if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
900 src_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
901 else
902 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
903 dst_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
904 break;
905
906 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
907 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
908 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
909 else
910 src_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
911 dst_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_MEMORY_READ_BIT;
912 break;
913
914 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
915 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
916 src_mask = VK_ACCESS_TRANSFER_READ_BIT;
917 else
918 src_mask = 0;
919 dst_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
920 break;
921
922 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
923 dst_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
924 src_mask = all_cache_outputs;
925 break;
926
927 default:
928 src_mask = all_cache_outputs;
929 dst_mask = all_cache_inputs;
930 break;
931 }
932
933 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_UNDEFINED) src_mask = 0;
934
935 ImageMemoryBarrier(cmd_buf, aspect, src_mask, dst_mask, image_layout);
936 m_descriptorImageInfo.imageLayout = image_layout;
937 }
938
939 void VkImageObj::SetLayout(VkImageAspectFlags aspect, VkImageLayout image_layout) {
940 if (image_layout == m_descriptorImageInfo.imageLayout) {
941 return;
942 }
943
944 VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
945 VkCommandBufferObj cmd_buf(m_device, &pool);
946
947 /* Build command buffer to set image layout in the driver */
948 cmd_buf.begin();
949 SetLayout(&cmd_buf, aspect, image_layout);
950 cmd_buf.end();
951
952 cmd_buf.QueueCommandBuffer();
953 }
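// Usage sketch: a one-off transition using an internally created command buffer and a blocking
// submit; the image name here is illustrative (see Read() below for a staging-image example).
//     image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);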
954
955 bool VkImageObj::IsCompatible(const VkImageUsageFlags usages, const VkFormatFeatureFlags features) {
956 VkFormatFeatureFlags all_feature_flags =
957 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT |
958 VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT |
959 VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT |
960 VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT |
961 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT |
962 VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
963 if (m_device->IsEnabledExtension(VK_IMG_FILTER_CUBIC_EXTENSION_NAME)) {
964 all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG;
965 }
966
967 if (m_device->IsEnabledExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
968 all_feature_flags |= VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR;
969 }
970
971 if (m_device->IsEnabledExtension(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME)) {
972 all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT;
973 }
974
975 if (m_device->IsEnabledExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
976 all_feature_flags |= VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR |
977 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR |
978 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR |
979 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR |
980 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR |
981 VK_FORMAT_FEATURE_DISJOINT_BIT_KHR | VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR;
982 }
983
984 if ((features & all_feature_flags) == 0) return false; // whole format unsupported
985
986 if ((usages & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) return false;
987 if ((usages & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) return false;
988 if ((usages & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) return false;
989 if ((usages & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
990 return false;
991
992 if (m_device->IsEnabledExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
993 // WORKAROUND: for DevSim not reporting extended enums, and possibly some drivers too
994 const auto all_nontransfer_feature_flags =
995 all_feature_flags ^ (VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR);
996 const bool transfer_probably_supported_anyway = (features & all_nontransfer_feature_flags) > 0;
997 if (!transfer_probably_supported_anyway) {
998 if ((usages & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR)) return false;
999 if ((usages & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) return false;
1000 }
1001 }
1002
1003 return true;
1004 }
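// Usage sketch (illustrative): Init()/InitNoLayout() below use this pattern to choose a tiling
// mode; the image object and format here are placeholders.
//     VkFormatProperties props;
//     vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, &props);
//     if (!img.IsCompatible(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
//                           props.optimalTilingFeatures)) { /* fall back to linear tiling or fail */ }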
1005
1006 void VkImageObj::InitNoLayout(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
1007 VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
1008 const std::vector<uint32_t> *queue_families) {
1009 VkFormatProperties image_fmt;
1010 VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
1011
1012 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), format, &image_fmt);
1013
1014 if (requested_tiling == VK_IMAGE_TILING_LINEAR) {
1015 if (IsCompatible(usage, image_fmt.linearTilingFeatures)) {
1016 tiling = VK_IMAGE_TILING_LINEAR;
1017 } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) {
1018 tiling = VK_IMAGE_TILING_OPTIMAL;
1019 } else {
1020 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage
1021 << ", supported linear features: " << image_fmt.linearTilingFeatures;
1022 }
1023 } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) {
1024 tiling = VK_IMAGE_TILING_OPTIMAL;
1025 } else if (IsCompatible(usage, image_fmt.linearTilingFeatures)) {
1026 tiling = VK_IMAGE_TILING_LINEAR;
1027 } else {
1028 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage
1029 << ", supported optimal features: " << image_fmt.optimalTilingFeatures;
1030 }
1031
1032 VkImageCreateInfo imageCreateInfo = vk_testing::Image::create_info();
1033 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1034 imageCreateInfo.format = format;
1035 imageCreateInfo.extent.width = width;
1036 imageCreateInfo.extent.height = height;
1037 imageCreateInfo.mipLevels = mipLevels;
1038 imageCreateInfo.tiling = tiling;
1039 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1040
1041 // Automatically set sharing mode etc. based on queue family information
1042 if (queue_families && (queue_families->size() > 1)) {
1043 imageCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT;
1044 imageCreateInfo.queueFamilyIndexCount = static_cast<uint32_t>(queue_families->size());
1045 imageCreateInfo.pQueueFamilyIndices = queue_families->data();
1046 }
1047
1048 Layout(imageCreateInfo.initialLayout);
1049 imageCreateInfo.usage = usage;
1050
1051 vk_testing::Image::init(*m_device, imageCreateInfo, reqs);
1052 }
1053
1054 void VkImageObj::Init(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
1055 VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
1056 const std::vector<uint32_t> *queue_families) {
1057 InitNoLayout(width, height, mipLevels, format, usage, requested_tiling, reqs, queue_families);
1058
1059 if (!initialized()) return; // We don't have a valid handle from early stage init, and thus SetLayout will fail
1060
1061 VkImageLayout newLayout;
1062 if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
1063 newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1064 else if (usage & VK_IMAGE_USAGE_SAMPLED_BIT)
1065 newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1066 else
1067 newLayout = m_descriptorImageInfo.imageLayout;
1068
1069 VkImageAspectFlags image_aspect = 0;
1070 if (FormatIsDepthAndStencil(format)) {
1071 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
1072 } else if (FormatIsDepthOnly(format)) {
1073 image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
1074 } else if (FormatIsStencilOnly(format)) {
1075 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
1076 } else { // color
1077 image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
1078 }
1079 SetLayout(image_aspect, newLayout);
1080 }
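// Usage sketch: a small color render target in optimal tiling, analogous to what
// InitRenderTarget() above creates for each target.
//     VkImageObj img(m_device);
//     img.Init(256, 256, 1, VK_FORMAT_R8G8B8A8_UNORM,
//              VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
//              VK_IMAGE_TILING_OPTIMAL);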
1081
1082 void VkImageObj::init(const VkImageCreateInfo *create_info) {
1083 VkFormatProperties image_fmt;
1084 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), create_info->format, &image_fmt);
1085
1086 switch (create_info->tiling) {
1087 case VK_IMAGE_TILING_OPTIMAL:
1088 if (!IsCompatible(create_info->usage, image_fmt.optimalTilingFeatures)) {
1089 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase
1090 << create_info->usage << ", supported optimal features: " << image_fmt.optimalTilingFeatures;
1091 }
1092 break;
1093 case VK_IMAGE_TILING_LINEAR:
1094 if (!IsCompatible(create_info->usage, image_fmt.linearTilingFeatures)) {
1095 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase
1096 << create_info->usage << ", supported linear features: " << image_fmt.linearTilingFeatures;
1097 }
1098 break;
1099 default:
1100 break;
1101 }
1102 Layout(create_info->initialLayout);
1103
1104 vk_testing::Image::init(*m_device, *create_info, 0);
1105
1106 VkImageAspectFlags image_aspect = 0;
1107 if (FormatIsDepthAndStencil(create_info->format)) {
1108 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
1109 } else if (FormatIsDepthOnly(create_info->format)) {
1110 image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
1111 } else if (FormatIsStencilOnly(create_info->format)) {
1112 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
1113 } else { // color
1114 image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
1115 }
1116 SetLayout(image_aspect, VK_IMAGE_LAYOUT_GENERAL);
1117 }
1118
1119 VkResult VkImageObj::CopyImage(VkImageObj &src_image) {
1120 VkImageLayout src_image_layout, dest_image_layout;
1121
1122 VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
1123 VkCommandBufferObj cmd_buf(m_device, &pool);
1124
1125 /* Build command buffer to copy staging texture to usable texture */
1126 cmd_buf.begin();
1127
1128 /* TODO: Can we determine image aspect from image object? */
1129 src_image_layout = src_image.Layout();
1130 src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
1131
1132 dest_image_layout = (this->Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : this->Layout();
1133 this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1134
1135 VkImageCopy copy_region = {};
1136 copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1137 copy_region.srcSubresource.baseArrayLayer = 0;
1138 copy_region.srcSubresource.mipLevel = 0;
1139 copy_region.srcSubresource.layerCount = 1;
1140 copy_region.srcOffset.x = 0;
1141 copy_region.srcOffset.y = 0;
1142 copy_region.srcOffset.z = 0;
1143 copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1144 copy_region.dstSubresource.baseArrayLayer = 0;
1145 copy_region.dstSubresource.mipLevel = 0;
1146 copy_region.dstSubresource.layerCount = 1;
1147 copy_region.dstOffset.x = 0;
1148 copy_region.dstOffset.y = 0;
1149 copy_region.dstOffset.z = 0;
1150 copy_region.extent = src_image.extent();
1151
1152     vkCmdCopyImage(cmd_buf.handle(), src_image.handle(), src_image.Layout(), handle(), Layout(), 1, &copy_region);
1153
1154 src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout);
1155
1156 this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout);
1157
1158 cmd_buf.end();
1159
1160 cmd_buf.QueueCommandBuffer();
1161
1162 return VK_SUCCESS;
1163 }
1164
1165 // Same as CopyImage, but in the opposite direction
1166 VkResult VkImageObj::CopyImageOut(VkImageObj &dst_image) {
1167 VkImageLayout src_image_layout, dest_image_layout;
1168
1169 VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
1170 VkCommandBufferObj cmd_buf(m_device, &pool);
1171
1172 cmd_buf.begin();
1173
1174 src_image_layout = this->Layout();
1175 this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
1176
1177     dest_image_layout = (dst_image.Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : dst_image.Layout();
1178 dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1179
1180 VkImageCopy copy_region = {};
1181 copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1182 copy_region.srcSubresource.baseArrayLayer = 0;
1183 copy_region.srcSubresource.mipLevel = 0;
1184 copy_region.srcSubresource.layerCount = 1;
1185 copy_region.srcOffset.x = 0;
1186 copy_region.srcOffset.y = 0;
1187 copy_region.srcOffset.z = 0;
1188 copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1189 copy_region.dstSubresource.baseArrayLayer = 0;
1190 copy_region.dstSubresource.mipLevel = 0;
1191 copy_region.dstSubresource.layerCount = 1;
1192 copy_region.dstOffset.x = 0;
1193 copy_region.dstOffset.y = 0;
1194 copy_region.dstOffset.z = 0;
1195 copy_region.extent = dst_image.extent();
1196
1197     vkCmdCopyImage(cmd_buf.handle(), handle(), Layout(), dst_image.handle(), dst_image.Layout(), 1, &copy_region);
1198
1199 this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout);
1200
1201 dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout);
1202
1203 cmd_buf.end();
1204
1205 cmd_buf.QueueCommandBuffer();
1206
1207 return VK_SUCCESS;
1208 }
1209
1210 // Return 16x16 pixel block
1211 std::array<std::array<uint32_t, 16>, 16> VkImageObj::Read() {
1212 VkImageObj stagingImage(m_device);
1213 VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1214
1215 stagingImage.Init(16, 16, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
1216 VK_IMAGE_TILING_LINEAR, reqs);
1217 stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1218 VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0));
1219 CopyImageOut(stagingImage);
1220 void *data = stagingImage.MapMemory();
1221 std::array<std::array<uint32_t, 16>, 16> m = {};
1222 if (data) {
1223 for (uint32_t y = 0; y < stagingImage.extent().height; y++) {
1224 uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
1225 for (uint32_t x = 0; x < stagingImage.extent().width; x++) m[y][x] = row[x];
1226 }
1227 }
1228 stagingImage.UnmapMemory();
1229 return m;
1230 }
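// Note: the returned values are raw 32-bit texels from a VK_FORMAT_B8G8R8A8_UNORM staging copy, so
// the mid-grey vec4(32.0/255.0) written by DeviceCanDraw()'s fragment shader reads back as 0x20202020.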
1231
1232 VkTextureObj::VkTextureObj(VkDeviceObj *device, uint32_t *colors) : VkImageObj(device) {
1233 m_device = device;
1234 const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
1235 uint32_t tex_colors[2] = {0xffff0000, 0xff00ff00};
1236 void *data;
1237 uint32_t x, y;
1238 VkImageObj stagingImage(device);
1239 VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1240
1241 stagingImage.Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
1242 VK_IMAGE_TILING_LINEAR, reqs);
1243 VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0));
1244
1245 if (colors == NULL) colors = tex_colors;
1246
1247 VkImageViewCreateInfo view = {};
1248 view.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
1249 view.pNext = NULL;
1250 view.image = VK_NULL_HANDLE;
1251 view.viewType = VK_IMAGE_VIEW_TYPE_2D;
1252 view.format = tex_format;
1253 view.components.r = VK_COMPONENT_SWIZZLE_R;
1254 view.components.g = VK_COMPONENT_SWIZZLE_G;
1255 view.components.b = VK_COMPONENT_SWIZZLE_B;
1256 view.components.a = VK_COMPONENT_SWIZZLE_A;
1257 view.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1258 view.subresourceRange.baseMipLevel = 0;
1259 view.subresourceRange.levelCount = 1;
1260 view.subresourceRange.baseArrayLayer = 0;
1261 view.subresourceRange.layerCount = 1;
1262
1263 /* create image */
1264 Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
1265 stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1266
1267 /* create image view */
1268 view.image = handle();
1269 m_textureView.init(*m_device, view);
1270 m_descriptorImageInfo.imageView = m_textureView.handle();
1271
1272 data = stagingImage.MapMemory();
1273
1274 for (y = 0; y < extent().height; y++) {
1275 uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
1276 for (x = 0; x < extent().width; x++) row[x] = colors[(x & 1) ^ (y & 1)];
1277 }
1278 stagingImage.UnmapMemory();
1279 stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
1280 VkImageObj::CopyImage(stagingImage);
1281 }
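// The default texture produced above is a 16x16 checkerboard: colors[(x & 1) ^ (y & 1)] alternates
// between tex_colors[0] (0xffff0000) and tex_colors[1] (0xff00ff00) per texel, unless the caller
// supplies its own two-entry color array.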
1282
1283 VkSamplerObj::VkSamplerObj(VkDeviceObj *device) {
1284 m_device = device;
1285
1286 VkSamplerCreateInfo samplerCreateInfo;
1287 memset(&samplerCreateInfo, 0, sizeof(samplerCreateInfo));
1288 samplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
1289 samplerCreateInfo.magFilter = VK_FILTER_NEAREST;
1290 samplerCreateInfo.minFilter = VK_FILTER_NEAREST;
1291 samplerCreateInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
1292 samplerCreateInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1293 samplerCreateInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1294 samplerCreateInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1295 samplerCreateInfo.mipLodBias = 0.0;
1296 samplerCreateInfo.anisotropyEnable = VK_FALSE;
1297 samplerCreateInfo.maxAnisotropy = 1;
1298 samplerCreateInfo.compareOp = VK_COMPARE_OP_NEVER;
1299 samplerCreateInfo.minLod = 0.0;
1300 samplerCreateInfo.maxLod = 0.0;
1301 samplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
1302 samplerCreateInfo.unnormalizedCoordinates = VK_FALSE;
1303
1304 init(*m_device, samplerCreateInfo);
1305 }

/*
 * Basic ConstantBuffer constructor. Use the create methods afterward to fill
 * in the details.
 */
VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkBufferUsageFlags usage) {
    m_device = device;

    memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo));

    // Special case for usage flags outside the framework's original scope
    if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) != usage) {
        init_no_mem(*m_device, create_info(0, usage));
    }
}

VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkDeviceSize allocationSize, const void *data,
                                         VkBufferUsageFlags usage) {
    m_device = device;

    memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo));

    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) == usage) {
        init_as_src_and_dst(*m_device, allocationSize, reqs);
    } else {
        init(*m_device, create_info(allocationSize, usage), reqs);
    }

    void *pData = memory().map();
    memcpy(pData, data, static_cast<size_t>(allocationSize));
    memory().unmap();

    /*
     * Constant buffers are going to be used as vertex input buffers
     * or as shader uniform buffers, so create the shader buffer
     * descriptor here so it is ready if needed.
     */
    this->m_descriptorBufferInfo.buffer = handle();
    this->m_descriptorBufferInfo.offset = 0;
    this->m_descriptorBufferInfo.range = allocationSize;
}
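
// Illustrative sketch (assumption, not framework code): the data-initialized
// constructor is how tests typically build vertex or uniform buffers; the vertex
// data and the m_commandBuffer member below are hypothetical test-fixture state.
//
//     const float vertices[] = {0.0f, -0.5f, 0.5f, 0.5f, -0.5f, 0.5f};
//     VkConstantBufferObj vbo(m_device, sizeof(vertices), vertices, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
//     m_commandBuffer->BindVertexBuffer(&vbo, 0, 0);  // offset 0, binding 0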

VkPipelineShaderStageCreateInfo const &VkShaderObj::GetStageCreateInfo() const { return m_stage_info; }

VkShaderObj::VkShaderObj(VkDeviceObj *device, const char *shader_code, VkShaderStageFlagBits stage, VkRenderFramework *framework,
                         char const *name, bool debug) {
    VkResult U_ASSERT_ONLY err = VK_SUCCESS;
    std::vector<unsigned int> spv;
    VkShaderModuleCreateInfo moduleCreateInfo;

    m_device = device;
    m_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    m_stage_info.pNext = nullptr;
    m_stage_info.flags = 0;
    m_stage_info.stage = stage;
    m_stage_info.module = VK_NULL_HANDLE;
    m_stage_info.pName = name;
    m_stage_info.pSpecializationInfo = nullptr;

    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;

    framework->GLSLtoSPV(stage, shader_code, spv, debug);
    moduleCreateInfo.pCode = spv.data();
    moduleCreateInfo.codeSize = spv.size() * sizeof(unsigned int);
    moduleCreateInfo.flags = 0;

    err = init_try(*m_device, moduleCreateInfo);
    m_stage_info.module = handle();
    assert(VK_SUCCESS == err);
}
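
// Illustrative sketch (assumption): tests usually keep the GLSL source in a
// string literal and pass `this` as the framework so GLSLtoSPV can run; the
// shader text below is a minimal pass-through vertex shader, not framework code.
//
//     static const char vs_source[] =
//         "#version 450\n"
//         "void main() { gl_Position = vec4(0.0); }\n";
//     VkShaderObj vs(m_device, vs_source, VK_SHADER_STAGE_VERTEX_BIT, this);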

VkShaderObj::VkShaderObj(VkDeviceObj *device, const std::string spv_source, VkShaderStageFlagBits stage,
                         VkRenderFramework *framework, char const *name) {
    VkResult U_ASSERT_ONLY err = VK_SUCCESS;
    std::vector<unsigned int> spv;
    VkShaderModuleCreateInfo moduleCreateInfo;

    m_device = device;
    m_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    m_stage_info.pNext = nullptr;
    m_stage_info.flags = 0;
    m_stage_info.stage = stage;
    m_stage_info.module = VK_NULL_HANDLE;
    m_stage_info.pName = name;
    m_stage_info.pSpecializationInfo = nullptr;

    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;

    framework->ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
    moduleCreateInfo.pCode = spv.data();
    moduleCreateInfo.codeSize = spv.size() * sizeof(unsigned int);
    moduleCreateInfo.flags = 0;

    err = init_try(*m_device, moduleCreateInfo);
    m_stage_info.module = handle();
    assert(VK_SUCCESS == err);
}
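
// Illustrative sketch (assumption): this overload takes SPIR-V assembly text
// rather than GLSL, which lets a test hand-craft modules the GLSL compiler would
// refuse to emit; spv_asm_source is a hypothetical std::string of SPIR-V assembly.
//
//     VkShaderObj fs(m_device, spv_asm_source, VK_SHADER_STAGE_FRAGMENT_BIT, this);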

VkPipelineLayoutObj::VkPipelineLayoutObj(VkDeviceObj *device,
                                         const std::vector<const VkDescriptorSetLayoutObj *> &descriptor_layouts,
                                         const std::vector<VkPushConstantRange> &push_constant_ranges) {
    VkPipelineLayoutCreateInfo pl_ci = {};
    pl_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pl_ci.pushConstantRangeCount = static_cast<uint32_t>(push_constant_ranges.size());
    pl_ci.pPushConstantRanges = push_constant_ranges.data();

    auto descriptor_layouts_unwrapped = MakeTestbindingHandles<const vk_testing::DescriptorSetLayout>(descriptor_layouts);

    init(*device, pl_ci, descriptor_layouts_unwrapped);
}
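
// Illustrative sketch (assumption): a typical test builds a set layout first and
// then wraps it in a pipeline layout; VkDescriptorSetLayoutObj and the binding
// initializer shown here are assumed to match the framework's header.
//
//     VkDescriptorSetLayoutObj ds_layout(m_device,
//         {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}});
//     const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});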

void VkPipelineLayoutObj::Reset() { *this = VkPipelineLayoutObj(); }

VkPipelineObj::VkPipelineObj(VkDeviceObj *device) {
    m_device = device;

    m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    m_vi_state.pNext = nullptr;
    m_vi_state.flags = 0;
    m_vi_state.vertexBindingDescriptionCount = 0;
    m_vi_state.pVertexBindingDescriptions = nullptr;
    m_vi_state.vertexAttributeDescriptionCount = 0;
    m_vi_state.pVertexAttributeDescriptions = nullptr;

    m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    m_ia_state.pNext = nullptr;
    m_ia_state.flags = 0;
    m_ia_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    m_ia_state.primitiveRestartEnable = VK_FALSE;

    m_te_state = nullptr;

    m_vp_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    m_vp_state.pNext = nullptr;
    m_vp_state.flags = 0;
    m_vp_state.viewportCount = 1;
    m_vp_state.scissorCount = 1;
    m_vp_state.pViewports = nullptr;
    m_vp_state.pScissors = nullptr;

    m_rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    m_rs_state.pNext = nullptr;
    m_rs_state.flags = 0;
    m_rs_state.depthClampEnable = VK_FALSE;
    m_rs_state.rasterizerDiscardEnable = VK_FALSE;
    m_rs_state.polygonMode = VK_POLYGON_MODE_FILL;
    m_rs_state.cullMode = VK_CULL_MODE_BACK_BIT;
    m_rs_state.frontFace = VK_FRONT_FACE_CLOCKWISE;
    m_rs_state.depthBiasEnable = VK_FALSE;
    m_rs_state.depthBiasConstantFactor = 0.0f;
    m_rs_state.depthBiasClamp = 0.0f;
    m_rs_state.depthBiasSlopeFactor = 0.0f;
    m_rs_state.lineWidth = 1.0f;

    m_ms_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    m_ms_state.pNext = nullptr;
    m_ms_state.flags = 0;
    m_ms_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    m_ms_state.sampleShadingEnable = VK_FALSE;
    m_ms_state.minSampleShading = 0.0f;
    m_ms_state.pSampleMask = nullptr;
    m_ms_state.alphaToCoverageEnable = VK_FALSE;
    m_ms_state.alphaToOneEnable = VK_FALSE;

    m_ds_state = nullptr;

    memset(&m_cb_state, 0, sizeof(m_cb_state));
    m_cb_state.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    m_cb_state.blendConstants[0] = 1.0f;
    m_cb_state.blendConstants[1] = 1.0f;
    m_cb_state.blendConstants[2] = 1.0f;
    m_cb_state.blendConstants[3] = 1.0f;

    memset(&m_pd_state, 0, sizeof(m_pd_state));
}

void VkPipelineObj::AddShader(VkShaderObj *shader) { m_shaderStages.push_back(shader->GetStageCreateInfo()); }

void VkPipelineObj::AddShader(VkPipelineShaderStageCreateInfo const &createInfo) { m_shaderStages.push_back(createInfo); }

void VkPipelineObj::AddVertexInputAttribs(VkVertexInputAttributeDescription *vi_attrib, uint32_t count) {
    m_vi_state.pVertexAttributeDescriptions = vi_attrib;
    m_vi_state.vertexAttributeDescriptionCount = count;
}

void VkPipelineObj::AddVertexInputBindings(VkVertexInputBindingDescription *vi_binding, uint32_t count) {
    m_vi_state.pVertexBindingDescriptions = vi_binding;
    m_vi_state.vertexBindingDescriptionCount = count;
}

void VkPipelineObj::AddColorAttachment(uint32_t binding, const VkPipelineColorBlendAttachmentState &att) {
    if (binding + 1 > m_colorAttachments.size()) {
        m_colorAttachments.resize(binding + 1);
    }
    m_colorAttachments[binding] = att;
}

void VkPipelineObj::SetDepthStencil(const VkPipelineDepthStencilStateCreateInfo *ds_state) { m_ds_state = ds_state; }

void VkPipelineObj::SetViewport(const vector<VkViewport> viewports) {
    m_viewports = viewports;
    // If we are explicitly given an empty viewport list, pass a null pointer through to the create info,
    // but preserve viewportCount because it mustn't change
    if (m_viewports.size() == 0) {
        m_vp_state.pViewports = nullptr;
    }
}

void VkPipelineObj::SetScissor(const vector<VkRect2D> scissors) {
    m_scissors = scissors;
    // If we are explicitly given an empty scissor list, pass a null pointer through to the create info,
    // but preserve scissorCount because it mustn't change
    if (m_scissors.size() == 0) {
        m_vp_state.pScissors = nullptr;
    }
}

void VkPipelineObj::MakeDynamic(VkDynamicState state) {
    /* Only add a state once */
    for (auto it = m_dynamic_state_enables.begin(); it != m_dynamic_state_enables.end(); it++) {
        if ((*it) == state) return;
    }
    m_dynamic_state_enables.push_back(state);
}
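
// Illustrative sketch (assumption): a test that wants to drive viewport and
// scissor from the command buffer rather than the pipeline marks them dynamic and
// records the corresponding vkCmdSet* calls; pipe and viewport are hypothetical.
//
//     pipe.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
//     pipe.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
//     ...
//     m_commandBuffer->SetViewport(0, 1, &viewport);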

void VkPipelineObj::SetMSAA(const VkPipelineMultisampleStateCreateInfo *ms_state) { m_ms_state = *ms_state; }

void VkPipelineObj::SetInputAssembly(const VkPipelineInputAssemblyStateCreateInfo *ia_state) { m_ia_state = *ia_state; }

void VkPipelineObj::SetRasterization(const VkPipelineRasterizationStateCreateInfo *rs_state) { m_rs_state = *rs_state; }

void VkPipelineObj::SetTessellation(const VkPipelineTessellationStateCreateInfo *te_state) { m_te_state = te_state; }

void VkPipelineObj::InitGraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo *gp_ci) {
    gp_ci->stageCount = m_shaderStages.size();
    gp_ci->pStages = m_shaderStages.size() ? m_shaderStages.data() : nullptr;

    m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    gp_ci->pVertexInputState = &m_vi_state;

    m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    gp_ci->pInputAssemblyState = &m_ia_state;

    gp_ci->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    gp_ci->pNext = NULL;
    gp_ci->flags = 0;

    m_cb_state.attachmentCount = m_colorAttachments.size();
    m_cb_state.pAttachments = m_colorAttachments.data();

    if (m_viewports.size() > 0) {
        m_vp_state.viewportCount = m_viewports.size();
        m_vp_state.pViewports = m_viewports.data();
    } else {
        MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
    }

    if (m_scissors.size() > 0) {
        m_vp_state.scissorCount = m_scissors.size();
        m_vp_state.pScissors = m_scissors.data();
    } else {
        MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
    }

    memset(&m_pd_state, 0, sizeof(m_pd_state));
    if (m_dynamic_state_enables.size() > 0) {
        m_pd_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
        m_pd_state.dynamicStateCount = m_dynamic_state_enables.size();
        m_pd_state.pDynamicStates = m_dynamic_state_enables.data();
        gp_ci->pDynamicState = &m_pd_state;
    }

    gp_ci->subpass = 0;
    gp_ci->pViewportState = &m_vp_state;
    gp_ci->pRasterizationState = &m_rs_state;
    gp_ci->pMultisampleState = &m_ms_state;
    gp_ci->pDepthStencilState = m_ds_state;
    gp_ci->pColorBlendState = &m_cb_state;
    gp_ci->pTessellationState = m_te_state;
}

VkResult VkPipelineObj::CreateVKPipeline(VkPipelineLayout layout, VkRenderPass render_pass, VkGraphicsPipelineCreateInfo *gp_ci) {
    VkGraphicsPipelineCreateInfo info = {};

    // if not given a CreateInfo, create and initialize a local one.
    if (gp_ci == nullptr) {
        gp_ci = &info;
        InitGraphicsPipelineCreateInfo(gp_ci);
    }

    gp_ci->layout = layout;
    gp_ci->renderPass = render_pass;

    return init_try(*m_device, *gp_ci);
}
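
// Illustrative sketch (assumption): the usual end-to-end flow a test follows to
// get a graphics pipeline out of these helpers; renderPass(), the shader objects
// vs/fs, and ds_layout are assumed to come from the surrounding test fixture.
//
//     VkPipelineObj pipe(m_device);
//     pipe.AddShader(&vs);
//     pipe.AddShader(&fs);
//     pipe.AddDefaultColorAttachment();
//     const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
//     ASSERT_VK_SUCCESS(pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass()));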

VkCommandBufferObj::VkCommandBufferObj(VkDeviceObj *device, VkCommandPoolObj *pool, VkCommandBufferLevel level, VkQueueObj *queue) {
    m_device = device;
    if (queue) {
        m_queue = queue;
    } else {
        m_queue = m_device->GetDefaultQueue();
    }
    assert(m_queue);

    auto create_info = vk_testing::CommandBuffer::create_info(pool->handle());
    create_info.level = level;
    init(*device, create_info);
}

void VkCommandBufferObj::PipelineBarrier(VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages,
                                         VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
                                         const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                                         const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                                         const VkImageMemoryBarrier *pImageMemoryBarriers) {
    vkCmdPipelineBarrier(handle(), src_stages, dest_stages, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
                         bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
}
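
// Illustrative sketch (assumption): a minimal image layout transition recorded
// through this wrapper; the image object and access masks are hypothetical.
//
//     VkImageMemoryBarrier barrier = {};
//     barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
//     barrier.srcAccessMask = 0;
//     barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
//     barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
//     barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
//     barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
//     barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
//     barrier.image = image.handle();
//     barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
//     m_commandBuffer->PipelineBarrier(VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
//                                      0, nullptr, 0, nullptr, 1, &barrier);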

void VkCommandBufferObj::ClearAllBuffers(const vector<std::unique_ptr<VkImageObj>> &color_objs, VkClearColorValue clear_color,
                                         VkDepthStencilObj *depth_stencil_obj, float depth_clear_value,
                                         uint32_t stencil_clear_value) {
    // whatever we want to do, we do it to the whole buffer
    VkImageSubresourceRange subrange = {};
    // subrange.aspectMask is set per attachment below
    subrange.baseMipLevel = 0;
    // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS
    subrange.levelCount = 1;  // VK_REMAINING_MIP_LEVELS;
    subrange.baseArrayLayer = 0;
    // TODO: Mesa crashing with VK_REMAINING_ARRAY_LAYERS
    subrange.layerCount = 1;  // VK_REMAINING_ARRAY_LAYERS;

    const VkImageLayout clear_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;

    for (const auto &color_obj : color_objs) {
        subrange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        color_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED);
        color_obj->SetLayout(this, subrange.aspectMask, clear_layout);
        ClearColorImage(color_obj->image(), clear_layout, &clear_color, 1, &subrange);
    }

    if (depth_stencil_obj && depth_stencil_obj->Initialized()) {
        subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
        if (FormatIsDepthOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
        if (FormatIsStencilOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;

        depth_stencil_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED);
        depth_stencil_obj->SetLayout(this, subrange.aspectMask, clear_layout);

        VkClearDepthStencilValue clear_value = {depth_clear_value, stencil_clear_value};
        ClearDepthStencilImage(depth_stencil_obj->handle(), clear_layout, &clear_value, 1, &subrange);
    }
}

void VkCommandBufferObj::FillBuffer(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize fill_size, uint32_t data) {
    vkCmdFillBuffer(handle(), buffer, offset, fill_size, data);
}

void VkCommandBufferObj::UpdateBuffer(VkBuffer buffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    vkCmdUpdateBuffer(handle(), buffer, dstOffset, dataSize, pData);
}

void VkCommandBufferObj::CopyImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                   uint32_t regionCount, const VkImageCopy *pRegions) {
    vkCmdCopyImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}

void VkCommandBufferObj::ResolveImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                      VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions) {
    vkCmdResolveImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}

void VkCommandBufferObj::ClearColorImage(VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                         uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    vkCmdClearColorImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}

void VkCommandBufferObj::ClearDepthStencilImage(VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pColor,
                                                uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    vkCmdClearDepthStencilImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}

void VkCommandBufferObj::PrepareAttachments(const vector<std::unique_ptr<VkImageObj>> &color_atts,
                                            VkDepthStencilObj *depth_stencil_att) {
    for (const auto &color_att : color_atts) {
        color_att->SetLayout(this, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
    }

    if (depth_stencil_att && depth_stencil_att->Initialized()) {
        VkImageAspectFlags aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
        if (FormatIsDepthOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
        if (FormatIsStencilOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_STENCIL_BIT;

        depth_stencil_att->SetLayout(this, aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
    }
}

void VkCommandBufferObj::BeginRenderPass(const VkRenderPassBeginInfo &info) {
    vkCmdBeginRenderPass(handle(), &info, VK_SUBPASS_CONTENTS_INLINE);
}

void VkCommandBufferObj::EndRenderPass() { vkCmdEndRenderPass(handle()); }

void VkCommandBufferObj::SetViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports) {
    vkCmdSetViewport(handle(), firstViewport, viewportCount, pViewports);
}

void VkCommandBufferObj::SetStencilReference(VkStencilFaceFlags faceMask, uint32_t reference) {
    vkCmdSetStencilReference(handle(), faceMask, reference);
}

void VkCommandBufferObj::DrawIndexed(uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                     uint32_t firstInstance) {
    vkCmdDrawIndexed(handle(), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}

void VkCommandBufferObj::Draw(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
    vkCmdDraw(handle(), vertexCount, instanceCount, firstVertex, firstInstance);
}

void VkCommandBufferObj::QueueCommandBuffer(bool checkSuccess) {
    VkFenceObj nullFence;
    QueueCommandBuffer(nullFence, checkSuccess);
}

void VkCommandBufferObj::QueueCommandBuffer(const VkFenceObj &fence, bool checkSuccess) {
    VkResult err = VK_SUCCESS;

    err = m_queue->submit(*this, fence, checkSuccess);
    if (checkSuccess) {
        ASSERT_VK_SUCCESS(err);
    }

    err = m_queue->wait();
    if (checkSuccess) {
        ASSERT_VK_SUCCESS(err);
    }

    // TODO: Determine if we really want this serialization here
    // Wait for work to finish before cleaning up.
    vkDeviceWaitIdle(m_device->device());
}
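
// Illustrative sketch (assumption): the typical record/submit cycle a test drives
// through this wrapper; begin() and end() are assumed to come from the
// vk_testing::CommandBuffer base class, and m_renderPassBeginInfo from the fixture.
//
//     m_commandBuffer->begin();
//     m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
//     m_commandBuffer->Draw(3, 1, 0, 0);
//     m_commandBuffer->EndRenderPass();
//     m_commandBuffer->end();
//     m_commandBuffer->QueueCommandBuffer();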

void VkCommandBufferObj::BindDescriptorSet(VkDescriptorSetObj &descriptorSet) {
    VkDescriptorSet set_obj = descriptorSet.GetDescriptorSetHandle();

    // Bind the descriptor set only if it actually contains descriptors
    if (set_obj) {
        vkCmdBindDescriptorSets(handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, descriptorSet.GetPipelineLayout(), 0, 1, &set_obj, 0,
                                NULL);
    }
}

void VkCommandBufferObj::BindIndexBuffer(VkBufferObj *indexBuffer, VkDeviceSize offset, VkIndexType indexType) {
    vkCmdBindIndexBuffer(handle(), indexBuffer->handle(), offset, indexType);
}

void VkCommandBufferObj::BindVertexBuffer(VkConstantBufferObj *vertexBuffer, VkDeviceSize offset, uint32_t binding) {
    vkCmdBindVertexBuffers(handle(), binding, 1, &vertexBuffer->handle(), &offset);
}

VkCommandPoolObj::VkCommandPoolObj(VkDeviceObj *device, uint32_t queue_family_index, VkCommandPoolCreateFlags flags) {
    init(*device, vk_testing::CommandPool::create_info(queue_family_index, flags));
}

bool VkDepthStencilObj::Initialized() { return m_initialized; }

VkDepthStencilObj::VkDepthStencilObj(VkDeviceObj *device) : VkImageObj(device) { m_initialized = false; }

VkImageView *VkDepthStencilObj::BindInfo() { return &m_attachmentBindInfo; }

VkFormat VkDepthStencilObj::Format() const { return this->m_depth_stencil_fmt; }

void VkDepthStencilObj::Init(VkDeviceObj *device, int32_t width, int32_t height, VkFormat format, VkImageUsageFlags usage) {
    VkImageViewCreateInfo view_info = {};

    m_device = device;
    m_initialized = true;
    m_depth_stencil_fmt = format;

    /* create image */
    VkImageObj::Init(width, height, 1, m_depth_stencil_fmt, usage, VK_IMAGE_TILING_OPTIMAL);

    VkImageAspectFlags aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
    if (FormatIsDepthOnly(format))
        aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
    else if (FormatIsStencilOnly(format))
        aspect = VK_IMAGE_ASPECT_STENCIL_BIT;

    SetLayout(aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);

    view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    view_info.pNext = NULL;
    view_info.image = VK_NULL_HANDLE;
    view_info.subresourceRange.aspectMask = aspect;
    view_info.subresourceRange.baseMipLevel = 0;
    view_info.subresourceRange.levelCount = 1;
    view_info.subresourceRange.baseArrayLayer = 0;
    view_info.subresourceRange.layerCount = 1;
    view_info.flags = 0;
    view_info.format = m_depth_stencil_fmt;
    view_info.image = handle();
    view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
    m_imageView.init(*m_device, view_info);

    m_attachmentBindInfo = m_imageView.handle();
}
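
// Illustrative sketch (assumption): how a test sets up the default depth/stencil
// attachment before rendering; m_depthStencil, m_width, and m_height mirror members
// used elsewhere in VkRenderFramework, and FindSupportedDepthStencilFormat is
// assumed to be the test suite's format-picking helper.
//
//     VkFormat depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
//     m_depthStencil->Init(m_device, static_cast<int32_t>(m_width), static_cast<int32_t>(m_height), depth_stencil_fmt,
//                          VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT);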