1 /*
2 * Copyright (c) 2015-2019 The Khronos Group Inc.
3 * Copyright (c) 2015-2019 Valve Corporation
4 * Copyright (c) 2015-2019 LunarG, Inc.
5 * Copyright (c) 2015-2019 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Author: Chia-I Wu <olvaffe@gmail.com>
14 * Author: Chris Forbes <chrisf@ijw.co.nz>
15 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
16 * Author: Mark Lobodzinski <mark@lunarg.com>
17 * Author: Mike Stroyan <mike@LunarG.com>
18 * Author: Tobin Ehlis <tobine@google.com>
19 * Author: Tony Barbour <tony@LunarG.com>
20 * Author: Cody Northrop <cnorthrop@google.com>
21 * Author: Dave Houlton <daveh@lunarg.com>
22 * Author: Jeremy Kniager <jeremyk@lunarg.com>
23 * Author: Shannon McPherson <shannon@lunarg.com>
24 * Author: John Zulauf <jzulauf@lunarg.com>
25 */
26
27 #include "cast_utils.h"
28 #include "layer_validation_tests.h"
29 //
30 // POSITIVE VALIDATION TESTS
31 //
// These tests do not expect to encounter ANY validation errors; they pass only if that is true
33
TEST_F(VkPositiveLayerTest, NullFunctionPointer) {
    TEST_DESCRIPTION("On 1_0 instance , call GetDeviceProcAddr on promoted 1_1 device-level entrypoint");
    SetTargetApiVersion(VK_API_VERSION_1_0);

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // Enable the KHR extension so the device supports the functionality; the query
    // below deliberately asks for the promoted (suffix-less) 1.1 core name instead.
    if (DeviceExtensionSupported(gpu(), nullptr, "VK_KHR_get_memory_requirements2")) {
        m_device_extension_names.push_back("VK_KHR_get_memory_requirements2");
    } else {
        // Fixed typo: message previously printed "VK_KHR_get_memory_reqirements2".
        printf("%s VK_KHR_get_memory_requirements2 extension not supported, skipping NullFunctionPointer test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    m_errorMonitor->ExpectSuccess();
    // On a 1.0 device the core "vkGetBufferMemoryRequirements2" entrypoint must not
    // resolve (only the "...2KHR" alias is valid), so a null pointer is the pass case.
    auto fpGetBufferMemoryRequirements =
        (PFN_vkGetBufferMemoryRequirements2)vkGetDeviceProcAddr(m_device->device(), "vkGetBufferMemoryRequirements2");
    if (fpGetBufferMemoryRequirements) {
        m_errorMonitor->SetError("Null was expected!");
    }
    m_errorMonitor->VerifyNotFound();
}
57
TEST_F(VkPositiveLayerTest, SecondaryCommandBufferBarrier) {
    TEST_DESCRIPTION("Add a pipeline barrier in a secondary command buffer");
    ASSERT_NO_FATAL_FAILURE(Init());

    m_errorMonitor->ExpectSuccess();

    // A renderpass with a single subpass that declared a self-dependency.
    // The self-dependency is what makes a vkCmdPipelineBarrier inside the render pass legal.
    VkAttachmentDescription attach[] = {
        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
    };
    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
    VkSubpassDescription subpasses[] = {
        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
    };
    // srcSubpass == dstSubpass == 0: subpass 0 depends on itself, fragment-shader write -> write.
    VkSubpassDependency dep = {0,
                               0,
                               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                               VK_ACCESS_SHADER_WRITE_BIT,
                               VK_ACCESS_SHADER_WRITE_BIT,
                               VK_DEPENDENCY_BY_REGION_BIT};
    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
    VkRenderPass rp;

    VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);

    VkImageObj image(m_device);
    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);

    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
    VkFramebuffer fb;
    err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    m_commandBuffer->begin();

    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
                                  nullptr,
                                  rp,
                                  fb,
                                  {{
                                       0,
                                       0,
                                   },
                                   {32, 32}},
                                  0,
                                  nullptr};

    // SECONDARY_COMMAND_BUFFERS contents: everything in the subpass must come from secondaries.
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);

    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
    VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);

    // Secondary inherits the render pass / subpass it will continue.
    VkCommandBufferInheritanceInfo cbii = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
                                           nullptr,
                                           rp,
                                           0,
                                           VK_NULL_HANDLE,  // Set to NULL FB handle intentionally to flesh out any errors
                                           VK_FALSE,
                                           0,
                                           0};
    VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
                                     VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
                                     &cbii};
    vkBeginCommandBuffer(secondary.handle(), &cbbi);
    // Global memory barrier whose stages/access exactly match the subpass self-dependency above.
    VkMemoryBarrier mem_barrier = {};
    mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
    mem_barrier.pNext = NULL;
    mem_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
    vkCmdPipelineBarrier(secondary.handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                         VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr, 0, nullptr);

    // Image barrier with matching old/new layout (no transition), recorded in the secondary.
    image.ImageMemoryBarrier(&secondary, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
                             VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                             VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
    secondary.end();

    vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();

    // Submit and idle so queue-time validation also runs before we verify.
    VkSubmitInfo submit_info = {};
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &m_commandBuffer->handle();
    vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    vkQueueWaitIdle(m_device->m_queue);

    vkDestroyFramebuffer(m_device->device(), fb, nullptr);
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
    m_errorMonitor->VerifyNotFound();
}
155
TEST_F(VkPositiveLayerTest, RenderPassCreateAttachmentUsedTwiceOK) {
    TEST_DESCRIPTION("Attachment is used simultaneously as color and input, with the same layout. This is OK.");

    ASSERT_NO_FATAL_FAILURE(Init());

    // One attachment kept in GENERAL layout so it may legally be read and written in the same subpass.
    VkAttachmentDescription attach[] = {
        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_DONT_CARE,
         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
    };
    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_GENERAL};
    // The same reference is used as both the single input attachment and the single color attachment.
    VkSubpassDescription subpasses[] = {
        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 1, &ref, nullptr, nullptr, 0, nullptr},
    };

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
    VkRenderPass rp;

    m_errorMonitor->ExpectSuccess();
    // Capture and check the result: previously the handle was destroyed without verifying
    // creation succeeded, which would pass an uninitialized handle to vkDestroyRenderPass.
    VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    m_errorMonitor->VerifyNotFound();
    ASSERT_VK_SUCCESS(err);
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
}
178
TEST_F(VkPositiveLayerTest, RenderPassCreateInitialLayoutUndefined) {
    TEST_DESCRIPTION(
        "Ensure that CmdBeginRenderPass with an attachment's initialLayout of VK_IMAGE_LAYOUT_UNDEFINED works when the command "
        "buffer has prior knowledge of that attachment's layout.");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // A renderpass with one color attachment.
    // initialLayout UNDEFINED, finalLayout COLOR_ATTACHMENT_OPTIMAL: the second begin below
    // sees the attachment already in the final layout.
    VkAttachmentDescription attachment = {0,
                                          VK_FORMAT_R8G8B8A8_UNORM,
                                          VK_SAMPLE_COUNT_1_BIT,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_STORE,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                          VK_IMAGE_LAYOUT_UNDEFINED,
                                          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};

    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};

    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};

    VkRenderPass rp;
    VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);

    // A compatible framebuffer.
    VkImageObj image(m_device);
    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());

    VkImageViewCreateInfo ivci = {
        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        nullptr,
        0,
        image.handle(),
        VK_IMAGE_VIEW_TYPE_2D,
        VK_FORMAT_R8G8B8A8_UNORM,
        {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
         VK_COMPONENT_SWIZZLE_IDENTITY},
        {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
    };
    VkImageView view;
    err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
    ASSERT_VK_SUCCESS(err);

    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
    VkFramebuffer fb;
    err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    // Record a single command buffer which uses this renderpass twice. The
    // bug is triggered at the beginning of the second renderpass, when the
    // command buffer already has a layout recorded for the attachment.
    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
    m_commandBuffer->begin();
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdEndRenderPass(m_commandBuffer->handle());
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);

    // Verify before tearing down; the interesting validation happens at the second begin above.
    m_errorMonitor->VerifyNotFound();

    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();

    vkDestroyFramebuffer(m_device->device(), fb, nullptr);
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
    vkDestroyImageView(m_device->device(), view, nullptr);
}
252
TEST_F(VkPositiveLayerTest, RenderPassCreateAttachmentLayoutWithLoadOpThenReadOnly) {
    TEST_DESCRIPTION(
        "Positive test where we create a renderpass with an attachment that uses LOAD_OP_CLEAR, the first subpass has a valid "
        "layout, and a second subpass then uses a valid *READ_ONLY* layout.");
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(Init());
    auto depth_format = FindSupportedDepthStencilFormat(gpu());
    if (!depth_format) {
        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
        return;
    }

    // Two references to the same attachment: writable DS in subpass 0, read-only input in subpass 1.
    VkAttachmentReference attach[2] = {};
    attach[0].attachment = 0;
    attach[0].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    attach[1].attachment = 0;
    attach[1].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
    VkSubpassDescription subpasses[2] = {};
    // First subpass clears DS attach on load
    subpasses[0].pDepthStencilAttachment = &attach[0];
    // 2nd subpass reads in DS as input attachment
    subpasses[1].inputAttachmentCount = 1;
    subpasses[1].pInputAttachments = &attach[1];
    VkAttachmentDescription attach_desc = {};
    attach_desc.format = depth_format;
    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
    attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
    attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
    VkRenderPassCreateInfo rpci = {};
    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    rpci.attachmentCount = 1;
    rpci.pAttachments = &attach_desc;
    rpci.subpassCount = 2;
    rpci.pSubpasses = subpasses;

    // Now create RenderPass and verify no errors
    // Capture and check the result: previously the handle was destroyed without verifying
    // creation succeeded, which would pass an uninitialized handle to vkDestroyRenderPass.
    VkRenderPass rp;
    VkResult err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
    m_errorMonitor->VerifyNotFound();
    ASSERT_VK_SUCCESS(err);

    vkDestroyRenderPass(m_device->device(), rp, NULL);
}
299
TEST_F(VkPositiveLayerTest, RenderPassBeginSubpassZeroTransitionsApplied) {
    TEST_DESCRIPTION("Ensure that CmdBeginRenderPass applies the layout transitions for the first subpass");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // A renderpass with one color attachment.
    // UNDEFINED -> COLOR_ATTACHMENT_OPTIMAL: the transition happens at render pass begin.
    VkAttachmentDescription attachment = {0,
                                          VK_FORMAT_R8G8B8A8_UNORM,
                                          VK_SAMPLE_COUNT_1_BIT,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_STORE,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                          VK_IMAGE_LAYOUT_UNDEFINED,
                                          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};

    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};

    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};

    // Self-dependency on subpass 0 so the in-renderpass barrier recorded below is legal.
    VkSubpassDependency dep = {0,
                               0,
                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                               VK_DEPENDENCY_BY_REGION_BIT};

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 1, &dep};

    VkResult err;
    VkRenderPass rp;
    err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);

    // A compatible framebuffer.
    VkImageObj image(m_device);
    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());

    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);

    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
    VkFramebuffer fb;
    err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    // Record a single command buffer which issues a pipeline barrier w/
    // image memory barrier for the attachment. This detects the previously
    // missing tracking of the subpass layout by throwing a validation error
    // if it doesn't occur.
    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
    m_commandBuffer->begin();
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);

    // Barrier keeps old == new layout (COLOR_ATTACHMENT_OPTIMAL), valid only if the layer
    // tracked the transition performed by CmdBeginRenderPass.
    image.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                             VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                             VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT);

    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_errorMonitor->VerifyNotFound();
    m_commandBuffer->end();

    vkDestroyFramebuffer(m_device->device(), fb, nullptr);
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
}
368
TEST_F(VkPositiveLayerTest, RenderPassBeginTransitionsAttachmentUnused) {
    TEST_DESCRIPTION(
        "Ensure that layout transitions work correctly without errors, when an attachment reference is VK_ATTACHMENT_UNUSED");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // The only color reference points at VK_ATTACHMENT_UNUSED, so the render pass
    // carries no attachments at all and nothing should be transitioned.
    VkAttachmentReference unused_ref = {};
    unused_ref.attachment = VK_ATTACHMENT_UNUSED;
    unused_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    VkSubpassDescription sp_desc = {};
    sp_desc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    sp_desc.colorAttachmentCount = 1;
    sp_desc.pColorAttachments = &unused_ref;

    VkRenderPassCreateInfo rp_ci = {};
    rp_ci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    rp_ci.subpassCount = 1;
    rp_ci.pSubpasses = &sp_desc;

    VkRenderPass render_pass;
    VkResult result = vkCreateRenderPass(m_device->device(), &rp_ci, nullptr, &render_pass);
    ASSERT_VK_SUCCESS(result);

    // A matching framebuffer with zero attachments.
    VkFramebufferCreateInfo fb_ci = {};
    fb_ci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    fb_ci.renderPass = render_pass;
    fb_ci.width = 32;
    fb_ci.height = 32;
    fb_ci.layers = 1;
    VkFramebuffer framebuffer;
    result = vkCreateFramebuffer(m_device->device(), &fb_ci, nullptr, &framebuffer);
    ASSERT_VK_SUCCESS(result);

    // Just begin and end the render pass; the historical bug fired in BeginRenderPass.
    VkRenderPassBeginInfo begin_info = {};
    begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    begin_info.renderPass = render_pass;
    begin_info.framebuffer = framebuffer;
    begin_info.renderArea = {{0, 0}, {32, 32}};
    m_commandBuffer->begin();
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &begin_info, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_errorMonitor->VerifyNotFound();
    m_commandBuffer->end();

    vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
    vkDestroyRenderPass(m_device->device(), render_pass, nullptr);
}
406
TEST_F(VkPositiveLayerTest, RenderPassBeginStencilLoadOp) {
    TEST_DESCRIPTION("Create a stencil-only attachment with a LOAD_OP set to CLEAR. stencil[Load|Store]Op used to be ignored.");
    VkResult result = VK_SUCCESS;
    ASSERT_NO_FATAL_FAILURE(Init());
    auto depth_format = FindSupportedDepthStencilFormat(gpu());
    if (!depth_format) {
        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
        return;
    }
    // Skip if the implementation cannot make a 100x100 DS image with the usages we need.
    VkImageFormatProperties formatProps;
    vkGetPhysicalDeviceImageFormatProperties(gpu(), depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
                                             VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0,
                                             &formatProps);
    if (formatProps.maxExtent.width < 100 || formatProps.maxExtent.height < 100) {
        printf("%s Image format max extent is too small.\n", kSkipPrefix);
        return;
    }

    VkFormat depth_stencil_fmt = depth_format;
    m_depthStencil->Init(m_device, 100, 100, depth_stencil_fmt,
                         VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
    // Depth ops DONT_CARE, stencil CLEAR/STORE: exercises the stencil-specific ops.
    VkAttachmentDescription att = {};
    VkAttachmentReference ref = {};
    att.format = depth_stencil_fmt;
    att.samples = VK_SAMPLE_COUNT_1_BIT;
    att.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    att.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
    att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
    att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

    VkClearValue clear;
    clear.depthStencil.depth = 1.0;
    clear.depthStencil.stencil = 0;
    ref.attachment = 0;
    ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

    // Depth/stencil-only subpass (no color attachments).
    VkSubpassDescription subpass = {};
    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.flags = 0;
    subpass.inputAttachmentCount = 0;
    subpass.pInputAttachments = NULL;
    subpass.colorAttachmentCount = 0;
    subpass.pColorAttachments = NULL;
    subpass.pResolveAttachments = NULL;
    subpass.pDepthStencilAttachment = &ref;
    subpass.preserveAttachmentCount = 0;
    subpass.pPreserveAttachments = NULL;

    VkRenderPass rp;
    VkRenderPassCreateInfo rp_info = {};
    rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    rp_info.attachmentCount = 1;
    rp_info.pAttachments = &att;
    rp_info.subpassCount = 1;
    rp_info.pSubpasses = &subpass;
    result = vkCreateRenderPass(device(), &rp_info, NULL, &rp);
    ASSERT_VK_SUCCESS(result);

    VkImageView *depthView = m_depthStencil->BindInfo();
    VkFramebufferCreateInfo fb_info = {};
    fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    fb_info.pNext = NULL;
    fb_info.renderPass = rp;
    fb_info.attachmentCount = 1;
    fb_info.pAttachments = depthView;
    fb_info.width = 100;
    fb_info.height = 100;
    fb_info.layers = 1;
    VkFramebuffer fb;
    result = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
    ASSERT_VK_SUCCESS(result);

    VkRenderPassBeginInfo rpbinfo = {};
    rpbinfo.clearValueCount = 1;
    rpbinfo.pClearValues = &clear;
    rpbinfo.pNext = NULL;
    rpbinfo.renderPass = rp;
    rpbinfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    rpbinfo.renderArea.extent.width = 100;
    rpbinfo.renderArea.extent.height = 100;
    rpbinfo.renderArea.offset.x = 0;
    rpbinfo.renderArea.offset.y = 0;
    rpbinfo.framebuffer = fb;

    VkFenceObj fence;
    fence.init(*m_device, VkFenceObj::create_info());
    ASSERT_TRUE(fence.initialized());

    // Empty render pass instance: the load/store ops themselves are the work under test.
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(rpbinfo);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer(fence);

    VkImageObj destImage(m_device);
    destImage.Init(100, 100, 1, depth_stencil_fmt, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
                   VK_IMAGE_TILING_OPTIMAL, 0);
    // Wait for the render pass submission before recording the copy that reads its result.
    fence.wait(VK_TRUE, UINT64_MAX);
    VkCommandBufferObj cmdbuf(m_device, m_commandPool);
    cmdbuf.begin();

    // Transition the rendered DS image to TRANSFER_SRC and the destination to TRANSFER_DST.
    m_depthStencil->ImageMemoryBarrier(&cmdbuf, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
                                       VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                       VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
                                       VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

    destImage.ImageMemoryBarrier(&cmdbuf, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
                                 VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, 0,
                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
    // Full-image copy of both depth and stencil aspects.
    VkImageCopy cregion;
    cregion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
    cregion.srcSubresource.mipLevel = 0;
    cregion.srcSubresource.baseArrayLayer = 0;
    cregion.srcSubresource.layerCount = 1;
    cregion.srcOffset.x = 0;
    cregion.srcOffset.y = 0;
    cregion.srcOffset.z = 0;
    cregion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
    cregion.dstSubresource.mipLevel = 0;
    cregion.dstSubresource.baseArrayLayer = 0;
    cregion.dstSubresource.layerCount = 1;
    cregion.dstOffset.x = 0;
    cregion.dstOffset.y = 0;
    cregion.dstOffset.z = 0;
    cregion.extent.width = 100;
    cregion.extent.height = 100;
    cregion.extent.depth = 1;
    cmdbuf.CopyImage(m_depthStencil->handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, destImage.handle(),
                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &cregion);
    cmdbuf.end();

    VkSubmitInfo submit_info;
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.pNext = NULL;
    submit_info.waitSemaphoreCount = 0;
    submit_info.pWaitSemaphores = NULL;
    submit_info.pWaitDstStageMask = NULL;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &cmdbuf.handle();
    submit_info.signalSemaphoreCount = 0;
    submit_info.pSignalSemaphores = NULL;

    // Only the copy submission is under the success expectation.
    m_errorMonitor->ExpectSuccess();
    vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    m_errorMonitor->VerifyNotFound();

    vkQueueWaitIdle(m_device->m_queue);
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
    vkDestroyFramebuffer(m_device->device(), fb, nullptr);
}
559
TEST_F(VkPositiveLayerTest, RenderPassBeginInlineAndSecondaryCommandBuffers) {
    // Added a TEST_DESCRIPTION for consistency with the other tests in this file.
    TEST_DESCRIPTION(
        "Begin and end a render pass with VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, then again with "
        "VK_SUBPASS_CONTENTS_INLINE, in the same command buffer without errors");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    m_commandBuffer->begin();

    // First instance uses SECONDARY_COMMAND_BUFFERS contents (with no secondaries executed).
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_errorMonitor->VerifyNotFound();
    // Second instance of the same render pass uses INLINE contents.
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    m_errorMonitor->VerifyNotFound();
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
579
TEST_F(VkPositiveLayerTest, RenderPassBeginDepthStencilLayoutTransitionFromUndefined) {
    TEST_DESCRIPTION(
        "Create a render pass with depth-stencil attachment where layout transition from UNDEFINED TO DS_READ_ONLY_OPTIMAL is set "
        "by render pass and verify that transition has correctly occurred at queue submit time with no validation errors.");

    ASSERT_NO_FATAL_FAILURE(Init());
    auto depth_format = FindSupportedDepthStencilFormat(gpu());
    if (!depth_format) {
        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
        return;
    }
    // Skip if a 32x32 DS attachment image is not supported.
    VkImageFormatProperties format_props;
    vkGetPhysicalDeviceImageFormatProperties(gpu(), depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
                                             VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, 0, &format_props);
    if (format_props.maxExtent.width < 32 || format_props.maxExtent.height < 32) {
        printf("%s Depth extent too small, RenderPassDepthStencilLayoutTransition skipped.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // A renderpass with one depth/stencil attachment.
    // UNDEFINED initialLayout: the render pass performs the transition itself.
    VkAttachmentDescription attachment = {0,
                                          depth_format,
                                          VK_SAMPLE_COUNT_1_BIT,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                          VK_IMAGE_LAYOUT_UNDEFINED,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};

    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};

    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &att_ref, 0, nullptr};

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};

    VkRenderPass rp;
    VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);
    // A compatible ds image.
    VkImageObj image(m_device);
    image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());

    VkImageViewCreateInfo ivci = {
        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        nullptr,
        0,
        image.handle(),
        VK_IMAGE_VIEW_TYPE_2D,
        depth_format,
        {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
         VK_COMPONENT_SWIZZLE_IDENTITY},
        {VK_IMAGE_ASPECT_DEPTH_BIT, 0, 1, 0, 1},
    };
    VkImageView view;
    err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
    ASSERT_VK_SUCCESS(err);

    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
    VkFramebuffer fb;
    err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    // Empty render pass instance, then submit: the layout transition is validated at submit time.
    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
    m_commandBuffer->begin();
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer(false);
    m_errorMonitor->VerifyNotFound();

    // Cleanup
    vkDestroyImageView(m_device->device(), view, NULL);
    vkDestroyRenderPass(m_device->device(), rp, NULL);
    vkDestroyFramebuffer(m_device->device(), fb, NULL);
}
660
TEST_F(VkPositiveLayerTest, DestroyPipelineRenderPass) {
    TEST_DESCRIPTION("Draw using a pipeline whose create renderPass has been destroyed.");
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkResult err;

    // Create a renderPass that's compatible with Draw-time renderPass
    VkAttachmentDescription att = {};
    att.format = m_render_target_fmt;
    att.samples = VK_SAMPLE_COUNT_1_BIT;
    att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
    att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    VkAttachmentReference ref = {};
    ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    ref.attachment = 0;

    // Removed an unused `VkClearValue clear` local that was assigned from m_clear_color
    // but never handed to any API call (dead code).
    m_renderPassClearValues.clear();

    VkSubpassDescription subpass = {};
    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.flags = 0;
    subpass.inputAttachmentCount = 0;
    subpass.pInputAttachments = NULL;
    subpass.colorAttachmentCount = 1;
    subpass.pColorAttachments = &ref;
    subpass.pResolveAttachments = NULL;

    subpass.pDepthStencilAttachment = NULL;
    subpass.preserveAttachmentCount = 0;
    subpass.pPreserveAttachments = NULL;

    VkRenderPassCreateInfo rp_info = {};
    rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    rp_info.attachmentCount = 1;
    rp_info.pAttachments = &att;
    rp_info.subpassCount = 1;
    rp_info.pSubpasses = &subpass;

    VkRenderPass rp;
    err = vkCreateRenderPass(device(), &rp_info, NULL, &rp);
    ASSERT_VK_SUCCESS(err);

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipe(m_device);
    pipe.AddDefaultColorAttachment();
    pipe.AddShader(&vs);
    pipe.AddShader(&fs);
    VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
    m_viewports.push_back(viewport);
    pipe.SetViewport(m_viewports);
    VkRect2D rect = {{0, 0}, {64, 64}};
    m_scissors.push_back(rect);
    pipe.SetScissor(m_scissors);

    // Pipeline is created against `rp`; drawing happens in the (compatible) default render pass.
    const VkPipelineLayoutObj pl(m_device);
    pipe.CreateVKPipeline(pl.handle(), rp);

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
    // Destroy renderPass before pipeline is used in Draw
    // We delay until after CmdBindPipeline to verify that invalid binding isn't
    // created between CB & renderPass, which we used to do.
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
    vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();

    VkSubmitInfo submit_info = {};
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &m_commandBuffer->handle();
    vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    m_errorMonitor->VerifyNotFound();
    vkQueueWaitIdle(m_device->m_queue);
}
748
TEST_F(VkPositiveLayerTest, BasicQuery) {
    TEST_DESCRIPTION("Use a couple occlusion queries");
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Host-visible destination buffer for vkCmdCopyQueryPoolResults (room for two
    // 64-bit results copied at a stride of sizeof(uint64_t)).
    uint32_t queue_family_index = 0;
    VkBufferCreateInfo buffer_ci = {};
    buffer_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buffer_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    buffer_ci.size = 4 * sizeof(uint64_t);
    buffer_ci.queueFamilyIndexCount = 1;
    buffer_ci.pQueueFamilyIndices = &queue_family_index;
    VkBufferObj buffer;
    buffer.init(*m_device, buffer_ci, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);

    // Occlusion query pool with two query slots.
    VkQueryPoolCreateInfo query_pool_ci = {};
    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
    query_pool_ci.pNext = NULL;
    query_pool_ci.queryType = VK_QUERY_TYPE_OCCLUSION;
    query_pool_ci.flags = 0;
    query_pool_ci.queryCount = 2;
    query_pool_ci.pipelineStatistics = 0;

    VkQueryPool query_pool;
    VkResult res = vkCreateQueryPool(m_device->handle(), &query_pool_ci, NULL, &query_pool);
    ASSERT_VK_SUCCESS(res);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    // Query 0 brackets no work at all; query 1 brackets a single triangle draw.
    m_commandBuffer->begin();
    vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 2);
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vkCmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
    vkCmdEndQuery(m_commandBuffer->handle(), query_pool, 0);
    vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    vkCmdBeginQuery(m_commandBuffer->handle(), query_pool, 1, 0);
    vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    vkCmdEndRenderPass(m_commandBuffer->handle());
    vkCmdEndQuery(m_commandBuffer->handle(), query_pool, 1);
    vkCmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 2, buffer.handle(), 0, sizeof(uint64_t),
                              VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
    m_commandBuffer->end();

    VkSubmitInfo submit_info = {};
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &m_commandBuffer->handle();
    vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);

    vkQueueWaitIdle(m_device->m_queue);

    // Also fetch both results directly on the host with WAIT semantics.
    uint64_t samples_passed[4];
    res = vkGetQueryPoolResults(m_device->handle(), query_pool, 0, 2, sizeof(samples_passed), samples_passed, sizeof(uint64_t),
                                VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
    ASSERT_VK_SUCCESS(res);
    m_errorMonitor->VerifyNotFound();
    vkDestroyQueryPool(m_device->handle(), query_pool, NULL);
}
811
TEST_F(VkPositiveLayerTest, MultiplaneGetImageSubresourceLayout) {
    TEST_DESCRIPTION("Positive test, query layout of a single plane of a multiplane image. (repro Github #2530)");

    // Enable KHR multiplane req'd extensions.
    // Fix: this check previously passed VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION
    // (the spec version of an unrelated extension) as the required revision of
    // VK_KHR_get_physical_device_properties2 — a copy-paste slip that could wrongly
    // gate the test on some loaders. Any revision of the extension suffices here.
    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    if (mp_extensions) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    // All four device extensions below are required for sampler Y'CbCr conversion.
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    if (mp_extensions) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    } else {
        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Linear-tiled 3-plane 4:2:0 image so plane layouts can be queried directly.
    VkImageCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    ci.pNext = NULL;
    ci.flags = 0;
    ci.imageType = VK_IMAGE_TYPE_2D;
    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
    ci.extent = {128, 128, 1};
    ci.mipLevels = 1;
    ci.arrayLayers = 1;
    ci.samples = VK_SAMPLE_COUNT_1_BIT;
    ci.tiling = VK_IMAGE_TILING_LINEAR;
    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    // Verify format support before attempting creation.
    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
    if (!supported) {
        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
        return;  // Assume there's low ROI on searching for different mp formats
    }

    VkImage image;
    VkResult err = vkCreateImage(device(), &ci, NULL, &image);
    ASSERT_VK_SUCCESS(err);

    // Query layout of 3rd plane; this must not trigger any validation error.
    VkImageSubresource subres = {};
    subres.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
    subres.mipLevel = 0;
    subres.arrayLayer = 0;
    VkSubresourceLayout layout = {};

    m_errorMonitor->ExpectSuccess();
    vkGetImageSubresourceLayout(device(), image, &subres, &layout);
    m_errorMonitor->VerifyNotFound();

    vkDestroyImage(device(), image, NULL);
}
876
TEST_F(VkPositiveLayerTest, OwnershipTranfersImage) {
    TEST_DESCRIPTION("Valid image ownership transfers that shouldn't create errors");
    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));

    // A queue family without graphics capability is the "other" side of the transfer.
    const uint32_t other_family = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
    if (other_family == UINT32_MAX) {
        printf("%s Required queue families not present (non-graphics capable required).\n", kSkipPrefix);
        return;
    }
    VkQueueObj *other_queue = m_device->queue_family_queues(other_family)[0].get();

    VkCommandPoolObj other_pool(m_device, other_family, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
    VkCommandBufferObj other_cb(m_device, &other_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, other_queue);

    // Create an "exclusive" image owned by the graphics queue.
    VkImageObj image(m_device);
    const VkFlags image_use = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, image_use, VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());
    auto subres_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
    auto barrier = image.image_memory_barrier(0, 0, image.Layout(), image.Layout(), subres_range);

    // Release on the graphics family, acquire on the non-graphics family.
    barrier.srcQueueFamilyIndex = m_device->graphics_queue_node_index_;
    barrier.dstQueueFamilyIndex = other_family;
    ValidOwnershipTransfer(m_errorMonitor, m_commandBuffer, &other_cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
                           VK_PIPELINE_STAGE_TRANSFER_BIT, nullptr, &barrier);

    // Transfer ownership back to graphics while simultaneously changing layout;
    // pick whichever new layout differs from the current one.
    barrier.srcQueueFamilyIndex = other_family;
    barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
    barrier.oldLayout = image.Layout();
    barrier.newLayout = (barrier.oldLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
                            ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
                            : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    ValidOwnershipTransfer(m_errorMonitor, &other_cb, m_commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
                           VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, nullptr, &barrier);
}
918
TEST_F(VkPositiveLayerTest, OwnershipTranfersBuffer) {
    TEST_DESCRIPTION("Valid buffer ownership transfers that shouldn't create errors");
    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));

    // A queue family without graphics capability is the "other" side of the transfer.
    const uint32_t other_family = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
    if (other_family == UINT32_MAX) {
        printf("%s Required queue families not present (non-graphics capable required).\n", kSkipPrefix);
        return;
    }
    VkQueueObj *other_queue = m_device->queue_family_queues(other_family)[0].get();

    VkCommandPoolObj other_pool(m_device, other_family, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
    VkCommandBufferObj other_cb(m_device, &other_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, other_queue);

    // Create a buffer whose whole range will be transferred between families.
    const VkDeviceSize buffer_size = 256;
    uint8_t data[buffer_size] = {0xFF};
    VkConstantBufferObj buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
    ASSERT_TRUE(buffer.initialized());
    auto barrier = buffer.buffer_memory_barrier(0, 0, 0, VK_WHOLE_SIZE);

    // Same-family "transfer": graphics keeps ownership.
    barrier.srcQueueFamilyIndex = m_device->graphics_queue_node_index_;
    barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
    ValidOwnershipTransferOp(m_errorMonitor, m_commandBuffer, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
                             &barrier, nullptr);

    // Release from graphics, acquire on the non-graphics family.
    barrier.dstQueueFamilyIndex = other_family;
    ValidOwnershipTransfer(m_errorMonitor, m_commandBuffer, &other_cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
                           VK_PIPELINE_STAGE_TRANSFER_BIT, &barrier, nullptr);

    // And transfer ownership back to the graphics family.
    barrier.srcQueueFamilyIndex = other_family;
    barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
    ValidOwnershipTransfer(m_errorMonitor, &other_cb, m_commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
                           VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, &barrier, nullptr);
}
957
TEST_F(VkPositiveLayerTest, LayoutFromPresentWithoutAccessMemoryRead) {
    // Transition an image away from PRESENT_SRC_KHR without ACCESS_MEMORY_READ
    // in srcAccessMask. The required behavior here was a bit unclear in earlier
    // versions of the spec, but there is no memory dependency required here, so
    // this should work without warnings.
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(Init());

    VkImageObj image(m_device);
    image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
               VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());

    // Whole color subresource of the single-mip, single-layer image.
    const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};

    // First: COLOR_ATTACHMENT_OPTIMAL -> PRESENT_SRC_KHR with dstAccessMask = 0.
    VkImageMemoryBarrier img_barrier = {};
    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    img_barrier.dstAccessMask = 0;
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    img_barrier.image = image.handle();
    img_barrier.subresourceRange = range;

    VkCommandBufferObj cmdbuf(m_device, m_commandPool);
    cmdbuf.begin();
    cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &img_barrier);

    // Then: PRESENT_SRC_KHR -> TRANSFER_DST_OPTIMAL with srcAccessMask = 0.
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    img_barrier.srcAccessMask = 0;
    img_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &img_barrier);

    m_errorMonitor->VerifyNotFound();
}
1000
TEST_F(VkPositiveLayerTest, CopyNonupdatedDescriptors) {
    TEST_DESCRIPTION("Copy non-updated descriptors");

    ASSERT_NO_FATAL_FAILURE(Init());
    // Source set has three bindings; destination only the first two. Only the
    // two type-matching bindings are copied below.
    OneOffDescriptorSet src_descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });
    OneOffDescriptorSet dst_descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });

    m_errorMonitor->ExpectSuccess();

    const unsigned int copy_size = 2;
    VkCopyDescriptorSet copy_ds_update[copy_size] = {};  // zero-init replaces the explicit memset
    for (unsigned int i = 0; i < copy_size; i++) {
        copy_ds_update[i].sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
        copy_ds_update[i].srcSet = src_descriptor_set.set_;
        copy_ds_update[i].srcBinding = i;
        copy_ds_update[i].dstSet = dst_descriptor_set.set_;
        copy_ds_update[i].dstBinding = i;
        copy_ds_update[i].descriptorCount = 1;
    }
    vkUpdateDescriptorSets(m_device->device(), 0, NULL, copy_size, copy_ds_update);

    m_errorMonitor->VerifyNotFound();
}
1033
TEST_F(VkPositiveLayerTest, ConfirmNoVLErrorWhenVkCmdClearAttachmentsCalledInSecondaryCB) {
    TEST_DESCRIPTION(
        "This test is to verify that when vkCmdClearAttachments is called by a secondary commandbuffer, the validation layers do "
        "not throw an error if the primary commandbuffer begins a renderpass before executing the secondary commandbuffer.");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);

    // Secondary CB inherits this test's render pass / framebuffer and is
    // recorded with RENDER_PASS_CONTINUE so it may record in-pass commands.
    VkCommandBufferInheritanceInfo hinfo = {};
    hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
    hinfo.pNext = NULL;
    hinfo.renderPass = renderPass();
    hinfo.subpass = 0;
    hinfo.framebuffer = m_framebuffer;
    hinfo.occlusionQueryEnable = VK_FALSE;
    hinfo.queryFlags = 0;
    hinfo.pipelineStatistics = 0;

    VkCommandBufferBeginInfo info = {};
    info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
    info.pInheritanceInfo = &hinfo;

    secondary.begin(&info);
    VkClearAttachment color_attachment;
    color_attachment.colorAttachment = 0;
    color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    for (int c = 0; c < 4; ++c) {
        color_attachment.clearValue.color.float32[c] = 0.0f;
    }
    VkClearRect clear_rect = {{{0, 0}, {(uint32_t)m_width, (uint32_t)m_height}}, 0, 1};
    vkCmdClearAttachments(secondary.handle(), 1, &color_attachment, 1, &clear_rect);
    secondary.end();
    // Modify clear rect here to verify that it doesn't cause validation error
    clear_rect = {{{0, 0}, {99999999, 99999999}}, 0, 0};

    m_commandBuffer->begin();
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
    vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
1079
TEST_F(VkPositiveLayerTest, CreatePipelineComplexTypes) {
    TEST_DESCRIPTION("Smoke test for complex types across VS/FS boundary");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Tessellation stages are part of this pipeline, so the feature is required.
    if (!m_device->phy().features().tessellationShader) {
        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Patch-list topology with a 3-control-point tessellation patch.
    VkPipelineInputAssemblyStateCreateInfo ia_state{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
                                                    VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
    VkPipelineTessellationStateCreateInfo tess_state{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.gp_ci_.pInputAssemblyState = &ia_state;
    pipe.gp_ci_.pTessellationState = &tess_state;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
1110
TEST_F(VkPositiveLayerTest, ShaderRelaxedBlockLayout) {
    // This is a positive test, no errors expected
    // Verifies the ability to relax block layout rules with a shader that requires them to be relaxed
    TEST_DESCRIPTION("Create a shader that requires relaxed block layout.");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // The Relaxed Block Layout extension was promoted to core in 1.1.
    // Go ahead and check for it and turn it on in case a 1.0 device has it.
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME)) {
        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader requiring relaxed layout: member 1 sits at offset 4, which
    // strict std140 rules would reject with a message like "Structure id 2
    // decorated as Block ... member 1 at offset 4 is not aligned to 16".
    const std::string vs_spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Vertex %main "main"
OpSource GLSL 450
OpMemberDecorate %S 0 Offset 0
OpMemberDecorate %S 1 Offset 4
OpDecorate %S Block
OpDecorate %B DescriptorSet 0
OpDecorate %B Binding 0
%void = OpTypeVoid
%3 = OpTypeFunction %void
%float = OpTypeFloat 32
%v3float = OpTypeVector %float 3
%S = OpTypeStruct %float %v3float
%_ptr_Uniform_S = OpTypePointer Uniform %S
%B = OpVariable %_ptr_Uniform_S Uniform
%main = OpFunction %void None %3
%5 = OpLabel
OpReturn
OpFunctionEnd
)";
    // Building the shader module must not produce any validation message.
    m_errorMonitor->ExpectSuccess();
    VkShaderObj vs(m_device, vs_spirv, VK_SHADER_STAGE_VERTEX_BIT, this);
    m_errorMonitor->VerifyNotFound();
}
1159
TEST_F(VkPositiveLayerTest, ShaderUboStd430Layout) {
    // This is a positive test, no errors expected
    // Verifies that a UBO using std430-style packing is accepted when the
    // uniformBufferStandardLayout feature is enabled.
    TEST_DESCRIPTION("Create a shader that requires UBO std430 layout.");
    // Enable req'd extensions
    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // Check for the UBO standard block layout extension and turn it on if it's available
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME)) {
        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
               VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME);

    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 =
        (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");

    auto uniform_buffer_standard_layout_features = lvl_init_struct<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(NULL);
    uniform_buffer_standard_layout_features.uniformBufferStandardLayout = VK_TRUE;
    auto query_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&uniform_buffer_standard_layout_features);
    vkGetPhysicalDeviceFeatures2(gpu(), &query_features2);

    // Fix: the queried struct is chained into device creation below, so if the
    // device exposes the extension but not the feature bit, the shader check
    // would fail instead of the test being skipped. Skip explicitly.
    if (uniform_buffer_standard_layout_features.uniformBufferStandardLayout != VK_TRUE) {
        printf("%s uniformBufferStandardLayout feature not supported, skipping this pass. \n", kSkipPrefix);
        return;
    }

    auto set_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&uniform_buffer_standard_layout_features);

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &set_features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader requiring std430 in a uniform buffer.
    // Without uniform buffer standard layout, we would expect a message like:
    // "Structure id 3 decorated as Block for variable in Uniform storage class
    // must follow standard uniform buffer layout rules: member 0 is an array
    // with stride 4 not satisfying alignment to 16"
    const std::string spv_source = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Vertex %main "main"
OpSource GLSL 460
OpDecorate %_arr_float_uint_8 ArrayStride 4
OpMemberDecorate %foo 0 Offset 0
OpDecorate %foo Block
OpDecorate %b DescriptorSet 0
OpDecorate %b Binding 0
%void = OpTypeVoid
%3 = OpTypeFunction %void
%float = OpTypeFloat 32
%uint = OpTypeInt 32 0
%uint_8 = OpConstant %uint 8
%_arr_float_uint_8 = OpTypeArray %float %uint_8
%foo = OpTypeStruct %_arr_float_uint_8
%_ptr_Uniform_foo = OpTypePointer Uniform %foo
%b = OpVariable %_ptr_Uniform_foo Uniform
%main = OpFunction %void None %3
%5 = OpLabel
OpReturn
OpFunctionEnd
)";

    std::vector<unsigned int> spv;
    // Fix: previously the ASMtoSPV result was ignored; a failed assembly would
    // hand an empty module to vkCreateShaderModule and produce a confusing
    // downstream failure. Fail fast instead.
    ASSERT_TRUE(ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv));

    VkShaderModuleCreateInfo module_create_info = {};
    module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    module_create_info.pNext = NULL;
    module_create_info.pCode = spv.data();
    module_create_info.codeSize = spv.size() * sizeof(unsigned int);
    module_create_info.flags = 0;

    // Shader module creation must not trigger any layout validation message.
    m_errorMonitor->ExpectSuccess();
    VkShaderModule shader_module;
    VkResult err = vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
    m_errorMonitor->VerifyNotFound();
    if (err == VK_SUCCESS) {
        vkDestroyShaderModule(m_device->handle(), shader_module, NULL);
    }
}
1242
TEST_F(VkPositiveLayerTest, ShaderScalarBlockLayout) {
    // This is a positive test, no errors expected
    // Verifies that scalar block layout is accepted when the scalarBlockLayout
    // feature is enabled.
    TEST_DESCRIPTION("Create a shader that requires scalar block layout.");
    // Enable req'd extensions
    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // Check for the Scalar Block Layout extension and turn it on if it's available
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME)) {
        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);

    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 =
        (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");

    auto scalar_block_features = lvl_init_struct<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(NULL);
    scalar_block_features.scalarBlockLayout = VK_TRUE;
    auto query_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&scalar_block_features);
    vkGetPhysicalDeviceFeatures2(gpu(), &query_features2);

    // Fix: the queried struct is chained into device creation below, so if the
    // device exposes the extension but not the feature bit, the shader check
    // would fail instead of the test being skipped. Skip explicitly.
    if (scalar_block_features.scalarBlockLayout != VK_TRUE) {
        printf("%s scalarBlockLayout feature not supported, skipping this pass. \n", kSkipPrefix);
        return;
    }

    auto set_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&scalar_block_features);

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &set_features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader requiring scalar layout.
    // Without scalar layout, we would expect a message like:
    // "Structure id 2 decorated as Block for variable in Uniform storage class
    // must follow standard uniform buffer layout rules: member 1 at offset 4 is not aligned to 16"
    const std::string spv_source = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Vertex %main "main"
OpSource GLSL 450
OpMemberDecorate %S 0 Offset 0
OpMemberDecorate %S 1 Offset 4
OpMemberDecorate %S 2 Offset 8
OpDecorate %S Block
OpDecorate %B DescriptorSet 0
OpDecorate %B Binding 0
%void = OpTypeVoid
%3 = OpTypeFunction %void
%float = OpTypeFloat 32
%v3float = OpTypeVector %float 3
%S = OpTypeStruct %float %float %v3float
%_ptr_Uniform_S = OpTypePointer Uniform %S
%B = OpVariable %_ptr_Uniform_S Uniform
%main = OpFunction %void None %3
%5 = OpLabel
OpReturn
OpFunctionEnd
)";

    // Building the shader module must not produce any validation message.
    m_errorMonitor->ExpectSuccess();
    VkShaderObj vs(m_device, spv_source, VK_SHADER_STAGE_VERTEX_BIT, this);
    m_errorMonitor->VerifyNotFound();
}
1309
TEST_F(VkPositiveLayerTest, SpirvGroupDecorations) {
    TEST_DESCRIPTION("Test shader validation support for group decorations.");
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Compute shader using OpDecorationGroup / OpGroupDecorate /
    // OpGroupMemberDecorate to spread BufferBlock, Offset, DescriptorSet and
    // Binding decorations across six storage-buffer structs (%15-%20).
    // NOTE(review): spv_source is declared but not consumed below — the
    // pipeline's compute stage is built from bindStateMinimalShaderText.
    // Verify that the group-decoration SPIR-V is actually exercised.
    const std::string spv_source = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint GLCompute %main "main" %gl_GlobalInvocationID
OpExecutionMode %main LocalSize 1 1 1
OpSource GLSL 430
OpName %main "main"
OpName %gl_GlobalInvocationID "gl_GlobalInvocationID"
OpDecorate %gl_GlobalInvocationID BuiltIn GlobalInvocationId
OpDecorate %_runtimearr_float ArrayStride 4
OpDecorate %4 BufferBlock
OpDecorate %5 Offset 0
%4 = OpDecorationGroup
%5 = OpDecorationGroup
OpGroupDecorate %4 %_struct_6 %_struct_7 %_struct_8 %_struct_9 %_struct_10 %_struct_11
OpGroupMemberDecorate %5 %_struct_6 0 %_struct_7 0 %_struct_8 0 %_struct_9 0 %_struct_10 0 %_struct_11 0
OpDecorate %12 DescriptorSet 0
OpDecorate %13 DescriptorSet 0
OpDecorate %13 NonWritable
OpDecorate %13 Restrict
%14 = OpDecorationGroup
%12 = OpDecorationGroup
%13 = OpDecorationGroup
OpGroupDecorate %12 %15
OpGroupDecorate %12 %15
OpGroupDecorate %12 %15
OpDecorate %15 DescriptorSet 0
OpDecorate %15 Binding 5
OpGroupDecorate %14 %16
OpDecorate %16 DescriptorSet 0
OpDecorate %16 Binding 0
OpGroupDecorate %12 %17
OpDecorate %17 Binding 1
OpGroupDecorate %13 %18 %19
OpDecorate %18 Binding 2
OpDecorate %19 Binding 3
OpGroupDecorate %14 %20
OpGroupDecorate %12 %20
OpGroupDecorate %13 %20
OpDecorate %20 Binding 4
%bool = OpTypeBool
%void = OpTypeVoid
%23 = OpTypeFunction %void
%uint = OpTypeInt 32 0
%int = OpTypeInt 32 1
%float = OpTypeFloat 32
%v3uint = OpTypeVector %uint 3
%v3float = OpTypeVector %float 3
%_ptr_Input_v3uint = OpTypePointer Input %v3uint
%_ptr_Uniform_int = OpTypePointer Uniform %int
%_ptr_Uniform_float = OpTypePointer Uniform %float
%_runtimearr_int = OpTypeRuntimeArray %int
%_runtimearr_float = OpTypeRuntimeArray %float
%gl_GlobalInvocationID = OpVariable %_ptr_Input_v3uint Input
%int_0 = OpConstant %int 0
%_struct_6 = OpTypeStruct %_runtimearr_float
%_ptr_Uniform__struct_6 = OpTypePointer Uniform %_struct_6
%15 = OpVariable %_ptr_Uniform__struct_6 Uniform
%_struct_7 = OpTypeStruct %_runtimearr_float
%_ptr_Uniform__struct_7 = OpTypePointer Uniform %_struct_7
%16 = OpVariable %_ptr_Uniform__struct_7 Uniform
%_struct_8 = OpTypeStruct %_runtimearr_float
%_ptr_Uniform__struct_8 = OpTypePointer Uniform %_struct_8
%17 = OpVariable %_ptr_Uniform__struct_8 Uniform
%_struct_9 = OpTypeStruct %_runtimearr_float
%_ptr_Uniform__struct_9 = OpTypePointer Uniform %_struct_9
%18 = OpVariable %_ptr_Uniform__struct_9 Uniform
%_struct_10 = OpTypeStruct %_runtimearr_float
%_ptr_Uniform__struct_10 = OpTypePointer Uniform %_struct_10
%19 = OpVariable %_ptr_Uniform__struct_10 Uniform
%_struct_11 = OpTypeStruct %_runtimearr_float
%_ptr_Uniform__struct_11 = OpTypePointer Uniform %_struct_11
%20 = OpVariable %_ptr_Uniform__struct_11 Uniform
%main = OpFunction %void None %23
%40 = OpLabel
%41 = OpLoad %v3uint %gl_GlobalInvocationID
%42 = OpCompositeExtract %uint %41 0
%43 = OpAccessChain %_ptr_Uniform_float %16 %int_0 %42
%44 = OpAccessChain %_ptr_Uniform_float %17 %int_0 %42
%45 = OpAccessChain %_ptr_Uniform_float %18 %int_0 %42
%46 = OpAccessChain %_ptr_Uniform_float %19 %int_0 %42
%47 = OpAccessChain %_ptr_Uniform_float %20 %int_0 %42
%48 = OpAccessChain %_ptr_Uniform_float %15 %int_0 %42
%49 = OpLoad %float %43
%50 = OpLoad %float %44
%51 = OpLoad %float %45
%52 = OpLoad %float %46
%53 = OpLoad %float %47
%54 = OpFAdd %float %49 %50
%55 = OpFAdd %float %54 %51
%56 = OpFAdd %float %55 %52
%57 = OpFAdd %float %56 %53
OpStore %48 %57
OpReturn
OpFunctionEnd
)";

    // CreateDescriptorSetLayout
    // Six storage-buffer bindings matching the six buffers the shader declares.
    VkDescriptorSetLayoutBinding dslb[6] = {};
    size_t dslb_size = size(dslb);
    for (size_t i = 0; i < dslb_size; i++) {
        dslb[i].binding = i;
        dslb[i].descriptorCount = 1;
        dslb[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        dslb[i].pImmutableSamplers = NULL;
        dslb[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT | VK_SHADER_STAGE_ALL;
    }
    // Skip rather than exceed the device's per-stage storage-buffer limit.
    if (m_device->props.limits.maxPerStageDescriptorStorageBuffers < dslb_size) {
        printf("%sNeeded storage buffer bindings exceeds this devices limit.  Skipping tests.\n", kSkipPrefix);
        return;
    }

    // Pipeline creation with these bindings must not emit validation errors.
    CreateComputePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.dsl_bindings_.resize(dslb_size);
    memcpy(pipe.dsl_bindings_.data(), dslb, dslb_size * sizeof(VkDescriptorSetLayoutBinding));
    pipe.cs_.reset(new VkShaderObj(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_COMPUTE_BIT, this));
    pipe.InitState();
    m_errorMonitor->ExpectSuccess();
    pipe.CreateComputePipeline();
    m_errorMonitor->VerifyNotFound();
}
1438
TEST_F(VkPositiveLayerTest, CreatePipelineCheckShaderCapabilityExtension1of2) {
    // Positive test, no errors expected.
    // Verifies that a shader declaring a capability whose SPIR-V capability ID
    // is shared by two extensions is accepted when the first extension is enabled.
    TEST_DESCRIPTION("Create a shader in which uses a non-unique capability ID extension, 1 of 2");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME);
    } else {
        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
               VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Writing gl_ViewportIndex from a vertex shader requires the multiViewport feature.
    if (!m_device->phy().features().multiViewport) {
        printf("%s Device does not support multiViewport, test skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader exercising the viewport-array capability.
    char const *viewport_vs_source =
        "#version 450\n"
        "#extension GL_ARB_shader_viewport_layer_array : enable\n"
        "void main() {\n"
        "    gl_ViewportIndex = 1;\n"
        "}\n";

    VkShaderObj vertex_stage(m_device, viewport_vs_source, VK_SHADER_STAGE_VERTEX_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {vertex_stage.GetStageCreateInfo()};
    helper.InitState();
    m_errorMonitor->ExpectSuccess();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
1478
TEST_F(VkPositiveLayerTest, CreatePipelineCheckShaderCapabilityExtension2of2) {
    // Positive test, no errors expected.
    // Verifies that a shader declaring a capability whose SPIR-V capability ID
    // is shared by two extensions is accepted when the second extension is enabled.
    TEST_DESCRIPTION("Create a shader in which uses a non-unique capability ID extension, 2 of 2");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
    } else {
        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Writing gl_ViewportIndex from a vertex shader requires the multiViewport feature.
    if (!m_device->phy().features().multiViewport) {
        printf("%s Device does not support multiViewport, test skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader exercising the viewport-array capability.
    char const *viewport_vs_source =
        "#version 450\n"
        "#extension GL_ARB_shader_viewport_layer_array : enable\n"
        "void main() {\n"
        "    gl_ViewportIndex = 1;\n"
        "}\n";

    VkShaderObj vertex_stage(m_device, viewport_vs_source, VK_SHADER_STAGE_VERTEX_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {vertex_stage.GetStageCreateInfo()};
    helper.InitState();
    m_errorMonitor->ExpectSuccess();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
1517
TEST_F(VkPositiveLayerTest, CreatePipelineFragmentOutputNotWrittenButMasked) {
    TEST_DESCRIPTION(
        "Test that no error is produced when the fragment shader fails to declare an output, but the corresponding attachment's "
        "write mask is 0.");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // Fragment shader that intentionally declares and writes no outputs.
    char const *empty_fs_source =
        "#version 450\n"
        "\n"
        "void main(){\n"
        "}\n";

    VkShaderObj vertex_stage(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fragment_stage(m_device, empty_fs_source, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipeline(m_device);
    pipeline.AddShader(&vertex_stage);
    pipeline.AddShader(&fragment_stage);

    // Color attachment 0 is never written by the shader, but its color write
    // mask is zero, so the missing output must not be flagged.
    pipeline.AddDefaultColorAttachment(0);
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkDescriptorSetObj descriptor_set(m_device);
    descriptor_set.AppendDummy();
    descriptor_set.CreateVKDescriptorSet(m_commandBuffer);

    pipeline.CreateVKPipeline(descriptor_set.GetPipelineLayout(), renderPass());

    m_errorMonitor->VerifyNotFound();
}
1551
TEST_F(VkPositiveLayerTest, StatelessValidationDisable) {
    TEST_DESCRIPTION("Specify a non-zero value for a reserved parameter with stateless validation disabled");

    // Disable only the stateless (API-parameter) checks via VK_EXT_validation_features.
    VkValidationFeatureDisableEXT disables[] = {VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT};
    VkValidationFeaturesEXT features = {};
    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    features.disabledValidationFeatureCount = 1;
    features.pDisabledValidationFeatures = disables;
    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, pool_flags, &features));

    m_errorMonitor->ExpectSuccess();
    // Specify a non-zero value for the reserved VkEventCreateInfo::flags parameter. Normally this is expected to trigger a
    // stateless validation error, but this validation was disabled via the features extension, so no errors should be
    // forthcoming.
    VkEvent event_handle = VK_NULL_HANDLE;
    VkEventCreateInfo event_info = {};
    event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
    event_info.flags = 1;
    vkCreateEvent(device(), &event_info, NULL, &event_handle);
    vkDestroyEvent(device(), event_handle, NULL);
    m_errorMonitor->VerifyNotFound();
}
1574
TEST_F(VkPositiveLayerTest, PointSizeWriteInFunction) {
    TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST and write PointSize in vertex shader function.");

    ASSERT_NO_FATAL_FAILURE(Init());
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    ASSERT_NO_FATAL_FAILURE(InitViewport());

    // Vertex shader that declares PointSize and assigns it inside a helper function call.
    VkShaderObj vertex_stage(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fragment_stage(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    {
        // Scoped so the helper's objects are torn down before the final check.
        CreatePipelineHelper helper(*this);
        helper.InitInfo();
        helper.shader_stages_ = {vertex_stage.GetStageCreateInfo(), fragment_stage.GetStageCreateInfo()};
        helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        helper.InitState();
        helper.CreateGraphicsPipeline();
    }
    m_errorMonitor->VerifyNotFound();
}
1596
TEST_F(VkPositiveLayerTest, PointSizeGeomShaderSuccess) {
    TEST_DESCRIPTION(
        "Create a pipeline using TOPOLOGY_POINT_LIST, set PointSize vertex shader, and write in the final geometry stage.");

    ASSERT_NO_FATAL_FAILURE(Init());
    m_errorMonitor->ExpectSuccess();

    // Writing PointSize from a geometry stage needs both geometryShader and
    // shaderTessellationAndGeometryPointSize.
    const auto &features = m_device->phy().features();
    if (!features.geometryShader || !features.shaderTessellationAndGeometryPointSize) {
        printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    ASSERT_NO_FATAL_FAILURE(InitViewport());

    // VS declares and writes PointSize; the GS (final pre-raster stage) writes it again.
    VkShaderObj vertex_stage(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj geometry_stage(m_device, bindStateGeomPointSizeShaderText, VK_SHADER_STAGE_GEOMETRY_BIT, this);
    VkShaderObj fragment_stage(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {vertex_stage.GetStageCreateInfo(), geometry_stage.GetStageCreateInfo(),
                             fragment_stage.GetStageCreateInfo()};
    // Input assembly must be POINT_LIST for the PointSize checks to apply.
    helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    helper.InitState();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
1625
TEST_F(VkPositiveLayerTest, LoosePointSizeWrite) {
    TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST and write PointSize outside of a structure.");

    ASSERT_NO_FATAL_FAILURE(Init());
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    ASSERT_NO_FATAL_FAILURE(InitViewport());

    // Hand-written SPIR-V vertex shader. PointSize is declared as a "loose"
    // stand-alone Output variable decorated BuiltIn PointSize (see %glpointsize
    // below) rather than as a member of the usual gl_PerVertex block, and is
    // stored directly at the end of main(). Validation must still recognize the
    // PointSize write for the POINT_LIST pipeline below.
    const std::string LoosePointSizeWrite = R"(
                                       OpCapability Shader
                                  %1 = OpExtInstImport "GLSL.std.450"
                                       OpMemoryModel Logical GLSL450
                                       OpEntryPoint Vertex %main "main" %glposition %glpointsize %gl_VertexIndex
                                       OpSource GLSL 450
                                       OpName %main "main"
                                       OpName %vertices "vertices"
                                       OpName %glposition "glposition"
                                       OpName %glpointsize "glpointsize"
                                       OpName %gl_VertexIndex "gl_VertexIndex"
                                       OpDecorate %glposition BuiltIn Position
                                       OpDecorate %glpointsize BuiltIn PointSize
                                       OpDecorate %gl_VertexIndex BuiltIn VertexIndex
                               %void = OpTypeVoid
                                  %3 = OpTypeFunction %void
                              %float = OpTypeFloat 32
                            %v2float = OpTypeVector %float 2
                               %uint = OpTypeInt 32 0
                             %uint_3 = OpConstant %uint 3
                %_arr_v2float_uint_3 = OpTypeArray %v2float %uint_3
   %_ptr_Private__arr_v2float_uint_3 = OpTypePointer Private %_arr_v2float_uint_3
                           %vertices = OpVariable %_ptr_Private__arr_v2float_uint_3 Private
                                %int = OpTypeInt 32 1
                              %int_0 = OpConstant %int 0
                           %float_n1 = OpConstant %float -1
                                 %16 = OpConstantComposite %v2float %float_n1 %float_n1
               %_ptr_Private_v2float = OpTypePointer Private %v2float
                              %int_1 = OpConstant %int 1
                            %float_1 = OpConstant %float 1
                                 %21 = OpConstantComposite %v2float %float_1 %float_n1
                              %int_2 = OpConstant %int 2
                            %float_0 = OpConstant %float 0
                                 %25 = OpConstantComposite %v2float %float_0 %float_1
                            %v4float = OpTypeVector %float 4
           %_ptr_Output_gl_Position = OpTypePointer Output %v4float
                         %glposition = OpVariable %_ptr_Output_gl_Position Output
           %_ptr_Output_gl_PointSize = OpTypePointer Output %float
                        %glpointsize = OpVariable %_ptr_Output_gl_PointSize Output
                     %_ptr_Input_int = OpTypePointer Input %int
                     %gl_VertexIndex = OpVariable %_ptr_Input_int Input
                              %int_3 = OpConstant %int 3
                %_ptr_Output_v4float = OpTypePointer Output %v4float
                  %_ptr_Output_float = OpTypePointer Output %float
                               %main = OpFunction %void None %3
                                  %5 = OpLabel
                                 %18 = OpAccessChain %_ptr_Private_v2float %vertices %int_0
                                       OpStore %18 %16
                                 %22 = OpAccessChain %_ptr_Private_v2float %vertices %int_1
                                       OpStore %22 %21
                                 %26 = OpAccessChain %_ptr_Private_v2float %vertices %int_2
                                       OpStore %26 %25
                                 %33 = OpLoad %int %gl_VertexIndex
                                 %35 = OpSMod %int %33 %int_3
                                 %36 = OpAccessChain %_ptr_Private_v2float %vertices %35
                                 %37 = OpLoad %v2float %36
                                 %38 = OpCompositeExtract %float %37 0
                                 %39 = OpCompositeExtract %float %37 1
                                 %40 = OpCompositeConstruct %v4float %38 %39 %float_0 %float_1
                                 %42 = OpAccessChain %_ptr_Output_v4float %glposition
                                       OpStore %42 %40
                                       OpStore %glpointsize %float_1
                                       OpReturn
                                       OpFunctionEnd
        )";

    // Create VS declaring PointSize and write to it in a function call.
    VkShaderObj vs(m_device, LoosePointSizeWrite, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    {
        CreatePipelineHelper pipe(*this);
        pipe.InitInfo();
        pipe.shader_stages_ = {vs.GetStageCreateInfo(), ps.GetStageCreateInfo()};
        // Set Input Assembly to TOPOLOGY POINT LIST
        pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        pipe.InitState();
        pipe.CreateGraphicsPipeline();
    }
    m_errorMonitor->VerifyNotFound();
}
1715
TEST_F(VkPositiveLayerTest, UncompressedToCompressedImageCopy) {
    TEST_DESCRIPTION("Image copies between compressed and uncompressed images");
    ASSERT_NO_FATAL_FAILURE(Init());

    // Verify format support
    // Size-compatible (64-bit) formats. Uncompressed is 64 bits per texel, compressed is 64 bits per 4x4 block (or 4bpt).
    if (!ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_R16G16B16A16_UINT, VK_IMAGE_TILING_OPTIMAL,
                                         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR) ||
        !ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_TILING_OPTIMAL,
                                         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) {
        printf("%s Required formats/features not supported - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
        return;
    }

    VkImageObj uncomp_10x10t_image(m_device);       // Size = 10 * 10 * 64 bits = 6400 bits (800 bytes)
    VkImageObj comp_10x10b_40x40t_image(m_device);  // Size = 40 * 40 * 4 bits = 6400 bits (800 bytes); 10x10 BC1 blocks

    uncomp_10x10t_image.Init(10, 10, 1, VK_FORMAT_R16G16B16A16_UINT,
                             VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
    comp_10x10b_40x40t_image.Init(40, 40, 1, VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
                                  VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);

    if (!uncomp_10x10t_image.initialized() || !comp_10x10b_40x40t_image.initialized()) {
        printf("%s Unable to initialize surfaces - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
        return;
    }

    // Both copies represent the same number of bytes: BC1 packs a 4x4 texel block into 8 bytes (1/2 byte per texel), while the
    // uncompressed format is 8 bytes per texel, so 10x10 uncompressed texels <-> 40x40 compressed texels = 800 bytes each way.
    VkImageCopy copy_region = {};
    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.srcSubresource.mipLevel = 0;
    copy_region.dstSubresource.mipLevel = 0;
    copy_region.srcSubresource.baseArrayLayer = 0;
    copy_region.dstSubresource.baseArrayLayer = 0;
    copy_region.srcSubresource.layerCount = 1;
    copy_region.dstSubresource.layerCount = 1;
    copy_region.srcOffset = {0, 0, 0};
    copy_region.dstOffset = {0, 0, 0};

    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();

    // Copy from uncompressed to compressed
    copy_region.extent = {10, 10, 1};  // Dimensions in (uncompressed) texels
    vkCmdCopyImage(m_commandBuffer->handle(), uncomp_10x10t_image.handle(), VK_IMAGE_LAYOUT_GENERAL,
                   comp_10x10b_40x40t_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);

    // And from compressed to uncompressed
    copy_region.extent = {40, 40, 1};  // Dimensions in (compressed) texels
    vkCmdCopyImage(m_commandBuffer->handle(), comp_10x10b_40x40t_image.handle(), VK_IMAGE_LAYOUT_GENERAL,
                   uncomp_10x10t_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);

    m_errorMonitor->VerifyNotFound();
    m_commandBuffer->end();
}
1773
TEST_F(VkPositiveLayerTest, DeleteDescriptorSetLayoutsBeforeDescriptorSets) {
    TEST_DESCRIPTION("Create DSLayouts and DescriptorSets and then delete the DSLayouts before the DescriptorSets.");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    VkResult err;

    m_errorMonitor->ExpectSuccess();

    VkDescriptorPoolSize ds_type_count = {};
    ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
    ds_type_count.descriptorCount = 1;

    VkDescriptorPoolCreateInfo ds_pool_ci = {};
    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    ds_pool_ci.pNext = NULL;
    // FREE_DESCRIPTOR_SET_BIT is required so the set can be individually freed below.
    ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
    ds_pool_ci.maxSets = 1;
    ds_pool_ci.poolSizeCount = 1;
    ds_pool_ci.pPoolSizes = &ds_type_count;

    VkDescriptorPool ds_pool_one;
    err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool_one);
    ASSERT_VK_SUCCESS(err);

    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 0;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
    dsl_binding.pImmutableSamplers = NULL;

    VkDescriptorSet descriptorSet;
    {
        // The layout only needs to live long enough for the allocation call;
        // destroying it while sets allocated from it still exist must be valid.
        const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});

        VkDescriptorSetAllocateInfo alloc_info = {};
        alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        alloc_info.descriptorSetCount = 1;
        alloc_info.descriptorPool = ds_pool_one;
        alloc_info.pSetLayouts = &ds_layout.handle();
        err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
        ASSERT_VK_SUCCESS(err);
    }  // ds_layout destroyed
    err = vkFreeDescriptorSets(m_device->device(), ds_pool_one, 1, &descriptorSet);
    ASSERT_VK_SUCCESS(err);  // was previously assigned but never checked

    vkDestroyDescriptorPool(m_device->device(), ds_pool_one, NULL);
    m_errorMonitor->VerifyNotFound();
}
1822
TEST_F(VkPositiveLayerTest, CommandPoolDeleteWithReferences) {
    TEST_DESCRIPTION("Ensure the validation layers bookkeeping tracks the implicit command buffer frees.");
    ASSERT_NO_FATAL_FAILURE(Init());

    VkCommandPoolCreateInfo cmd_pool_info = {};
    cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    cmd_pool_info.pNext = NULL;
    cmd_pool_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
    // flags intentionally 0 (no RESET_COMMAND_BUFFER_BIT); a prior assignment
    // that was immediately overwritten has been removed.
    cmd_pool_info.flags = 0;

    VkCommandPool secondary_cmd_pool;
    VkResult res = vkCreateCommandPool(m_device->handle(), &cmd_pool_info, NULL, &secondary_cmd_pool);
    ASSERT_VK_SUCCESS(res);

    VkCommandBufferAllocateInfo cmdalloc = vk_testing::CommandBuffer::create_info(secondary_cmd_pool);
    cmdalloc.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;

    VkCommandBuffer secondary_cmds;
    res = vkAllocateCommandBuffers(m_device->handle(), &cmdalloc, &secondary_cmds);
    ASSERT_VK_SUCCESS(res);  // was previously unchecked

    VkCommandBufferInheritanceInfo cmd_buf_inheritance_info = {};
    cmd_buf_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
    cmd_buf_inheritance_info.pNext = NULL;
    cmd_buf_inheritance_info.renderPass = VK_NULL_HANDLE;
    cmd_buf_inheritance_info.subpass = 0;
    cmd_buf_inheritance_info.framebuffer = VK_NULL_HANDLE;
    cmd_buf_inheritance_info.occlusionQueryEnable = VK_FALSE;
    cmd_buf_inheritance_info.queryFlags = 0;
    cmd_buf_inheritance_info.pipelineStatistics = 0;

    VkCommandBufferBeginInfo secondary_begin = {};
    secondary_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    secondary_begin.pNext = NULL;
    secondary_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    secondary_begin.pInheritanceInfo = &cmd_buf_inheritance_info;

    res = vkBeginCommandBuffer(secondary_cmds, &secondary_begin);
    ASSERT_VK_SUCCESS(res);
    vkEndCommandBuffer(secondary_cmds);

    // Reference the secondary buffer from the primary one.
    m_commandBuffer->begin();
    vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_cmds);
    m_commandBuffer->end();

    // DestroyCommandPool *implicitly* frees the command buffers allocated from it
    vkDestroyCommandPool(m_device->handle(), secondary_cmd_pool, NULL);
    // If bookkeeping has been lax, validating the reset will attempt to touch deleted data
    res = vkResetCommandPool(m_device->handle(), m_commandPool->handle(), 0);
    ASSERT_VK_SUCCESS(res);
}
1874
TEST_F(VkPositiveLayerTest, SecondaryCommandBufferClearColorAttachments) {
    TEST_DESCRIPTION("Create a secondary command buffer and record a CmdClearAttachments call into it");
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Allocate one secondary-level command buffer from the default pool.
    VkCommandBufferAllocateInfo alloc_info = {};
    alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    alloc_info.commandPool = m_commandPool->handle();
    alloc_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
    alloc_info.commandBufferCount = 1;

    VkCommandBuffer secondary_cb;
    ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &alloc_info, &secondary_cb));

    // The secondary buffer continues the default render pass, which is what
    // makes CmdClearAttachments legal inside it.
    VkCommandBufferInheritanceInfo inheritance_info = {};
    inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
    inheritance_info.renderPass = m_renderPass;
    inheritance_info.framebuffer = m_framebuffer;

    VkCommandBufferBeginInfo begin_info = {};
    begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
    begin_info.pInheritanceInfo = &inheritance_info;

    vkBeginCommandBuffer(secondary_cb, &begin_info);
    // Record a clear of color attachment 0 to black.
    VkClearAttachment clear_attachment;
    clear_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    clear_attachment.colorAttachment = 0;
    for (int i = 0; i < 4; ++i) clear_attachment.clearValue.color.float32[i] = 0;
    VkClearRect clear_rect = {{{0, 0}, {32, 32}}, 0, 1};
    vkCmdClearAttachments(secondary_cb, 1, &clear_attachment, 1, &clear_rect);
    vkEndCommandBuffer(secondary_cb);

    // Execute the secondary buffer inside the primary's render pass.
    m_commandBuffer->begin();
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
    vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_cb);
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
1918
TEST_F(VkPositiveLayerTest, SecondaryCommandBufferImageLayoutTransitions) {
    TEST_DESCRIPTION("Perform an image layout transition in a secondary command buffer followed by a transition in the primary.");
    VkResult err;
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(Init());
    auto depth_format = FindSupportedDepthStencilFormat(gpu());
    if (!depth_format) {
        printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    // Allocate a secondary and primary cmd buffer
    VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    command_buffer_allocate_info.commandPool = m_commandPool->handle();
    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
    command_buffer_allocate_info.commandBufferCount = 1;

    VkCommandBuffer secondary_command_buffer;
    ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    VkCommandBuffer primary_command_buffer;
    ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &primary_command_buffer));
    VkCommandBufferBeginInfo command_buffer_begin_info = {};
    VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
    command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
    command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    command_buffer_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;

    // Record the first transition (UNDEFINED -> DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
    // in the secondary command buffer.
    err = vkBeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
    ASSERT_VK_SUCCESS(err);
    VkImageObj image(m_device);
    image.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());
    VkImageMemoryBarrier img_barrier = {};
    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
    img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    img_barrier.image = image.handle();
    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
    img_barrier.subresourceRange.baseArrayLayer = 0;
    img_barrier.subresourceRange.baseMipLevel = 0;
    img_barrier.subresourceRange.layerCount = 1;
    img_barrier.subresourceRange.levelCount = 1;
    vkCmdPipelineBarrier(secondary_command_buffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr,
                         0, nullptr, 1, &img_barrier);
    err = vkEndCommandBuffer(secondary_command_buffer);
    ASSERT_VK_SUCCESS(err);

    // Now update primary cmd buffer to execute secondary and transitions image
    // The primary's second transition must see the layout produced inside the
    // executed secondary (oldLayout == DEPTH_STENCIL_ATTACHMENT_OPTIMAL); that
    // cross-command-buffer tracking is what this test exercises.
    command_buffer_begin_info.pInheritanceInfo = nullptr;
    err = vkBeginCommandBuffer(primary_command_buffer, &command_buffer_begin_info);
    ASSERT_VK_SUCCESS(err);
    vkCmdExecuteCommands(primary_command_buffer, 1, &secondary_command_buffer);
    VkImageMemoryBarrier img_barrier2 = {};
    img_barrier2.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    img_barrier2.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
    img_barrier2.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    img_barrier2.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    img_barrier2.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    img_barrier2.image = image.handle();
    img_barrier2.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier2.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier2.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
    img_barrier2.subresourceRange.baseArrayLayer = 0;
    img_barrier2.subresourceRange.baseMipLevel = 0;
    img_barrier2.subresourceRange.layerCount = 1;
    img_barrier2.subresourceRange.levelCount = 1;
    vkCmdPipelineBarrier(primary_command_buffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr, 0,
                         nullptr, 1, &img_barrier2);
    err = vkEndCommandBuffer(primary_command_buffer);
    ASSERT_VK_SUCCESS(err);
    // Submit so queue-time layout validation runs as well.
    VkSubmitInfo submit_info = {};
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &primary_command_buffer;
    err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    ASSERT_VK_SUCCESS(err);
    m_errorMonitor->VerifyNotFound();
    // Wait for completion before freeing the command buffers still in flight.
    err = vkDeviceWaitIdle(m_device->device());
    ASSERT_VK_SUCCESS(err);
    vkFreeCommandBuffers(m_device->device(), m_commandPool->handle(), 1, &secondary_command_buffer);
    vkFreeCommandBuffers(m_device->device(), m_commandPool->handle(), 1, &primary_command_buffer);
}
2008
2009 // This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest,IgnoreUnrelatedDescriptor)2010 TEST_F(VkPositiveLayerTest, IgnoreUnrelatedDescriptor) {
2011 TEST_DESCRIPTION(
2012 "Ensure that the vkUpdateDescriptorSets validation code is ignoring VkWriteDescriptorSet members that are not related to "
2013 "the descriptor type specified by VkWriteDescriptorSet::descriptorType. Correct validation behavior will result in the "
2014 "test running to completion without validation errors.");
2015
2016 const uintptr_t invalid_ptr = 0xcdcdcdcd;
2017
2018 ASSERT_NO_FATAL_FAILURE(Init());
2019
2020 // Verify VK_FORMAT_R8_UNORM supports VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
2021 const VkFormat format_texel_case = VK_FORMAT_R8_UNORM;
2022 const char *format_texel_case_string = "VK_FORMAT_R8_UNORM";
2023 VkFormatProperties format_properties;
2024 vkGetPhysicalDeviceFormatProperties(gpu(), format_texel_case, &format_properties);
2025 if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
2026 printf("%s Test requires %s to support VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n", kSkipPrefix, format_texel_case_string);
2027 return;
2028 }
2029
2030 // Image Case
2031 {
2032 m_errorMonitor->ExpectSuccess();
2033
2034 VkImageObj image(m_device);
2035 image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
2036
2037 VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
2038
2039 OneOffDescriptorSet descriptor_set(m_device, {
2040 {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
2041 });
2042
2043 VkDescriptorImageInfo image_info = {};
2044 image_info.imageView = view;
2045 image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
2046
2047 VkWriteDescriptorSet descriptor_write;
2048 memset(&descriptor_write, 0, sizeof(descriptor_write));
2049 descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
2050 descriptor_write.dstSet = descriptor_set.set_;
2051 descriptor_write.dstBinding = 0;
2052 descriptor_write.descriptorCount = 1;
2053 descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
2054 descriptor_write.pImageInfo = &image_info;
2055
2056 // Set pBufferInfo and pTexelBufferView to invalid values, which should
2057 // be
2058 // ignored for descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE.
2059 // This will most likely produce a crash if the parameter_validation
2060 // layer
2061 // does not correctly ignore pBufferInfo.
2062 descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
2063 descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);
2064
2065 vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
2066
2067 m_errorMonitor->VerifyNotFound();
2068 }
2069
2070 // Buffer Case
2071 {
2072 m_errorMonitor->ExpectSuccess();
2073
2074 uint32_t queue_family_index = 0;
2075 VkBufferCreateInfo buffer_create_info = {};
2076 buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
2077 buffer_create_info.size = 1024;
2078 buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
2079 buffer_create_info.queueFamilyIndexCount = 1;
2080 buffer_create_info.pQueueFamilyIndices = &queue_family_index;
2081
2082 VkBufferObj buffer;
2083 buffer.init(*m_device, buffer_create_info);
2084
2085 OneOffDescriptorSet descriptor_set(m_device, {
2086 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
2087 });
2088
2089 VkDescriptorBufferInfo buffer_info = {};
2090 buffer_info.buffer = buffer.handle();
2091 buffer_info.offset = 0;
2092 buffer_info.range = 1024;
2093
2094 VkWriteDescriptorSet descriptor_write;
2095 memset(&descriptor_write, 0, sizeof(descriptor_write));
2096 descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
2097 descriptor_write.dstSet = descriptor_set.set_;
2098 descriptor_write.dstBinding = 0;
2099 descriptor_write.descriptorCount = 1;
2100 descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
2101 descriptor_write.pBufferInfo = &buffer_info;
2102
2103 // Set pImageInfo and pTexelBufferView to invalid values, which should
2104 // be
2105 // ignored for descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER.
2106 // This will most likely produce a crash if the parameter_validation
2107 // layer
2108 // does not correctly ignore pImageInfo.
2109 descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
2110 descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);
2111
2112 vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
2113
2114 m_errorMonitor->VerifyNotFound();
2115 }
2116
2117 // Texel Buffer Case
2118 {
2119 m_errorMonitor->ExpectSuccess();
2120
2121 uint32_t queue_family_index = 0;
2122 VkBufferCreateInfo buffer_create_info = {};
2123 buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
2124 buffer_create_info.size = 1024;
2125 buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
2126 buffer_create_info.queueFamilyIndexCount = 1;
2127 buffer_create_info.pQueueFamilyIndices = &queue_family_index;
2128
2129 VkBufferObj buffer;
2130 buffer.init(*m_device, buffer_create_info);
2131
2132 VkBufferViewCreateInfo buff_view_ci = {};
2133 buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
2134 buff_view_ci.buffer = buffer.handle();
2135 buff_view_ci.format = format_texel_case;
2136 buff_view_ci.range = VK_WHOLE_SIZE;
2137 VkBufferView buffer_view;
2138 VkResult err = vkCreateBufferView(m_device->device(), &buff_view_ci, NULL, &buffer_view);
2139 ASSERT_VK_SUCCESS(err);
2140 OneOffDescriptorSet descriptor_set(m_device,
2141 {
2142 {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
2143 });
2144
2145 VkWriteDescriptorSet descriptor_write;
2146 memset(&descriptor_write, 0, sizeof(descriptor_write));
2147 descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
2148 descriptor_write.dstSet = descriptor_set.set_;
2149 descriptor_write.dstBinding = 0;
2150 descriptor_write.descriptorCount = 1;
2151 descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
2152 descriptor_write.pTexelBufferView = &buffer_view;
2153
2154 // Set pImageInfo and pBufferInfo to invalid values, which should be
2155 // ignored for descriptorType ==
2156 // VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER.
2157 // This will most likely produce a crash if the parameter_validation
2158 // layer
2159 // does not correctly ignore pImageInfo and pBufferInfo.
2160 descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
2161 descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
2162
2163 vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
2164
2165 m_errorMonitor->VerifyNotFound();
2166
2167 vkDestroyBufferView(m_device->device(), buffer_view, NULL);
2168 }
2169 }
2170
TEST_F(VkPositiveLayerTest, ImmutableSamplerOnlyDescriptor) {
    TEST_DESCRIPTION("Bind a DescriptorSet with only an immutable sampler and make sure that we don't warn for no update.");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Create the sampler BEFORE the set layout so it can actually be baked in as
    // an immutable sampler.  The previous version passed nullptr for
    // pImmutableSamplers, which made binding 0 a normal (mutable) sampler
    // binding -- the "immutable sampler needs no update" path the test claims
    // to cover was never exercised.
    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    VkSampler sampler;
    VkResult err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
    ASSERT_VK_SUCCESS(err);

    OneOffDescriptorSet descriptor_set(m_device, {
                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, &sampler},
                                                 });

    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});

    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);

    // Binding a never-updated set whose only descriptor is an immutable sampler
    // must not trigger a "descriptor set not updated" warning.
    vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                            &descriptor_set.set_, 0, nullptr);
    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    // Destroy the sampler only after recording is complete.
    vkDestroySampler(m_device->device(), sampler, NULL);
}
2201
2202 // This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, EmptyDescriptorUpdateTest) {
    TEST_DESCRIPTION("Update last descriptor in a set that includes an empty binding");
    VkResult err;

    ASSERT_NO_FATAL_FAILURE(Init());
    m_errorMonitor->ExpectSuccess();

    // Create layout with two uniform buffer descriptors w/ empty binding between them.
    // Binding 1 has descriptorCount == 0: it occupies a binding number but holds no
    // descriptors, so only bindings 0 and 2 are updatable.  Skipping over it during
    // an update must not confuse the layer's binding bookkeeping.
    OneOffDescriptorSet ds(m_device, {
                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                         {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0 /*!*/, 0, nullptr},
                                         {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                     });

    // Create a buffer to be used for update
    VkBufferCreateInfo buff_ci = {};
    buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buff_ci.size = 256;
    buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    VkBuffer buffer;
    err = vkCreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
    ASSERT_VK_SUCCESS(err);
    // Have to bind memory to buffer before descriptor update
    VkMemoryAllocateInfo mem_alloc = {};
    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    mem_alloc.pNext = NULL;
    mem_alloc.allocationSize = 512;  // one allocation for both buffers
    mem_alloc.memoryTypeIndex = 0;

    VkMemoryRequirements mem_reqs;
    vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
    bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
    if (!pass) {
        // No compatible memory type on this device -- skip rather than fail.
        printf("%s Failed to allocate memory.\n", kSkipPrefix);
        vkDestroyBuffer(m_device->device(), buffer, NULL);
        return;
    }
    // Make sure allocation is sufficiently large to accommodate buffer requirements
    if (mem_reqs.size > mem_alloc.allocationSize) {
        mem_alloc.allocationSize = mem_reqs.size;
    }

    VkDeviceMemory mem;
    err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
    ASSERT_VK_SUCCESS(err);
    err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
    ASSERT_VK_SUCCESS(err);

    // Only update the descriptor at binding 2 -- the binding after the empty one.
    VkDescriptorBufferInfo buff_info = {};
    buff_info.buffer = buffer;
    buff_info.offset = 0;
    buff_info.range = VK_WHOLE_SIZE;
    VkWriteDescriptorSet descriptor_write = {};
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstBinding = 2;
    descriptor_write.descriptorCount = 1;
    descriptor_write.pTexelBufferView = nullptr;
    descriptor_write.pBufferInfo = &buff_info;
    descriptor_write.pImageInfo = nullptr;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_write.dstSet = ds.set_;

    vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

    m_errorMonitor->VerifyNotFound();
    // Cleanup
    vkFreeMemory(m_device->device(), mem, NULL);
    vkDestroyBuffer(m_device->device(), buffer, NULL);
}
2274
2275 // This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, PushDescriptorNullDstSetTest) {
    TEST_DESCRIPTION("Use null dstSet in CmdPushDescriptorSetKHR");

    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());
    m_errorMonitor->ExpectSuccess();

    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 2;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    dsl_binding.pImmutableSamplers = NULL;

    const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
    // Create push descriptor set layout
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);

    // Use helper to create graphics pipeline
    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.InitState();
    helper.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &ds_layout});
    helper.CreateGraphicsPipeline();

    const float vbo_data[3] = {1.f, 0.f, 1.f};
    VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);

    VkDescriptorBufferInfo buff_info;
    buff_info.buffer = vbo.handle();
    buff_info.offset = 0;
    buff_info.range = sizeof(vbo_data);
    VkWriteDescriptorSet descriptor_write = {};
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstBinding = 2;
    descriptor_write.descriptorCount = 1;
    descriptor_write.pTexelBufferView = nullptr;
    descriptor_write.pBufferInfo = &buff_info;
    descriptor_write.pImageInfo = nullptr;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    // dstSet is ignored by vkCmdPushDescriptorSetKHR -- a null handle here
    // should not cause a validation error.
    descriptor_write.dstSet = 0;

    // Find address of extension call and make the call
    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    assert(vkCmdPushDescriptorSetKHR != nullptr);

    m_commandBuffer->begin();

    // In Intel GPU, it needs to bind pipeline before push descriptor set.
    vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_layout_.handle(), 0, 1,
                              &descriptor_write);

    // Fix: the original test never called end(), leaving the command buffer in
    // the recording state.  Close out recording before verifying.
    m_commandBuffer->end();

    m_errorMonitor->VerifyNotFound();
}
2354
2355 // This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, PushDescriptorUnboundSetTest) {
    TEST_DESCRIPTION("Ensure that no validation errors are produced for not bound push descriptor sets");
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    // Create descriptor set layout
    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 2;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    dsl_binding.pImmutableSamplers = NULL;

    OneOffDescriptorSet descriptor_set(m_device, {dsl_binding}, 0, nullptr, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
                                       nullptr);

    // Create push descriptor set layout
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);

    // Create PSO.  The fragment shader consumes set 0 (the push descriptor set)
    // and set 1 (the regular, bound set) so both must be valid at draw time.
    char const fsSource[] =
        "#version 450\n"
        "\n"
        "layout(location=0) out vec4 x;\n"
        "layout(set=0) layout(binding=2) uniform foo1 { float x; } bar1;\n"
        "layout(set=1) layout(binding=2) uniform foo2 { float y; } bar2;\n"
        "void main(){\n"
        "   x = vec4(bar1.x) + vec4(bar2.y);\n"
        "}\n";
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    // Now use the descriptor layouts to create a pipeline layout
    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &descriptor_set.layout_});
    pipe.CreateGraphicsPipeline();

    const float bo_data[1] = {1.f};
    VkConstantBufferObj buffer(m_device, sizeof(bo_data), (const void *)&bo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);

    // Update descriptor set
    descriptor_set.WriteDescriptorBufferInfo(2, buffer.handle(), sizeof(bo_data));
    descriptor_set.UpdateDescriptorSets();

    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    assert(vkCmdPushDescriptorSetKHR != nullptr);

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);

    // Push descriptors for set 0 and bind the regular descriptor set at set 1;
    // the push descriptor set is never "bound" via vkCmdBindDescriptorSets,
    // which must not be reported as a missing set.
    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
                              descriptor_set.descriptor_writes.data());
    vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 1, 1,
                            &descriptor_set.set_, 0, NULL);

    // No errors should be generated.
    vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);

    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
2447
TEST_F(VkPositiveLayerTest, PushDescriptorSetUpdatingSetNumber) {
    TEST_DESCRIPTION(
        "Ensure that no validation errors are produced when the push descriptor set number changes "
        "between two vkCmdPushDescriptorSetKHR calls.");

    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());
    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    // Create a descriptor to push
    const uint32_t buffer_data[4] = {4, 5, 6, 7};
    VkConstantBufferObj buffer_obj(
        m_device, sizeof(buffer_data), &buffer_data,
        VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
    ASSERT_TRUE(buffer_obj.initialized());

    VkDescriptorBufferInfo buffer_info = {buffer_obj.handle(), 0, VK_WHOLE_SIZE};

    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);

    const VkDescriptorSetLayoutBinding ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                       nullptr};
    const VkDescriptorSetLayoutBinding ds_binding_1 = {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                       nullptr};
    const VkDescriptorSetLayoutObj ds_layout(m_device, {ds_binding_0, ds_binding_1});
    ASSERT_TRUE(ds_layout.initialized());

    const VkDescriptorSetLayoutBinding push_ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                            nullptr};
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {push_ds_binding_0},
                                                  VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
    ASSERT_TRUE(push_ds_layout.initialized());

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);

    VkPipelineObj pipe0(m_device);
    VkPipelineObj pipe1(m_device);
    {
        // Note: the push descriptor set is set number 2.
        const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &push_ds_layout, &ds_layout});
        ASSERT_TRUE(pipeline_layout.initialized());

        char const *fsSource =
            "#version 450\n"
            "\n"
            "layout(location=0) out vec4 x;\n"
            "layout(set=2) layout(binding=0) uniform foo { vec4 y; } bar;\n"
            "void main(){\n"
            "   x = bar.y;\n"
            "}\n";

        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
        VkPipelineObj &pipe = pipe0;
        pipe.SetViewport(m_viewports);
        pipe.SetScissor(m_scissors);
        pipe.AddShader(&vs);
        pipe.AddShader(&fs);
        pipe.AddDefaultColorAttachment();
        pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

        vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());

        // dstSet is ignored for push descriptors, so a default-constructed set is fine.
        const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
            vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);

        // Note: pushing to descriptor set number 2.
        vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 2, 1,
                                  &descriptor_write);
        vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    }

    m_errorMonitor->VerifyNotFound();

    {
        // Note: the push descriptor set is now set number 3.  Switching the set
        // number between pushes is the scenario under test.
        const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &ds_layout, &push_ds_layout});
        ASSERT_TRUE(pipeline_layout.initialized());

        const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
            vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);

        char const *fsSource =
            "#version 450\n"
            "\n"
            "layout(location=0) out vec4 x;\n"
            "layout(set=3) layout(binding=0) uniform foo { vec4 y; } bar;\n"
            "void main(){\n"
            "   x = bar.y;\n"
            "}\n";

        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
        VkPipelineObj &pipe = pipe1;
        pipe.SetViewport(m_viewports);
        pipe.SetScissor(m_scissors);
        pipe.AddShader(&vs);
        pipe.AddShader(&fs);
        pipe.AddDefaultColorAttachment();
        pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

        vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());

        // Note: now pushing to descriptor set number 3.
        vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 3, 1,
                                  &descriptor_write);
        vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    }

    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
2587
2588 // This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, TestAliasedMemoryTracking) {
    VkResult err;
    bool pass;

    TEST_DESCRIPTION(
        "Create a buffer, allocate memory, bind memory, destroy the buffer, create an image, and bind the same memory to it");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    VkBuffer buffer;
    VkImage image;
    VkDeviceMemory mem;
    VkMemoryRequirements mem_reqs;

    VkBufferCreateInfo buf_info = {};
    buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buf_info.pNext = NULL;
    buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buf_info.size = 256;
    buf_info.queueFamilyIndexCount = 0;
    buf_info.pQueueFamilyIndices = NULL;
    buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    buf_info.flags = 0;
    err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
    ASSERT_VK_SUCCESS(err);

    vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);

    VkMemoryAllocateInfo alloc_info = {};
    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    alloc_info.pNext = NULL;
    alloc_info.memoryTypeIndex = 0;

    // Ensure memory is big enough for both bindings
    alloc_info.allocationSize = 0x10000;

    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
    if (!pass) {
        printf("%s Failed to allocate memory.\n", kSkipPrefix);
        vkDestroyBuffer(m_device->device(), buffer, NULL);
        return;
    }

    err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
    ASSERT_VK_SUCCESS(err);

    uint8_t *pData;
    err = vkMapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
    ASSERT_VK_SUCCESS(err);

    // Fill the mapped range with a recognizable byte pattern.  Fix: memset()
    // only uses the low byte of its value argument, so the previous multi-byte
    // constant 0xCADECADE silently truncated to 0xDE -- pass a single byte.
    memset(pData, 0xCA, static_cast<size_t>(mem_reqs.size));

    vkUnmapMemory(m_device->device(), mem);

    err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
    ASSERT_VK_SUCCESS(err);

    // NOW, destroy the buffer. Obviously, the resource no longer occupies this
    // memory. In fact, it was never used by the GPU.
    // Just be sure, wait for idle.
    vkDestroyBuffer(m_device->device(), buffer, NULL);
    vkDeviceWaitIdle(m_device->device());

    // Use optimal as some platforms report linear support but then fail image creation
    VkImageTiling image_tiling = VK_IMAGE_TILING_OPTIMAL;
    VkImageFormatProperties image_format_properties;
    vkGetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, image_tiling,
                                             VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0, &image_format_properties);
    if (image_format_properties.maxExtent.width == 0) {
        printf("%s Image format not supported; skipped.\n", kSkipPrefix);
        vkFreeMemory(m_device->device(), mem, NULL);
        return;
    }
    VkImageCreateInfo image_create_info = {};
    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    image_create_info.pNext = NULL;
    image_create_info.imageType = VK_IMAGE_TYPE_2D;
    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
    image_create_info.extent.width = 64;
    image_create_info.extent.height = 64;
    image_create_info.extent.depth = 1;
    image_create_info.mipLevels = 1;
    image_create_info.arrayLayers = 1;
    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
    image_create_info.tiling = image_tiling;
    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    image_create_info.queueFamilyIndexCount = 0;
    image_create_info.pQueueFamilyIndices = NULL;
    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    image_create_info.flags = 0;

    /* Create a mappable image. It will be the texture if linear images are OK
     * to be textures or it will be the staging image if they are not.
     */
    err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
    ASSERT_VK_SUCCESS(err);

    vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);

    VkMemoryAllocateInfo mem_alloc = {};
    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    mem_alloc.pNext = NULL;
    mem_alloc.allocationSize = 0;
    mem_alloc.memoryTypeIndex = 0;
    mem_alloc.allocationSize = mem_reqs.size;

    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
    if (!pass) {
        printf("%s Failed to allocate memory.\n", kSkipPrefix);
        vkFreeMemory(m_device->device(), mem, NULL);
        vkDestroyImage(m_device->device(), image, NULL);
        return;
    }

    // Rebinding the same memory to a new image after the original buffer was
    // destroyed is legal aliasing; no validation error is expected.  (The old
    // "VALIDATION FAILURE" comment here was stale -- this is a positive test.)
    err = vkBindImageMemory(m_device->device(), image, mem, 0);
    ASSERT_VK_SUCCESS(err);

    m_errorMonitor->VerifyNotFound();

    vkFreeMemory(m_device->device(), mem, NULL);
    vkDestroyImage(m_device->device(), image, NULL);
}
2715
2716 // This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, TestDestroyFreeNullHandles) {
    VkResult err;

    TEST_DESCRIPTION("Call all applicable destroy and free routines with NULL handles, expecting no validation errors");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    // The spec requires every vkDestroy* / vkFree* entry point to accept a
    // VK_NULL_HANDLE object and do nothing; none of these may produce errors.
    vkDestroyBuffer(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyBufferView(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyCommandPool(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyDescriptorPool(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyDescriptorSetLayout(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyDevice(VK_NULL_HANDLE, NULL);
    vkDestroyEvent(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyFence(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyFramebuffer(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyImage(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyImageView(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyInstance(VK_NULL_HANDLE, NULL);
    vkDestroyPipeline(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyPipelineCache(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyPipelineLayout(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyQueryPool(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyRenderPass(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroySampler(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroySemaphore(m_device->device(), VK_NULL_HANDLE, NULL);
    vkDestroyShaderModule(m_device->device(), VK_NULL_HANDLE, NULL);

    VkCommandPool command_pool;
    VkCommandPoolCreateInfo pool_create_info{};
    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
    // Allocate one real command buffer into the MIDDLE element; elements 0 and 2
    // stay VK_NULL_HANDLE, so the 3-element free below mixes null and valid
    // handles -- the nulls must be silently ignored.
    VkCommandBuffer command_buffers[3] = {};
    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    command_buffer_allocate_info.commandPool = command_pool;
    command_buffer_allocate_info.commandBufferCount = 1;
    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffers[1]);
    vkFreeCommandBuffers(m_device->device(), command_pool, 3, command_buffers);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    VkDescriptorPoolSize ds_type_count = {};
    ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
    ds_type_count.descriptorCount = 1;

    VkDescriptorPoolCreateInfo ds_pool_ci = {};
    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    ds_pool_ci.pNext = NULL;
    ds_pool_ci.maxSets = 1;
    ds_pool_ci.poolSizeCount = 1;
    // FREE_DESCRIPTOR_SET_BIT is required so vkFreeDescriptorSets is legal below.
    ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
    ds_pool_ci.pPoolSizes = &ds_type_count;

    VkDescriptorPool ds_pool;
    err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
    ASSERT_VK_SUCCESS(err);

    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 2;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    dsl_binding.pImmutableSamplers = NULL;

    const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});

    // Same mixed null/valid pattern as the command buffers above: only the
    // middle descriptor set is real.
    VkDescriptorSet descriptor_sets[3] = {};
    VkDescriptorSetAllocateInfo alloc_info = {};
    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    alloc_info.descriptorSetCount = 1;
    alloc_info.descriptorPool = ds_pool;
    alloc_info.pSetLayouts = &ds_layout.handle();
    err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_sets[1]);
    ASSERT_VK_SUCCESS(err);
    vkFreeDescriptorSets(m_device->device(), ds_pool, 3, descriptor_sets);
    vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);

    vkFreeMemory(m_device->device(), VK_NULL_HANDLE, NULL);

    m_errorMonitor->VerifyNotFound();
}
2802
TEST_F(VkPositiveLayerTest, QueueSubmitSemaphoresAndLayoutTracking) {
    TEST_DESCRIPTION("Submit multiple command buffers with chained semaphore signals and layout transitions");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // Allocate 4 primary command buffers from the framework's command pool.
    VkCommandBuffer cmd_bufs[4];
    VkCommandBufferAllocateInfo alloc_info;
    alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    alloc_info.pNext = NULL;
    alloc_info.commandBufferCount = 4;
    alloc_info.commandPool = m_commandPool->handle();
    alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    vkAllocateCommandBuffers(m_device->device(), &alloc_info, cmd_bufs);

    // Image whose layout is transitioned once by each of the 4 command buffers.
    VkImageObj image(m_device);
    image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM,
               (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
               VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());

    VkCommandBufferBeginInfo cb_binfo;
    cb_binfo.pNext = NULL;
    cb_binfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cb_binfo.pInheritanceInfo = VK_NULL_HANDLE;
    cb_binfo.flags = 0;

    // Use 4 command buffers, each with an image layout transition, ColorAO->General->ColorAO->TransferSrc->TransferDst
    vkBeginCommandBuffer(cmd_bufs[0], &cb_binfo);
    VkImageMemoryBarrier img_barrier = {};
    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    img_barrier.pNext = NULL;
    img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
    img_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    img_barrier.image = image.handle();
    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    img_barrier.subresourceRange.baseArrayLayer = 0;
    img_barrier.subresourceRange.baseMipLevel = 0;
    img_barrier.subresourceRange.layerCount = 1;
    img_barrier.subresourceRange.levelCount = 1;
    vkCmdPipelineBarrier(cmd_bufs[0], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
                         &img_barrier);
    vkEndCommandBuffer(cmd_bufs[0]);
    // The remaining command buffers reuse the same barrier struct, updating only the layouts.
    vkBeginCommandBuffer(cmd_bufs[1], &cb_binfo);
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    vkCmdPipelineBarrier(cmd_bufs[1], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
                         &img_barrier);
    vkEndCommandBuffer(cmd_bufs[1]);
    vkBeginCommandBuffer(cmd_bufs[2], &cb_binfo);
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    vkCmdPipelineBarrier(cmd_bufs[2], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
                         &img_barrier);
    vkEndCommandBuffer(cmd_bufs[2]);
    vkBeginCommandBuffer(cmd_bufs[3], &cb_binfo);
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    vkCmdPipelineBarrier(cmd_bufs[3], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
                         &img_barrier);
    vkEndCommandBuffer(cmd_bufs[3]);

    // Submit 4 command buffers in 3 submits, with submits 2 and 3 waiting for semaphores from submits 1 and 2
    VkSemaphore semaphore1, semaphore2;
    VkSemaphoreCreateInfo semaphore_create_info{};
    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore1);
    vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore2);
    VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
    VkSubmitInfo submit_info[3];
    // Submit 0: cmd_bufs[0], waits on nothing, signals semaphore1.
    submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info[0].pNext = nullptr;
    submit_info[0].commandBufferCount = 1;
    submit_info[0].pCommandBuffers = &cmd_bufs[0];
    submit_info[0].signalSemaphoreCount = 1;
    submit_info[0].pSignalSemaphores = &semaphore1;
    submit_info[0].waitSemaphoreCount = 0;
    submit_info[0].pWaitSemaphores = nullptr;  // was previously left uninitialized (stack garbage)
    submit_info[0].pWaitDstStageMask = flags;  // was assigned twice (nullptr then flags); dead store removed
    // Submit 1: cmd_bufs[1], waits on semaphore1, signals semaphore2.
    submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info[1].pNext = nullptr;
    submit_info[1].commandBufferCount = 1;
    submit_info[1].pCommandBuffers = &cmd_bufs[1];
    submit_info[1].waitSemaphoreCount = 1;
    submit_info[1].pWaitSemaphores = &semaphore1;
    submit_info[1].signalSemaphoreCount = 1;
    submit_info[1].pSignalSemaphores = &semaphore2;
    submit_info[1].pWaitDstStageMask = flags;
    // Submit 2: cmd_bufs[2] and cmd_bufs[3], waits on semaphore2, signals nothing.
    submit_info[2].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info[2].pNext = nullptr;
    submit_info[2].commandBufferCount = 2;
    submit_info[2].pCommandBuffers = &cmd_bufs[2];
    submit_info[2].waitSemaphoreCount = 1;
    submit_info[2].pWaitSemaphores = &semaphore2;
    submit_info[2].signalSemaphoreCount = 0;
    submit_info[2].pSignalSemaphores = nullptr;
    submit_info[2].pWaitDstStageMask = flags;
    vkQueueSubmit(m_device->m_queue, 3, submit_info, VK_NULL_HANDLE);
    vkQueueWaitIdle(m_device->m_queue);

    vkDestroySemaphore(m_device->device(), semaphore1, NULL);
    vkDestroySemaphore(m_device->device(), semaphore2, NULL);
    m_errorMonitor->VerifyNotFound();
}
2908
TEST_F(VkPositiveLayerTest, DynamicOffsetWithInactiveBinding) {
    // Create a descriptorSet w/ dynamic descriptors where 1 binding is inactive
    // We previously had a bug where dynamic offset of inactive bindings was still being used
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Three dynamic uniform-buffer bindings, deliberately listed out of order (2, 0, 1)
    // to exercise binding-index handling. The shader below uses only bindings 0 and 2;
    // binding 1 is the inactive one.
    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                           {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                       });

    // Create two buffers to update the descriptors with
    // The first will be 2k and used for bindings 0 & 1, the second is 1k for binding 2
    uint32_t qfi = 0;
    VkBufferCreateInfo buffCI = {};
    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buffCI.size = 2048;
    buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buffCI.queueFamilyIndexCount = 1;
    buffCI.pQueueFamilyIndices = &qfi;

    VkBufferObj dynamic_uniform_buffer_1, dynamic_uniform_buffer_2;
    dynamic_uniform_buffer_1.init(*m_device, buffCI);
    buffCI.size = 1024;  // second buffer is half the size; the create-info struct is reused
    dynamic_uniform_buffer_2.init(*m_device, buffCI);

    // Update descriptors: bindings 0 and 1 slice buffer 1, binding 2 uses buffer 2.
    const uint32_t BINDING_COUNT = 3;
    VkDescriptorBufferInfo buff_info[BINDING_COUNT] = {};
    buff_info[0].buffer = dynamic_uniform_buffer_1.handle();
    buff_info[0].offset = 0;
    buff_info[0].range = 256;
    buff_info[1].buffer = dynamic_uniform_buffer_1.handle();
    buff_info[1].offset = 256;
    buff_info[1].range = 512;
    buff_info[2].buffer = dynamic_uniform_buffer_2.handle();
    buff_info[2].offset = 0;
    buff_info[2].range = 512;

    // One write covering all 3 consecutive bindings starting at binding 0.
    VkWriteDescriptorSet descriptor_write;
    memset(&descriptor_write, 0, sizeof(descriptor_write));
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstSet = descriptor_set.set_;
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = BINDING_COUNT;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
    descriptor_write.pBufferInfo = buff_info;

    vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);

    // Create PSO to be used for draw-time errors below
    // Fragment shader references bindings 0 and 2 only, leaving binding 1 inactive.
    char const *fsSource =
        "#version 450\n"
        "\n"
        "layout(location=0) out vec4 x;\n"
        "layout(set=0) layout(binding=0) uniform foo1 { int x; int y; } bar1;\n"
        "layout(set=0) layout(binding=2) uniform foo2 { int x; int y; } bar2;\n"
        "void main(){\n"
        "   x = vec4(bar1.y) + vec4(bar2.y);\n"
        "}\n";
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.InitState();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
    pipe.CreateGraphicsPipeline();

    vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    // This update should succeed, but offset of inactive binding 1 oversteps binding 2 buffer size
    // we used to have a bug in this case.
    uint32_t dyn_off[BINDING_COUNT] = {0, 1024, 256};
    vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
                            &descriptor_set.set_, BINDING_COUNT, dyn_off);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
2999
TEST_F(VkPositiveLayerTest, NonCoherentMemoryMapping) {
    TEST_DESCRIPTION(
        "Ensure that validations handling of non-coherent memory mapping while using VK_WHOLE_SIZE does not cause access "
        "violations");
    VkResult err;
    uint8_t *pData;
    ASSERT_NO_FATAL_FAILURE(Init());

    VkDeviceMemory mem;
    VkMemoryRequirements mem_reqs;
    mem_reqs.memoryTypeBits = 0xFFFFFFFF;  // allow any memory type; we filter by property flags below
    const VkDeviceSize atom_size = m_device->props.limits.nonCoherentAtomSize;
    VkMemoryAllocateInfo alloc_info = {};
    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    alloc_info.pNext = NULL;
    alloc_info.memoryTypeIndex = 0;

    // Size everything in whole non-coherent atoms so flush/invalidate ranges stay aligned.
    static const VkDeviceSize allocation_size = 32 * atom_size;
    alloc_info.allocationSize = allocation_size;

    // Find a memory configurations WITHOUT a COHERENT bit, otherwise exit
    bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
                                                VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
    if (!pass) {
        pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info,
                                               VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
                                               VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
        if (!pass) {
            pass = m_device->phy().set_memory_type(
                mem_reqs.memoryTypeBits, &alloc_info,
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
                VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            if (!pass) {
                // Fixed typo in skip message ("wihtout" -> "without").
                printf("%s Couldn't find a memory type without a COHERENT bit.\n", kSkipPrefix);
                return;
            }
        }
    }

    err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
    ASSERT_VK_SUCCESS(err);

    // Map/Flush/Invalidate using WHOLE_SIZE and zero offsets and entire mapped range
    m_errorMonitor->ExpectSuccess();
    err = vkMapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
    ASSERT_VK_SUCCESS(err);
    VkMappedMemoryRange mmr = {};
    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    mmr.memory = mem;
    mmr.offset = 0;
    mmr.size = VK_WHOLE_SIZE;
    err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
    ASSERT_VK_SUCCESS(err);
    err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
    ASSERT_VK_SUCCESS(err);
    m_errorMonitor->VerifyNotFound();
    vkUnmapMemory(m_device->device(), mem);

    // Map/Flush/Invalidate using WHOLE_SIZE and an offset and entire mapped range
    m_errorMonitor->ExpectSuccess();
    err = vkMapMemory(m_device->device(), mem, 5 * atom_size, VK_WHOLE_SIZE, 0, (void **)&pData);
    ASSERT_VK_SUCCESS(err);
    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    mmr.memory = mem;
    mmr.offset = 6 * atom_size;  // flush range starts inside (not at the start of) the mapped region
    mmr.size = VK_WHOLE_SIZE;
    err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
    ASSERT_VK_SUCCESS(err);
    err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
    ASSERT_VK_SUCCESS(err);
    m_errorMonitor->VerifyNotFound();
    vkUnmapMemory(m_device->device(), mem);

    // Map with offset and size
    // Flush/Invalidate subrange of mapped area with offset and size
    m_errorMonitor->ExpectSuccess();
    err = vkMapMemory(m_device->device(), mem, 3 * atom_size, 9 * atom_size, 0, (void **)&pData);
    ASSERT_VK_SUCCESS(err);
    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    mmr.memory = mem;
    mmr.offset = 4 * atom_size;
    mmr.size = 2 * atom_size;
    err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
    ASSERT_VK_SUCCESS(err);
    err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
    ASSERT_VK_SUCCESS(err);
    m_errorMonitor->VerifyNotFound();
    vkUnmapMemory(m_device->device(), mem);

    // Map without offset and flush WHOLE_SIZE with two separate offsets
    m_errorMonitor->ExpectSuccess();
    err = vkMapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
    ASSERT_VK_SUCCESS(err);
    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    mmr.memory = mem;
    mmr.offset = allocation_size - (4 * atom_size);
    mmr.size = VK_WHOLE_SIZE;
    err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
    ASSERT_VK_SUCCESS(err);
    mmr.offset = allocation_size - (6 * atom_size);
    mmr.size = VK_WHOLE_SIZE;
    err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
    ASSERT_VK_SUCCESS(err);
    m_errorMonitor->VerifyNotFound();
    vkUnmapMemory(m_device->device(), mem);

    vkFreeMemory(m_device->device(), mem, NULL);
}
3108
3109 // This is a positive test. We used to expect error in this case but spec now allows it
TEST_F(VkPositiveLayerTest, ResetUnsignaledFence) {
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // The spec allows resetting a fence that is already unsignaled; no error expected.
    VkFenceCreateInfo fence_ci = {};
    fence_ci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fence_ci.pNext = NULL;
    vk_testing::Fence fence;
    fence.init(*m_device, fence_ci);

    VkFence fence_handles[1] = {fence.handle()};
    VkResult reset_result = vkResetFences(m_device->device(), 1, fence_handles);
    ASSERT_VK_SUCCESS(reset_result);

    m_errorMonitor->VerifyNotFound();
}
3125
TEST_F(VkPositiveLayerTest, CommandBufferSimultaneousUseSync) {
    // Verify that destroying a semaphore whose submission has retired is not
    // flagged, even while the (SIMULTANEOUS_USE) command buffer is still in
    // flight from a later submission.
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    VkResult err;

    // Record (empty!) command buffer that can be submitted multiple times
    // simultaneously.
    VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
                                     VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, nullptr};
    m_commandBuffer->begin(&cbbi);
    m_commandBuffer->end();

    VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
    VkFence fence;
    err = vkCreateFence(m_device->device(), &fci, nullptr, &fence);
    ASSERT_VK_SUCCESS(err);

    VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
    VkSemaphore s1, s2;
    err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &s1);
    ASSERT_VK_SUCCESS(err);
    err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &s2);
    ASSERT_VK_SUCCESS(err);

    // Submit CB once signaling s1, with fence so we can roll forward to its retirement.
    VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &m_commandBuffer->handle(), 1, &s1};
    err = vkQueueSubmit(m_device->m_queue, 1, &si, fence);
    ASSERT_VK_SUCCESS(err);

    // Submit CB again, signaling s2.
    si.pSignalSemaphores = &s2;
    err = vkQueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
    ASSERT_VK_SUCCESS(err);

    // Wait for fence.
    err = vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
    ASSERT_VK_SUCCESS(err);

    // CB is still in flight from second submission, but semaphore s1 is no
    // longer in flight. delete it.
    vkDestroySemaphore(m_device->device(), s1, nullptr);

    m_errorMonitor->VerifyNotFound();

    // Force device idle and clean up remaining objects
    vkDeviceWaitIdle(m_device->device());
    vkDestroySemaphore(m_device->device(), s2, nullptr);
    vkDestroyFence(m_device->device(), fence, nullptr);
}
3176
TEST_F(VkPositiveLayerTest, FenceCreateSignaledWaitHandling) {
    // Wait on a mix of an already-signaled fence and a queue-signaled fence;
    // both should retire without validation complaints.
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    VkResult err;

    // A fence created signaled
    VkFenceCreateInfo fci1 = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, VK_FENCE_CREATE_SIGNALED_BIT};
    VkFence f1;
    err = vkCreateFence(m_device->device(), &fci1, nullptr, &f1);
    ASSERT_VK_SUCCESS(err);

    // A fence created unsignaled
    VkFenceCreateInfo fci2 = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
    VkFence f2;
    err = vkCreateFence(m_device->device(), &fci2, nullptr, &f2);
    ASSERT_VK_SUCCESS(err);

    // Submit the unsignaled fence with an empty submission so the queue signals it.
    VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 0, nullptr, 0, nullptr};
    err = vkQueueSubmit(m_device->m_queue, 1, &si, f2);
    // Previously unchecked: a failed submit would leave f2 unsignaled and hang the wait below.
    ASSERT_VK_SUCCESS(err);

    // Wait on both fences, with signaled first.
    VkFence fences[] = {f1, f2};
    vkWaitForFences(m_device->device(), 2, fences, VK_TRUE, UINT64_MAX);

    // Should have both retired!
    vkDestroyFence(m_device->device(), f1, nullptr);
    vkDestroyFence(m_device->device(), f2, nullptr);

    m_errorMonitor->VerifyNotFound();
}
3209
TEST_F(VkPositiveLayerTest, CreateImageViewFollowsParameterCompatibilityRequirements) {
    TEST_DESCRIPTION("Verify that creating an ImageView with valid usage does not generate validation errors.");

    ASSERT_NO_FATAL_FAILURE(Init());

    m_errorMonitor->ExpectSuccess();

    // A 2D, single-mip, single-layer color attachment image with the
    // MUTABLE_FORMAT flag set.
    VkImageCreateInfo image_ci = {};
    image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    image_ci.pNext = nullptr;
    image_ci.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
    image_ci.imageType = VK_IMAGE_TYPE_2D;
    image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
    image_ci.extent = {128, 128, 1};
    image_ci.mipLevels = 1;
    image_ci.arrayLayers = 1;
    image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
    image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    image_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    image_ci.queueFamilyIndexCount = 0;
    image_ci.pQueueFamilyIndices = nullptr;
    image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    VkImageObj image(m_device);
    image.init(&image_ci);
    ASSERT_TRUE(image.initialized());

    // A view whose format matches the image format is always compatible.
    image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
    m_errorMonitor->VerifyNotFound();
}
3238
TEST_F(VkPositiveLayerTest, ValidUsage) {
    TEST_DESCRIPTION("Verify that creating an image view from an image with valid usage doesn't generate validation errors");

    ASSERT_NO_FATAL_FAILURE(Init());

    m_errorMonitor->ExpectSuccess();
    // Verify that we can create a view with usage INPUT_ATTACHMENT
    VkImageObj image(m_device);
    image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());
    VkImageView imageView = VK_NULL_HANDLE;  // initialized so failure below can't leave a garbage handle
    VkImageViewCreateInfo ivci = {};
    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    ivci.image = image.handle();
    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
    ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
    ivci.subresourceRange.layerCount = 1;
    ivci.subresourceRange.baseMipLevel = 0;
    ivci.subresourceRange.levelCount = 1;
    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

    // Check the result so we never pass an invalid handle to vkDestroyImageView.
    VkResult err = vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
    ASSERT_VK_SUCCESS(err);
    m_errorMonitor->VerifyNotFound();
    vkDestroyImageView(m_device->device(), imageView, NULL);
}
3264
3265 // This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, BindSparse) {
    TEST_DESCRIPTION("Bind 2 memory ranges to one image using vkQueueBindSparse, destroy the image and then free the memory");

    ASSERT_NO_FATAL_FAILURE(Init());

    // Sparse binding needs both queue support and the device feature; skip otherwise.
    auto index = m_device->graphics_queue_node_index_;
    if (!(m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
        printf("%s Graphics queue does not have sparse binding bit.\n", kSkipPrefix);
        return;
    }
    if (!m_device->phy().features().sparseBinding) {
        printf("%s Device does not support sparse bindings.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);

    // A small 2D sparse-binding image; its size may be grown below.
    VkImage image;
    VkImageCreateInfo image_create_info = {};
    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    image_create_info.pNext = NULL;
    image_create_info.imageType = VK_IMAGE_TYPE_2D;
    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
    image_create_info.extent.width = 64;
    image_create_info.extent.height = 64;
    image_create_info.extent.depth = 1;
    image_create_info.mipLevels = 1;
    image_create_info.arrayLayers = 1;
    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    image_create_info.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
    VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
    ASSERT_VK_SUCCESS(err);

    VkMemoryRequirements memory_reqs;
    VkDeviceMemory memory_one, memory_two;
    bool pass;
    VkMemoryAllocateInfo memory_info = {};
    memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    memory_info.pNext = NULL;
    memory_info.allocationSize = 0;
    memory_info.memoryTypeIndex = 0;
    vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
    // Find an image big enough to allow sparse mapping of 2 memory regions
    // Increase the image size until it is at least twice the
    // size of the required alignment, to ensure we can bind both
    // allocated memory blocks to the image on aligned offsets.
    while (memory_reqs.size < (memory_reqs.alignment * 2)) {
        vkDestroyImage(m_device->device(), image, nullptr);
        image_create_info.extent.width *= 2;
        image_create_info.extent.height *= 2;
        err = vkCreateImage(m_device->device(), &image_create_info, nullptr, &image);
        ASSERT_VK_SUCCESS(err);
        vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
    }
    // Allocate 2 memory regions of minimum alignment size, bind one at 0, the other
    // at the end of the first
    memory_info.allocationSize = memory_reqs.alignment;
    pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
    ASSERT_TRUE(pass);
    err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &memory_one);
    ASSERT_VK_SUCCESS(err);
    err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &memory_two);
    ASSERT_VK_SUCCESS(err);
    VkSparseMemoryBind binds[2];
    binds[0].flags = 0;
    binds[0].memory = memory_one;
    binds[0].memoryOffset = 0;
    binds[0].resourceOffset = 0;
    binds[0].size = memory_info.allocationSize;
    binds[1].flags = 0;
    binds[1].memory = memory_two;
    binds[1].memoryOffset = 0;
    binds[1].resourceOffset = memory_info.allocationSize;  // second region immediately follows the first
    binds[1].size = memory_info.allocationSize;

    VkSparseImageOpaqueMemoryBindInfo opaqueBindInfo;
    opaqueBindInfo.image = image;
    opaqueBindInfo.bindCount = 2;
    opaqueBindInfo.pBinds = binds;

    VkFence fence = VK_NULL_HANDLE;
    VkBindSparseInfo bindSparseInfo = {};
    bindSparseInfo.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
    bindSparseInfo.imageOpaqueBindCount = 1;
    bindSparseInfo.pImageOpaqueBinds = &opaqueBindInfo;

    // Bind, drain the queue, then destroy the image BEFORE freeing the memory
    // (the freeing order is part of what this test exercises).
    vkQueueBindSparse(m_device->m_queue, 1, &bindSparseInfo, fence);
    vkQueueWaitIdle(m_device->m_queue);
    vkDestroyImage(m_device->device(), image, NULL);
    vkFreeMemory(m_device->device(), memory_one, NULL);
    vkFreeMemory(m_device->device(), memory_two, NULL);
    m_errorMonitor->VerifyNotFound();
}
3361
TEST_F(VkPositiveLayerTest, BindSparseMetadata) {
    TEST_DESCRIPTION("Bind memory for the metadata aspect of a sparse image");

    ASSERT_NO_FATAL_FAILURE(Init());

    // Requires both a sparse-binding queue and sparse-residency image support.
    auto index = m_device->graphics_queue_node_index_;
    if (!(m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
        printf("%s Graphics queue does not have sparse binding bit.\n", kSkipPrefix);
        return;
    }
    if (!m_device->phy().features().sparseResidencyImage2D) {
        printf("%s Device does not support sparse residency for images.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);

    // Create a sparse image
    VkImage image;
    VkImageCreateInfo image_create_info = {};
    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    image_create_info.pNext = NULL;
    image_create_info.imageType = VK_IMAGE_TYPE_2D;
    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
    image_create_info.extent.width = 64;
    image_create_info.extent.height = 64;
    image_create_info.extent.depth = 1;
    image_create_info.mipLevels = 1;
    image_create_info.arrayLayers = 1;
    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    image_create_info.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
    VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
    ASSERT_VK_SUCCESS(err);

    // Query image memory requirements
    VkMemoryRequirements memory_reqs;
    vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);

    // Query sparse memory requirements
    uint32_t sparse_reqs_count = 0;
    vkGetImageSparseMemoryRequirements(m_device->device(), image, &sparse_reqs_count, nullptr);
    std::vector<VkSparseImageMemoryRequirements> sparse_reqs(sparse_reqs_count);
    vkGetImageSparseMemoryRequirements(m_device->device(), image, &sparse_reqs_count, sparse_reqs.data());

    // Find requirements for metadata aspect
    const VkSparseImageMemoryRequirements *metadata_reqs = nullptr;
    for (auto const &aspect_sparse_reqs : sparse_reqs) {
        if (aspect_sparse_reqs.formatProperties.aspectMask == VK_IMAGE_ASPECT_METADATA_BIT) {
            metadata_reqs = &aspect_sparse_reqs;
        }
    }

    if (!metadata_reqs) {
        // Not an error: implementations may not need metadata memory for this image.
        printf("%s Sparse image does not require memory for metadata.\n", kSkipPrefix);
    } else {
        // Allocate memory for the metadata
        VkDeviceMemory metadata_memory = VK_NULL_HANDLE;
        VkMemoryAllocateInfo metadata_memory_info = {};
        metadata_memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        metadata_memory_info.allocationSize = metadata_reqs->imageMipTailSize;
        m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &metadata_memory_info, 0);
        err = vkAllocateMemory(m_device->device(), &metadata_memory_info, NULL, &metadata_memory);
        ASSERT_VK_SUCCESS(err);

        // Bind metadata (note the METADATA flag on the sparse bind)
        VkSparseMemoryBind sparse_bind = {};
        sparse_bind.resourceOffset = metadata_reqs->imageMipTailOffset;
        sparse_bind.size = metadata_reqs->imageMipTailSize;
        sparse_bind.memory = metadata_memory;
        sparse_bind.memoryOffset = 0;
        sparse_bind.flags = VK_SPARSE_MEMORY_BIND_METADATA_BIT;

        VkSparseImageOpaqueMemoryBindInfo opaque_bind_info = {};
        opaque_bind_info.image = image;
        opaque_bind_info.bindCount = 1;
        opaque_bind_info.pBinds = &sparse_bind;

        VkBindSparseInfo bind_info = {};
        bind_info.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
        bind_info.imageOpaqueBindCount = 1;
        bind_info.pImageOpaqueBinds = &opaque_bind_info;

        vkQueueBindSparse(m_device->m_queue, 1, &bind_info, VK_NULL_HANDLE);
        m_errorMonitor->VerifyNotFound();

        // Cleanup
        vkQueueWaitIdle(m_device->m_queue);
        vkFreeMemory(m_device->device(), metadata_memory, NULL);
    }

    vkDestroyImage(m_device->device(), image, NULL);
}
3456
TEST_F(VkPositiveLayerTest, FramebufferBindingDestroyCommandPool) {
    TEST_DESCRIPTION(
        "This test should pass. Create a Framebuffer and command buffer, bind them together, then destroy command pool and "
        "framebuffer and verify there are no errors.");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // A renderpass with one color attachment.
    VkAttachmentDescription attachment = {0,
                                          VK_FORMAT_R8G8B8A8_UNORM,
                                          VK_SAMPLE_COUNT_1_BIT,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_STORE,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                          VK_IMAGE_LAYOUT_UNDEFINED,
                                          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};

    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};

    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};

    VkRenderPass rp;
    VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);

    // A compatible framebuffer.
    VkImageObj image(m_device);
    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());

    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);

    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
    VkFramebuffer fb;
    err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    // Explicitly create a command buffer to bind the FB to so that we can then
    // destroy the command pool in order to implicitly free command buffer
    VkCommandPool command_pool;
    VkCommandPoolCreateInfo pool_create_info{};
    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);

    VkCommandBuffer command_buffer;
    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    command_buffer_allocate_info.commandPool = command_pool;
    command_buffer_allocate_info.commandBufferCount = 1;
    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);

    // Begin our cmd buffer with renderpass using our framebuffer
    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
    VkCommandBufferBeginInfo begin_info{};
    begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    vkBeginCommandBuffer(command_buffer, &begin_info);

    vkCmdBeginRenderPass(command_buffer, &rpbi, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdEndRenderPass(command_buffer);
    vkEndCommandBuffer(command_buffer);
    // Destroy command pool to implicitly free command buffer
    // (destroying the pool first, while the FB it referenced still exists, is the point of the test)
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);
    vkDestroyFramebuffer(m_device->device(), fb, nullptr);
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
    m_errorMonitor->VerifyNotFound();
}
3531
TEST_F(VkPositiveLayerTest, FramebufferCreateDepthStencilLayoutTransitionForDepthOnlyImageView) {
    TEST_DESCRIPTION(
        "Validate that when an imageView of a depth/stencil image is used as a depth/stencil framebuffer attachment, the "
        "aspectMask is ignored and both depth and stencil image subresources are used.");

    ASSERT_NO_FATAL_FAILURE(Init());
    VkFormatProperties format_properties;
    vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, &format_properties);
    if (!(format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
        printf("%s Image format does not support sampling.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // Single depth/stencil attachment that stays in DEPTH_STENCIL_ATTACHMENT_OPTIMAL for the whole pass.
    VkAttachmentDescription attachment = {0,
                                          VK_FORMAT_D32_SFLOAT_S8_UINT,
                                          VK_SAMPLE_COUNT_1_BIT,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_STORE,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};

    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};

    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &att_ref, 0, nullptr};

    // NOTE(review): this self-dependency uses color-attachment stages/accesses on a depth-only subpass; it is
    // preserved from the original test — confirm it is intentional before changing.
    VkSubpassDependency dep = {0,
                               0,
                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                               VK_DEPENDENCY_BY_REGION_BIT};

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 1, &dep};

    VkResult err;
    VkRenderPass rp;
    err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);

    VkImageObj image(m_device);
    // Was the magic constant 0x26: depth/stencil attachment + sampled + transfer-dst usage.
    image.InitNoLayout(32, 32, 1, VK_FORMAT_D32_SFLOAT_S8_UINT,
                       VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
                           VK_IMAGE_USAGE_TRANSFER_DST_BIT,
                       VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(image.initialized());
    // Was the magic constant 0x6: transition both the depth and stencil aspects.
    image.SetLayout(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);

    // The framebuffer attachment view deliberately selects only the DEPTH aspect; per the test description the
    // implicit render-pass layout transitions must still cover both aspects.
    VkImageView view = image.targetView(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_DEPTH_BIT);

    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
    VkFramebuffer fb;
    err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    m_commandBuffer->begin();

    // Barrier covering BOTH aspects; must match the layout the render pass left the image in.
    VkImageMemoryBarrier imb = {};
    imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    imb.pNext = nullptr;
    imb.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    imb.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    imb.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    imb.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    imb.srcQueueFamilyIndex = 0;
    imb.dstQueueFamilyIndex = 0;
    imb.image = image.handle();
    imb.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;  // was 0x6
    imb.subresourceRange.baseMipLevel = 0;
    imb.subresourceRange.levelCount = 1;
    imb.subresourceRange.baseArrayLayer = 0;
    imb.subresourceRange.layerCount = 1;

    vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &imb);

    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer(false);
    m_errorMonitor->VerifyNotFound();

    vkDestroyFramebuffer(m_device->device(), fb, nullptr);
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
}
3618
3619 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, BarrierLayoutToImageUsage) {
    TEST_DESCRIPTION("Ensure barriers' new and old VkImageLayout are compatible with their images' VkImageUsageFlags");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    auto depth_format = FindSupportedDepthStencilFormat(gpu());
    if (!depth_format) {
        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Template barrier; image / layouts / aspect mask are filled in per test case below.
    VkImageMemoryBarrier img_barrier = {};
    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    img_barrier.pNext = NULL;
    img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
    img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    img_barrier.subresourceRange.baseArrayLayer = 0;
    img_barrier.subresourceRange.baseMipLevel = 0;
    img_barrier.subresourceRange.layerCount = 1;
    img_barrier.subresourceRange.levelCount = 1;

    {
        // One image per distinct usage flag, so each usage-specific layout can be exercised.
        VkImageObj img_color(m_device);
        img_color.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
        ASSERT_TRUE(img_color.initialized());

        VkImageObj img_ds1(m_device);
        img_ds1.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
        ASSERT_TRUE(img_ds1.initialized());

        VkImageObj img_ds2(m_device);
        img_ds2.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
        ASSERT_TRUE(img_ds2.initialized());

        VkImageObj img_xfer_src(m_device);
        img_xfer_src.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
        ASSERT_TRUE(img_xfer_src.initialized());

        VkImageObj img_xfer_dst(m_device);
        img_xfer_dst.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
        ASSERT_TRUE(img_xfer_dst.initialized());

        VkImageObj img_sampled(m_device);
        img_sampled.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
        ASSERT_TRUE(img_sampled.initialized());

        VkImageObj img_input(m_device);
        img_input.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
        ASSERT_TRUE(img_input.initialized());

        // Each case pairs an image with a usage-compatible layout; GENERAL is legal for any usage.
        const struct {
            VkImageObj &image_obj;
            VkImageLayout old_layout;
            VkImageLayout new_layout;
        } buffer_layouts[] = {
            // clang-format off
            {img_color,    VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,         VK_IMAGE_LAYOUT_GENERAL},
            {img_ds1,      VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
            {img_ds2,      VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,  VK_IMAGE_LAYOUT_GENERAL},
            {img_sampled,  VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,         VK_IMAGE_LAYOUT_GENERAL},
            {img_input,    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,         VK_IMAGE_LAYOUT_GENERAL},
            {img_xfer_src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,             VK_IMAGE_LAYOUT_GENERAL},
            {img_xfer_dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,             VK_IMAGE_LAYOUT_GENERAL},
            // clang-format on
        };

        m_commandBuffer->begin();
        for (const auto &test_case : buffer_layouts) {
            img_barrier.image = test_case.image_obj.handle();
            const VkImageUsageFlags usage = test_case.image_obj.usage();
            // Depth/stencil images need both aspects in the barrier; everything else here is color-only.
            img_barrier.subresourceRange.aspectMask = (usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
                                                          ? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
                                                          : VK_IMAGE_ASPECT_COLOR_BIT;

            // Transition old -> new, then back again; both directions must be usage-compatible.
            img_barrier.oldLayout = test_case.old_layout;
            img_barrier.newLayout = test_case.new_layout;
            vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
                                 nullptr, 0, nullptr, 1, &img_barrier);

            img_barrier.oldLayout = test_case.new_layout;
            img_barrier.newLayout = test_case.old_layout;
            vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
                                 nullptr, 0, nullptr, 1, &img_barrier);
        }
        m_commandBuffer->end();
        // Removed the trailing dead stores to img_barrier.oldLayout/newLayout: the barrier was never used again.
    }
    m_errorMonitor->VerifyNotFound();
}
3719
3720 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, WaitEventThenSet) {
    TEST_DESCRIPTION("Wait on a event then set it after the wait has been submitted.");

    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(Init());

    // The event starts unsignaled; the submitted work stalls on it until the host signals it.
    VkEventCreateInfo event_ci = {VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, nullptr, 0};
    VkEvent event;
    vkCreateEvent(m_device->device(), &event_ci, nullptr, &event);

    VkCommandPoolCreateInfo pool_ci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr,
                                       VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
                                       m_device->graphics_queue_node_index_};
    VkCommandPool cmd_pool;
    vkCreateCommandPool(m_device->device(), &pool_ci, nullptr, &cmd_pool);

    VkCommandBufferAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, cmd_pool,
                                              VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1};
    VkCommandBuffer cmd_buf;
    vkAllocateCommandBuffers(m_device->device(), &alloc_info, &cmd_buf);

    VkQueue queue = VK_NULL_HANDLE;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);

    // Record: wait on the event, then reset it inside the same command buffer.
    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
    vkBeginCommandBuffer(cmd_buf, &begin_info);
    vkCmdWaitEvents(cmd_buf, 1, &event, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 0,
                    nullptr);
    vkCmdResetEvent(cmd_buf, event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
    vkEndCommandBuffer(cmd_buf);

    // Submit the waiting command buffer first, then signal the event from the host.
    VkSubmitInfo submit_info = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &cmd_buf, 0, nullptr};
    vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
    vkSetEvent(m_device->device(), event);

    vkQueueWaitIdle(queue);

    vkDestroyEvent(m_device->device(), event, nullptr);
    vkFreeCommandBuffers(m_device->device(), cmd_pool, 1, &cmd_buf);
    vkDestroyCommandPool(m_device->device(), cmd_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
3779 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, QueryAndCopySecondaryCommandBuffers) {
    TEST_DESCRIPTION("Issue a query on a secondary command buffer and copy it on a primary.");

    ASSERT_NO_FATAL_FAILURE(Init());
    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // A single timestamp query: written on the secondary, copied on the primary.
    VkQueryPoolCreateInfo qp_ci = {VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, nullptr, 0, VK_QUERY_TYPE_TIMESTAMP, 1, 0};
    VkQueryPool query_pool;
    vkCreateQueryPool(m_device->device(), &qp_ci, nullptr, &query_pool);

    VkCommandPoolObj command_pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
    VkCommandBufferObj primary_buffer(m_device, &command_pool);
    VkCommandBufferObj secondary_buffer(m_device, &command_pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);

    // NOTE(review): this fetches queue index 1 while QueueCommandBuffer() submits to the default queue —
    // the extra wait below appears redundant; confirm before simplifying.
    VkQueue queue = VK_NULL_HANDLE;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);

    // Destination buffer for vkCmdCopyQueryPoolResults.
    uint32_t qfi = 0;
    VkBufferCreateInfo buffer_ci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, nullptr, 0,       1024,
                                    VK_BUFFER_USAGE_TRANSFER_DST_BIT,     VK_SHARING_MODE_EXCLUSIVE, 1, &qfi};
    VkBufferObj buffer;
    buffer.init(*m_device, buffer_ci);

    // Secondary command buffers require inheritance info even when recorded outside a render pass.
    VkCommandBufferInheritanceInfo hinfo = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
                                            nullptr,
                                            VK_NULL_HANDLE,
                                            0,
                                            VK_NULL_HANDLE,
                                            VK_FALSE,
                                            0,
                                            0};

    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, &hinfo};
    secondary_buffer.begin(&begin_info);
    vkCmdResetQueryPool(secondary_buffer.handle(), query_pool, 0, 1);
    vkCmdWriteTimestamp(secondary_buffer.handle(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool, 0);
    secondary_buffer.end();

    primary_buffer.begin();
    vkCmdExecuteCommands(primary_buffer.handle(), 1, &secondary_buffer.handle());
    vkCmdCopyQueryPoolResults(primary_buffer.handle(), query_pool, 0, 1, buffer.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
    primary_buffer.end();

    primary_buffer.QueueCommandBuffer();
    vkQueueWaitIdle(queue);

    vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
    m_errorMonitor->VerifyNotFound();
}
3846
3847 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, QueryAndCopyMultipleCommandBuffers) {
    TEST_DESCRIPTION("Issue a query and copy from it on a second command buffer.");

    ASSERT_NO_FATAL_FAILURE(Init());
    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // A single timestamp query, written by the first command buffer and copied by the second.
    VkQueryPoolCreateInfo qp_ci = {VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, nullptr, 0, VK_QUERY_TYPE_TIMESTAMP, 1, 0};
    VkQueryPool query_pool;
    vkCreateQueryPool(m_device->device(), &qp_ci, nullptr, &query_pool);

    VkCommandPoolCreateInfo pool_ci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr,
                                       VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
                                       m_device->graphics_queue_node_index_};
    VkCommandPool command_pool;
    vkCreateCommandPool(m_device->device(), &pool_ci, nullptr, &command_pool);

    VkCommandBufferAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, command_pool,
                                              VK_COMMAND_BUFFER_LEVEL_PRIMARY, 2};
    VkCommandBuffer command_buffer[2];
    vkAllocateCommandBuffers(m_device->device(), &alloc_info, command_buffer);

    VkQueue queue = VK_NULL_HANDLE;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);

    // Destination buffer for vkCmdCopyQueryPoolResults.
    uint32_t qfi = 0;
    VkBufferCreateInfo buffer_ci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, nullptr, 0,       1024,
                                    VK_BUFFER_USAGE_TRANSFER_DST_BIT,     VK_SHARING_MODE_EXCLUSIVE, 1, &qfi};
    VkBufferObj buffer;
    buffer.init(*m_device, buffer_ci);

    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};

    // First buffer: reset the pool and write the timestamp.
    vkBeginCommandBuffer(command_buffer[0], &begin_info);
    vkCmdResetQueryPool(command_buffer[0], query_pool, 0, 1);
    vkCmdWriteTimestamp(command_buffer[0], VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool, 0);
    vkEndCommandBuffer(command_buffer[0]);

    // Second buffer: copy the result, letting WAIT_BIT handle the ordering on the query itself.
    vkBeginCommandBuffer(command_buffer[1], &begin_info);
    vkCmdCopyQueryPoolResults(command_buffer[1], query_pool, 0, 1, buffer.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
    vkEndCommandBuffer(command_buffer[1]);

    // Submit both in one batch; they execute in submission order.
    VkSubmitInfo submit_info = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 2, command_buffer, 0, nullptr};
    vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);

    vkQueueWaitIdle(queue);

    vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, command_buffer);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
3929
3930 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoFencesThreeFrames) {
    TEST_DESCRIPTION(
        "Two command buffers with two separate fences are each run through a Submit & WaitForFences cycle 3 times. This previously "
        "revealed a bug so running this positive test to prevent a regression.");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    VkQueue queue = VK_NULL_HANDLE;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);

    static const uint32_t NUM_OBJECTS = 2;
    static const uint32_t NUM_FRAMES = 3;
    VkCommandBuffer cmd_buffers[NUM_OBJECTS] = {};
    VkFence fences[NUM_OBJECTS] = {};

    VkCommandPoolCreateInfo cmd_pool_ci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr,
                                           VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
                                           m_device->graphics_queue_node_index_};
    VkCommandPool cmd_pool;
    VkResult err = vkCreateCommandPool(m_device->device(), &cmd_pool_ci, nullptr, &cmd_pool);
    ASSERT_VK_SUCCESS(err);

    VkCommandBufferAllocateInfo cmd_buf_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, cmd_pool,
                                                VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1};
    VkFenceCreateInfo fence_ci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};

    // One command buffer and one (unsignaled) fence per object.
    for (uint32_t i = 0; i < NUM_OBJECTS; ++i) {
        err = vkAllocateCommandBuffers(m_device->device(), &cmd_buf_info, &cmd_buffers[i]);
        ASSERT_VK_SUCCESS(err);
        err = vkCreateFence(m_device->device(), &fence_ci, nullptr, &fences[i]);
        ASSERT_VK_SUCCESS(err);
    }

    // Re-record and re-submit each (empty) command buffer every frame, fencing each submission.
    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
    for (uint32_t frame = 0; frame < NUM_FRAMES; ++frame) {
        for (uint32_t obj = 0; obj < NUM_OBJECTS; ++obj) {
            err = vkBeginCommandBuffer(cmd_buffers[obj], &begin_info);
            ASSERT_VK_SUCCESS(err);
            err = vkEndCommandBuffer(cmd_buffers[obj]);
            ASSERT_VK_SUCCESS(err);

            VkSubmitInfo submit_info = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0,      nullptr, nullptr,
                                        1,                             &cmd_buffers[obj], 0, nullptr};
            // Submit, wait for the fence, and reset it for the next frame.
            err = vkQueueSubmit(queue, 1, &submit_info, fences[obj]);
            ASSERT_VK_SUCCESS(err);
            err = vkWaitForFences(m_device->device(), 1, &fences[obj], VK_TRUE, UINT64_MAX);
            ASSERT_VK_SUCCESS(err);
            err = vkResetFences(m_device->device(), 1, &fences[obj]);
            ASSERT_VK_SUCCESS(err);
        }
    }
    m_errorMonitor->VerifyNotFound();
    vkDestroyCommandPool(m_device->device(), cmd_pool, NULL);
    for (uint32_t i = 0; i < NUM_OBJECTS; ++i) {
        vkDestroyFence(m_device->device(), fences[i], nullptr);
    }
}
4002 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceQWI) {
    TEST_DESCRIPTION(
        "Two command buffers, each in a separate QueueSubmit call submitted on separate queues followed by a QueueWaitIdle.");

    ASSERT_NO_FATAL_FAILURE(Init());
    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // The semaphore orders the two submissions across the two queues.
    VkSemaphoreCreateInfo semaphore_ci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
    VkSemaphore semaphore;
    vkCreateSemaphore(m_device->device(), &semaphore_ci, nullptr, &semaphore);

    VkCommandPoolCreateInfo pool_ci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr,
                                       VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
                                       m_device->graphics_queue_node_index_};
    VkCommandPool command_pool;
    vkCreateCommandPool(m_device->device(), &pool_ci, nullptr, &command_pool);

    VkCommandBufferAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, command_pool,
                                              VK_COMMAND_BUFFER_LEVEL_PRIMARY, 2};
    VkCommandBuffer command_buffer[2];
    vkAllocateCommandBuffers(m_device->device(), &alloc_info, command_buffer);

    // Second queue of the same family (guaranteed by the queueCount check above).
    VkQueue queue = VK_NULL_HANDLE;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);

    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
    const VkViewport viewport = {0, 0, 512, 512, 0.0f, 1.0f};

    // First buffer: a full barrier plus some trivial state-setting work.
    vkBeginCommandBuffer(command_buffer[0], &begin_info);
    vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
                         nullptr, 0, nullptr, 0, nullptr);
    vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
    vkEndCommandBuffer(command_buffer[0]);

    // Second buffer: trivial work only.
    vkBeginCommandBuffer(command_buffer[1], &begin_info);
    vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
    vkEndCommandBuffer(command_buffer[1]);

    // Submit #1 on the second queue, signaling the semaphore.
    VkSubmitInfo signal_submit = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &command_buffer[0],
                                  1, &semaphore};
    vkQueueSubmit(queue, 1, &signal_submit, VK_NULL_HANDLE);

    // Submit #2 on the default queue, waiting on that semaphore.
    VkPipelineStageFlags wait_stages[] = {VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
    VkSubmitInfo wait_submit = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &semaphore, wait_stages, 1, &command_buffer[1],
                                0, nullptr};
    vkQueueSubmit(m_device->m_queue, 1, &wait_submit, VK_NULL_HANDLE);

    vkQueueWaitIdle(m_device->m_queue);

    vkDestroySemaphore(m_device->device(), semaphore, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
4100
4101 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceQWIFence) {
    TEST_DESCRIPTION(
        "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence followed "
        "by a QueueWaitIdle.");

    ASSERT_NO_FATAL_FAILURE(Init());
    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // Fence attached to the second submission; the test waits via QueueWaitIdle, not the fence.
    VkFenceCreateInfo fence_ci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
    VkFence fence;
    vkCreateFence(m_device->device(), &fence_ci, nullptr, &fence);

    // The semaphore orders the two submissions across the two queues.
    VkSemaphoreCreateInfo semaphore_ci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
    VkSemaphore semaphore;
    vkCreateSemaphore(m_device->device(), &semaphore_ci, nullptr, &semaphore);

    VkCommandPoolCreateInfo pool_ci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr,
                                       VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
                                       m_device->graphics_queue_node_index_};
    VkCommandPool command_pool;
    vkCreateCommandPool(m_device->device(), &pool_ci, nullptr, &command_pool);

    VkCommandBufferAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, command_pool,
                                              VK_COMMAND_BUFFER_LEVEL_PRIMARY, 2};
    VkCommandBuffer command_buffer[2];
    vkAllocateCommandBuffers(m_device->device(), &alloc_info, command_buffer);

    // Second queue of the same family (guaranteed by the queueCount check above).
    VkQueue queue = VK_NULL_HANDLE;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);

    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
    const VkViewport viewport = {0, 0, 512, 512, 0.0f, 1.0f};

    // First buffer: a full barrier plus some trivial state-setting work.
    vkBeginCommandBuffer(command_buffer[0], &begin_info);
    vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
                         nullptr, 0, nullptr, 0, nullptr);
    vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
    vkEndCommandBuffer(command_buffer[0]);

    // Second buffer: trivial work only.
    vkBeginCommandBuffer(command_buffer[1], &begin_info);
    vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
    vkEndCommandBuffer(command_buffer[1]);

    // Submit #1 on the second queue, signaling the semaphore.
    VkSubmitInfo signal_submit = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &command_buffer[0],
                                  1, &semaphore};
    vkQueueSubmit(queue, 1, &signal_submit, VK_NULL_HANDLE);

    // Submit #2 on the default queue, waiting on the semaphore and signaling the fence.
    VkPipelineStageFlags wait_stages[] = {VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
    VkSubmitInfo wait_submit = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &semaphore, wait_stages, 1, &command_buffer[1],
                                0, nullptr};
    vkQueueSubmit(m_device->m_queue, 1, &wait_submit, fence);

    vkQueueWaitIdle(m_device->m_queue);

    vkDestroyFence(m_device->device(), fence, nullptr);
    vkDestroySemaphore(m_device->device(), semaphore, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
4206
4207 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceTwoWFF) {
    TEST_DESCRIPTION(
        "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence followed "
        "by two consecutive WaitForFences calls on the same fence.");

    ASSERT_NO_FATAL_FAILURE(Init());
    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    VkResult err;

    // Fence signaled by the second submission; waited on twice below.
    VkFence fence;
    VkFenceCreateInfo fence_create_info{};
    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    err = vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
    ASSERT_VK_SUCCESS(err);

    // Semaphore ordering the first submission (signal) before the second (wait).
    VkSemaphore semaphore;
    VkSemaphoreCreateInfo semaphore_create_info{};
    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    err = vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
    ASSERT_VK_SUCCESS(err);

    VkCommandPool command_pool;
    VkCommandPoolCreateInfo pool_create_info{};
    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    err = vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
    ASSERT_VK_SUCCESS(err);

    VkCommandBuffer command_buffer[2];
    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    command_buffer_allocate_info.commandPool = command_pool;
    command_buffer_allocate_info.commandBufferCount = 2;
    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    err = vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
    ASSERT_VK_SUCCESS(err);

    // Second queue (index 1) from the same graphics family; availability was checked above.
    VkQueue queue = VK_NULL_HANDLE;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);

    {
        VkCommandBufferBeginInfo begin_info{};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(command_buffer[0], &begin_info);

        vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
                             nullptr, 0, nullptr, 0, nullptr);

        VkViewport viewport{};
        viewport.maxDepth = 1.0f;
        viewport.minDepth = 0.0f;
        viewport.width = 512;
        viewport.height = 512;
        viewport.x = 0;
        viewport.y = 0;
        vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
        vkEndCommandBuffer(command_buffer[0]);
    }
    {
        VkCommandBufferBeginInfo begin_info{};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(command_buffer[1], &begin_info);

        VkViewport viewport{};
        viewport.maxDepth = 1.0f;
        viewport.minDepth = 0.0f;
        viewport.width = 512;
        viewport.height = 512;
        viewport.x = 0;
        viewport.y = 0;
        vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
        vkEndCommandBuffer(command_buffer[1]);
    }
    {
        // First submission, on the second queue: signals the semaphore, no fence.
        VkSubmitInfo submit_info{};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &command_buffer[0];
        submit_info.signalSemaphoreCount = 1;
        submit_info.pSignalSemaphores = &semaphore;
        err = vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
        ASSERT_VK_SUCCESS(err);
    }
    {
        // Second submission, on the default queue: waits on the semaphore and signals the fence.
        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
        VkSubmitInfo submit_info{};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &command_buffer[1];
        submit_info.waitSemaphoreCount = 1;
        submit_info.pWaitSemaphores = &semaphore;
        submit_info.pWaitDstStageMask = flags;
        err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
        ASSERT_VK_SUCCESS(err);
    }

    // The point of this test: two consecutive waits on the same fence must both
    // succeed and produce no validation errors (the second wait sees an
    // already-signaled fence).
    err = vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
    ASSERT_VK_SUCCESS(err);
    err = vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
    ASSERT_VK_SUCCESS(err);

    vkDestroyFence(m_device->device(), fence, nullptr);
    vkDestroySemaphore(m_device->device(), semaphore, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
4313
TEST_F(VkPositiveLayerTest, TwoQueuesEnsureCorrectRetirementWithWorkStolen) {
    ASSERT_NO_FATAL_FAILURE(Init());
    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
        printf("%s Test requires two queues, skipping\n", kSkipPrefix);
        return;
    }

    VkResult result;

    m_errorMonitor->ExpectSuccess();

    // Grab the default queue plus a second queue from the same family.
    VkQueue queue0 = m_device->m_queue;
    VkQueue queue1 = nullptr;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue1);
    ASSERT_NE(queue1, nullptr);

    // An (empty) command buffer. We must have work in the first submission --
    // the layer treats unfenced work differently from fenced work.
    VkCommandPoolCreateInfo pool_info = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr, 0, 0};
    VkCommandPool command_pool;
    result = vkCreateCommandPool(m_device->device(), &pool_info, nullptr, &command_pool);
    ASSERT_VK_SUCCESS(result);
    VkCommandBufferAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, command_pool,
                                              VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1};
    VkCommandBuffer cmd_buffer;
    result = vkAllocateCommandBuffers(m_device->device(), &alloc_info, &cmd_buffer);
    ASSERT_VK_SUCCESS(result);
    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
    result = vkBeginCommandBuffer(cmd_buffer, &begin_info);
    ASSERT_VK_SUCCESS(result);
    result = vkEndCommandBuffer(cmd_buffer);
    ASSERT_VK_SUCCESS(result);

    // A semaphore to chain the two submissions together.
    VkSemaphoreCreateInfo sem_info = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
    VkSemaphore semaphore;
    result = vkCreateSemaphore(m_device->device(), &sem_info, nullptr, &semaphore);
    ASSERT_VK_SUCCESS(result);

    // First submission, to queue0: runs the command buffer and signals the semaphore.
    VkSubmitInfo submit0 = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &cmd_buffer, 1, &semaphore};

    result = vkQueueSubmit(queue0, 1, &submit0, VK_NULL_HANDLE);
    ASSERT_VK_SUCCESS(result);

    // Second submission, to queue1, waiting on the semaphore.
    VkFlags wait_mask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;  // doesn't really matter what this value is.
    VkSubmitInfo submit1 = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &semaphore, &wait_mask, 0, nullptr, 0, nullptr};

    result = vkQueueSubmit(queue1, 1, &submit1, VK_NULL_HANDLE);
    ASSERT_VK_SUCCESS(result);

    // Wait for queue0 idle only -- queue1's work may still be pending.
    result = vkQueueWaitIdle(queue0);
    ASSERT_VK_SUCCESS(result);

    // Command buffer should have been completed (it was on queue0); freeing it
    // here must not trip any validation errors.
    vkFreeCommandBuffers(m_device->device(), command_pool, 1, &cmd_buffer);

    m_errorMonitor->VerifyNotFound();

    // Force device completely idle and clean up resources
    vkDeviceWaitIdle(m_device->device());
    vkDestroyCommandPool(m_device->device(), command_pool, nullptr);
    vkDestroySemaphore(m_device->device(), semaphore, nullptr);
}
4380
4381 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFence) {
    TEST_DESCRIPTION(
        "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence, "
        "followed by a WaitForFences call.");

    ASSERT_NO_FATAL_FAILURE(Init());
    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // Sync objects: the fence is signaled by the second submit, the semaphore
    // orders the first submit before the second.
    VkFenceCreateInfo fci{};
    fci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    VkFence fence;
    vkCreateFence(m_device->device(), &fci, nullptr, &fence);

    VkSemaphoreCreateInfo sci{};
    sci.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    VkSemaphore semaphore;
    vkCreateSemaphore(m_device->device(), &sci, nullptr, &semaphore);

    VkCommandPoolCreateInfo cpci{};
    cpci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    cpci.queueFamilyIndex = m_device->graphics_queue_node_index_;
    cpci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    VkCommandPool command_pool;
    vkCreateCommandPool(m_device->device(), &cpci, nullptr, &command_pool);

    VkCommandBufferAllocateInfo cbai{};
    cbai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    cbai.commandPool = command_pool;
    cbai.commandBufferCount = 2;
    cbai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    VkCommandBuffer command_buffer[2];
    vkAllocateCommandBuffers(m_device->device(), &cbai, command_buffer);

    // Second queue (index 1) from the same graphics family; checked above.
    VkQueue queue = VK_NULL_HANDLE;
    vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);

    // Record both command buffers; only the first contains a pipeline barrier.
    auto record = [](VkCommandBuffer cb, bool with_barrier) {
        VkCommandBufferBeginInfo bi{};
        bi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(cb, &bi);
        if (with_barrier) {
            vkCmdPipelineBarrier(cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
                                 nullptr, 0, nullptr);
        }
        VkViewport viewport{0.0f, 0.0f, 512.0f, 512.0f, 0.0f, 1.0f};  // x, y, width, height, minDepth, maxDepth
        vkCmdSetViewport(cb, 0, 1, &viewport);
        vkEndCommandBuffer(cb);
    };
    record(command_buffer[0], true);
    record(command_buffer[1], false);

    {
        // First submission, on the second queue: signals the semaphore.
        VkSubmitInfo submit{};
        submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit.commandBufferCount = 1;
        submit.pCommandBuffers = &command_buffer[0];
        submit.signalSemaphoreCount = 1;
        submit.pSignalSemaphores = &semaphore;
        vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);
    }
    {
        // Second submission, on the default queue: waits on the semaphore and
        // carries the fence.
        VkPipelineStageFlags wait_stages[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
        VkSubmitInfo submit{};
        submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit.commandBufferCount = 1;
        submit.pCommandBuffers = &command_buffer[1];
        submit.waitSemaphoreCount = 1;
        submit.pWaitSemaphores = &semaphore;
        submit.pWaitDstStageMask = wait_stages;
        vkQueueSubmit(m_device->m_queue, 1, &submit, fence);
    }

    vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);

    vkDestroyFence(m_device->device(), fence, nullptr);
    vkDestroySemaphore(m_device->device(), semaphore, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
4486
4487 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueWithSemaphoreAndOneFence) {
    TEST_DESCRIPTION(
        "Two command buffers, each in a separate QueueSubmit call on the same queue, sharing a signal/wait semaphore, the second "
        "having a fence, followed by a WaitForFences call.");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // Sync objects: a fence for the final wait, a semaphore chaining the two submits.
    VkFenceCreateInfo fence_ci{};
    fence_ci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    VkFence fence;
    vkCreateFence(m_device->device(), &fence_ci, nullptr, &fence);

    VkSemaphoreCreateInfo semaphore_ci{};
    semaphore_ci.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    VkSemaphore semaphore;
    vkCreateSemaphore(m_device->device(), &semaphore_ci, nullptr, &semaphore);

    VkCommandPoolCreateInfo pool_ci{};
    pool_ci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    pool_ci.queueFamilyIndex = m_device->graphics_queue_node_index_;
    pool_ci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    VkCommandPool command_pool;
    vkCreateCommandPool(m_device->device(), &pool_ci, nullptr, &command_pool);

    VkCommandBufferAllocateInfo cb_ai{};
    cb_ai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    cb_ai.commandPool = command_pool;
    cb_ai.commandBufferCount = 2;
    cb_ai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    VkCommandBuffer command_buffer[2];
    vkAllocateCommandBuffers(m_device->device(), &cb_ai, command_buffer);

    // Record both command buffers; only the first holds a pipeline barrier.
    auto record_work = [](VkCommandBuffer cb, bool with_barrier) {
        VkCommandBufferBeginInfo begin{};
        begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(cb, &begin);
        if (with_barrier) {
            vkCmdPipelineBarrier(cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
                                 nullptr, 0, nullptr);
        }
        VkViewport viewport{0.0f, 0.0f, 512.0f, 512.0f, 0.0f, 1.0f};  // x, y, width, height, minDepth, maxDepth
        vkCmdSetViewport(cb, 0, 1, &viewport);
        vkEndCommandBuffer(cb);
    };
    record_work(command_buffer[0], true);
    record_work(command_buffer[1], false);

    {
        // First submit: signals the semaphore, no fence.
        VkSubmitInfo submit{};
        submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit.commandBufferCount = 1;
        submit.pCommandBuffers = &command_buffer[0];
        submit.signalSemaphoreCount = 1;
        submit.pSignalSemaphores = &semaphore;
        vkQueueSubmit(m_device->m_queue, 1, &submit, VK_NULL_HANDLE);
    }
    {
        // Second submit: waits on the semaphore and carries the fence.
        VkPipelineStageFlags wait_stages[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
        VkSubmitInfo submit{};
        submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit.commandBufferCount = 1;
        submit.pCommandBuffers = &command_buffer[1];
        submit.waitSemaphoreCount = 1;
        submit.pWaitSemaphores = &semaphore;
        submit.pWaitDstStageMask = wait_stages;
        vkQueueSubmit(m_device->m_queue, 1, &submit, fence);
    }

    vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);

    vkDestroyFence(m_device->device(), fence, nullptr);
    vkDestroySemaphore(m_device->device(), semaphore, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
4584
4585 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueNullQueueSubmitWithFence) {
    TEST_DESCRIPTION(
        "Two command buffers, each in a separate QueueSubmit call on the same queue, no fences, followed by a third QueueSubmit "
        "with NO SubmitInfos but with a fence, followed by a WaitForFences call.");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    // Fence signaled by the third (empty) submission and waited on below.
    VkFence fence;
    VkFenceCreateInfo fence_create_info{};
    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);

    VkCommandPool command_pool;
    VkCommandPoolCreateInfo pool_create_info{};
    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);

    VkCommandBuffer command_buffer[2];
    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    command_buffer_allocate_info.commandPool = command_pool;
    command_buffer_allocate_info.commandBufferCount = 2;
    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);

    {
        VkCommandBufferBeginInfo begin_info{};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(command_buffer[0], &begin_info);

        vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
                             nullptr, 0, nullptr, 0, nullptr);

        VkViewport viewport{};
        viewport.maxDepth = 1.0f;
        viewport.minDepth = 0.0f;
        viewport.width = 512;
        viewport.height = 512;
        viewport.x = 0;
        viewport.y = 0;
        vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
        vkEndCommandBuffer(command_buffer[0]);
    }
    {
        VkCommandBufferBeginInfo begin_info{};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(command_buffer[1], &begin_info);

        VkViewport viewport{};
        viewport.maxDepth = 1.0f;
        viewport.minDepth = 0.0f;
        viewport.width = 512;
        viewport.height = 512;
        viewport.x = 0;
        viewport.y = 0;
        vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
        vkEndCommandBuffer(command_buffer[1]);
    }
    {
        // First submission, unfenced, no semaphores. pSignalSemaphores is a
        // pointer, so use nullptr rather than the handle constant VK_NULL_HANDLE.
        VkSubmitInfo submit_info{};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &command_buffer[0];
        submit_info.signalSemaphoreCount = 0;
        submit_info.pSignalSemaphores = nullptr;
        vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    }
    {
        // Second submission, also unfenced and without semaphores.
        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
        VkSubmitInfo submit_info{};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &command_buffer[1];
        submit_info.waitSemaphoreCount = 0;
        submit_info.pWaitSemaphores = nullptr;
        submit_info.pWaitDstStageMask = flags;
        vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    }

    // Empty submission whose only job is to signal the fence once all prior
    // work on the queue has completed.
    vkQueueSubmit(m_device->m_queue, 0, NULL, fence);

    VkResult err = vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
    ASSERT_VK_SUCCESS(err);

    vkDestroyFence(m_device->device(), fence, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
4679
4680 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueOneFence) {
    TEST_DESCRIPTION(
        "Two command buffers, each in a separate QueueSubmit call on the same queue, the second having a fence, followed by a "
        "WaitForFences call.");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    // Fence attached to the second submission and waited on below.
    VkFence fence;
    VkFenceCreateInfo fence_create_info{};
    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);

    VkCommandPool command_pool;
    VkCommandPoolCreateInfo pool_create_info{};
    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);

    VkCommandBuffer command_buffer[2];
    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    command_buffer_allocate_info.commandPool = command_pool;
    command_buffer_allocate_info.commandBufferCount = 2;
    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);

    {
        VkCommandBufferBeginInfo begin_info{};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(command_buffer[0], &begin_info);

        vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
                             nullptr, 0, nullptr, 0, nullptr);

        VkViewport viewport{};
        viewport.maxDepth = 1.0f;
        viewport.minDepth = 0.0f;
        viewport.width = 512;
        viewport.height = 512;
        viewport.x = 0;
        viewport.y = 0;
        vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
        vkEndCommandBuffer(command_buffer[0]);
    }
    {
        VkCommandBufferBeginInfo begin_info{};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(command_buffer[1], &begin_info);

        VkViewport viewport{};
        viewport.maxDepth = 1.0f;
        viewport.minDepth = 0.0f;
        viewport.width = 512;
        viewport.height = 512;
        viewport.x = 0;
        viewport.y = 0;
        vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
        vkEndCommandBuffer(command_buffer[1]);
    }
    {
        // First submission, unfenced. pSignalSemaphores is a pointer member, so
        // use nullptr rather than the handle constant VK_NULL_HANDLE.
        VkSubmitInfo submit_info{};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &command_buffer[0];
        submit_info.signalSemaphoreCount = 0;
        submit_info.pSignalSemaphores = nullptr;
        vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    }
    {
        // Second submission carries the fence.
        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
        VkSubmitInfo submit_info{};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &command_buffer[1];
        submit_info.waitSemaphoreCount = 0;
        submit_info.pWaitSemaphores = nullptr;
        submit_info.pWaitDstStageMask = flags;
        vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
    }

    vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);

    vkDestroyFence(m_device->device(), fence, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);

    m_errorMonitor->VerifyNotFound();
}
4771
4772 // This is a positive test. No errors should be generated.
TEST_F(VkPositiveLayerTest, TwoSubmitInfosWithSemaphoreOneQueueSubmitsOneFence) {
    TEST_DESCRIPTION(
        "Two command buffers each in a separate SubmitInfo sent in a single QueueSubmit call followed by a WaitForFences call.");
    ASSERT_NO_FATAL_FAILURE(Init());

    m_errorMonitor->ExpectSuccess();

    // Fence signaled when the whole batch (both SubmitInfos) completes.
    VkFence fence;
    VkFenceCreateInfo fence_create_info{};
    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);

    // Semaphore ordering the two SubmitInfos within the single submission.
    VkSemaphore semaphore;
    VkSemaphoreCreateInfo semaphore_create_info{};
    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);

    VkCommandPool command_pool;
    VkCommandPoolCreateInfo pool_create_info{};
    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);

    VkCommandBuffer command_buffer[2];
    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    command_buffer_allocate_info.commandPool = command_pool;
    command_buffer_allocate_info.commandBufferCount = 2;
    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);

    {
        VkCommandBufferBeginInfo begin_info{};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(command_buffer[0], &begin_info);

        vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
                             nullptr, 0, nullptr, 0, nullptr);

        VkViewport viewport{};
        viewport.maxDepth = 1.0f;
        viewport.minDepth = 0.0f;
        viewport.width = 512;
        viewport.height = 512;
        viewport.x = 0;
        viewport.y = 0;
        vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
        vkEndCommandBuffer(command_buffer[0]);
    }
    {
        VkCommandBufferBeginInfo begin_info{};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        vkBeginCommandBuffer(command_buffer[1], &begin_info);

        VkViewport viewport{};
        viewport.maxDepth = 1.0f;
        viewport.minDepth = 0.0f;
        viewport.width = 512;
        viewport.height = 512;
        viewport.x = 0;
        viewport.y = 0;
        vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
        vkEndCommandBuffer(command_buffer[1]);
    }
    {
        // Value-initialize so no field is left indeterminate.
        VkSubmitInfo submit_info[2] = {};
        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};

        // Batch element 0: runs the first command buffer and signals the semaphore.
        submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info[0].pNext = NULL;
        submit_info[0].commandBufferCount = 1;
        submit_info[0].pCommandBuffers = &command_buffer[0];
        submit_info[0].signalSemaphoreCount = 1;
        submit_info[0].pSignalSemaphores = &semaphore;
        submit_info[0].waitSemaphoreCount = 0;
        submit_info[0].pWaitSemaphores = NULL;
        // pWaitDstStageMask is a pointer; use nullptr, not the integer literal 0.
        submit_info[0].pWaitDstStageMask = nullptr;

        // Batch element 1: waits on the semaphore, runs the second command buffer.
        submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info[1].pNext = NULL;
        submit_info[1].commandBufferCount = 1;
        submit_info[1].pCommandBuffers = &command_buffer[1];
        submit_info[1].waitSemaphoreCount = 1;
        submit_info[1].pWaitSemaphores = &semaphore;
        submit_info[1].pWaitDstStageMask = flags;
        submit_info[1].signalSemaphoreCount = 0;
        submit_info[1].pSignalSemaphores = NULL;
        vkQueueSubmit(m_device->m_queue, 2, &submit_info[0], fence);
    }

    vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);

    vkDestroyFence(m_device->device(), fence, nullptr);
    vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
    vkDestroyCommandPool(m_device->device(), command_pool, NULL);
    vkDestroySemaphore(m_device->device(), semaphore, nullptr);

    m_errorMonitor->VerifyNotFound();
}
4873
TEST_F(VkPositiveLayerTest, CreatePipelineAttribMatrixType) {
    TEST_DESCRIPTION("Test that pipeline validation accepts matrices passed as vertex attributes");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Zero-initialized binding; the mat2x4 attribute consumes two vec4 locations.
    VkVertexInputBindingDescription input_binding = {};

    VkVertexInputAttributeDescription input_attribs[2] = {};
    for (uint32_t loc = 0; loc < 2; ++loc) {
        input_attribs[loc].format = VK_FORMAT_R32G32B32A32_SFLOAT;
        input_attribs[loc].location = loc;
    }

    char const *vsSource =
        "#version 450\n"
        "\n"
        "layout(location=0) in mat2x4 x;\n"
        "void main(){\n"
        "   gl_Position = x[0] + x[1];\n"
        "}\n";

    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.vi_ci_.vertexBindingDescriptionCount = 1;
    pipe.vi_ci_.pVertexBindingDescriptions = &input_binding;
    pipe.vi_ci_.vertexAttributeDescriptionCount = 2;
    pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();
    /* expect success */
    m_errorMonitor->VerifyNotFound();
}
4915
TEST_F(VkPositiveLayerTest, CreatePipelineAttribArrayType) {
    // Added for consistency: every other positive test in this file records a
    // description via TEST_DESCRIPTION.
    TEST_DESCRIPTION("Test that pipeline validation accepts arrays passed as vertex attributes");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Zero-initialized binding; the vec4[2] attribute consumes two locations.
    VkVertexInputBindingDescription input_binding;
    memset(&input_binding, 0, sizeof(input_binding));

    VkVertexInputAttributeDescription input_attribs[2];
    memset(input_attribs, 0, sizeof(input_attribs));

    for (int i = 0; i < 2; i++) {
        input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
        input_attribs[i].location = i;
    }

    char const *vsSource =
        "#version 450\n"
        "\n"
        "layout(location=0) in vec4 x[2];\n"
        "void main(){\n"
        "   gl_Position = x[0] + x[1];\n"
        "}\n";

    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.vi_ci_.pVertexBindingDescriptions = &input_binding;
    pipe.vi_ci_.vertexBindingDescriptionCount = 1;
    pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
    pipe.vi_ci_.vertexAttributeDescriptionCount = 2;
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
4956
TEST_F(VkPositiveLayerTest, CreatePipelineAttribComponents) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts consuming a vertex attribute through multiple vertex shader inputs, each consuming "
        "a different subset of the components, and that fragment shader-attachment validation tolerates multiple duplicate "
        "location outputs");
    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkVertexInputBindingDescription input_binding;
    memset(&input_binding, 0, sizeof(input_binding));

    // Three full-width float4 attributes at locations 0..2; location 1 is
    // consumed by two vertex-shader inputs (vec3 plus a component-3 float).
    VkVertexInputAttributeDescription input_attribs[3];
    memset(input_attribs, 0, sizeof(input_attribs));

    for (int i = 0; i < 3; i++) {
        input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
        input_attribs[i].location = i;
    }

    char const *vsSource =
        "#version 450\n"
        "\n"
        "layout(location=0) in vec4 x;\n"
        "layout(location=1) in vec3 y1;\n"
        "layout(location=1, component=3) in float y2;\n"
        "layout(location=2) in vec4 z;\n"
        "void main(){\n"
        "   gl_Position = x + vec4(y1, y2) + z;\n"
        "}\n";
    // The fragment shader writes both color locations component-by-component,
    // exercising duplicate-location output validation.
    char const *fsSource =
        "#version 450\n"
        "\n"
        "layout(location=0, component=0) out float color0;\n"
        "layout(location=0, component=1) out float color1;\n"
        "layout(location=0, component=2) out float color2;\n"
        "layout(location=0, component=3) out float color3;\n"
        "layout(location=1, component=0) out vec2 second_color0;\n"
        "layout(location=1, component=2) out vec2 second_color1;\n"
        "void main(){\n"
        "   color0 = float(1);\n"
        "   second_color0 = vec2(1);\n"
        "}\n";

    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipe(m_device);

    VkDescriptorSetObj descriptorSet(m_device);
    descriptorSet.AppendDummy();
    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);

    // Create a renderPass with two color attachments
    VkAttachmentReference attachments[2] = {};
    attachments[0].layout = VK_IMAGE_LAYOUT_GENERAL;
    attachments[1].attachment = 1;
    attachments[1].layout = VK_IMAGE_LAYOUT_GENERAL;

    VkSubpassDescription subpass = {};
    subpass.pColorAttachments = attachments;
    subpass.colorAttachmentCount = 2;

    VkRenderPassCreateInfo rpci = {};
    rpci.subpassCount = 1;
    rpci.pSubpasses = &subpass;
    rpci.attachmentCount = 2;

    VkAttachmentDescription attach_desc[2] = {};
    attach_desc[0].format = VK_FORMAT_B8G8R8A8_UNORM;
    attach_desc[0].samples = VK_SAMPLE_COUNT_1_BIT;
    attach_desc[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    attach_desc[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
    attach_desc[0].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    attach_desc[1].format = VK_FORMAT_B8G8R8A8_UNORM;
    attach_desc[1].samples = VK_SAMPLE_COUNT_1_BIT;
    attach_desc[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    attach_desc[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
    attach_desc[1].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;

    rpci.pAttachments = attach_desc;
    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;

    VkRenderPass renderpass;
    // Verify render pass creation succeeded before handing the handle to the
    // pipeline (the result was previously ignored).
    ASSERT_VK_SUCCESS(vkCreateRenderPass(m_device->device(), &rpci, NULL, &renderpass));
    pipe.AddShader(&vs);
    pipe.AddShader(&fs);

    VkPipelineColorBlendAttachmentState att_state1 = {};
    att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
    att_state1.blendEnable = VK_FALSE;

    pipe.AddColorAttachment(0, att_state1);
    pipe.AddColorAttachment(1, att_state1);
    pipe.AddVertexInputBindings(&input_binding, 1);
    pipe.AddVertexInputAttribs(input_attribs, 3);
    pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderpass);
    vkDestroyRenderPass(m_device->device(), renderpass, nullptr);

    m_errorMonitor->VerifyNotFound();
}
5059
TEST_F(VkPositiveLayerTest, CreatePipelineSimplePositive) {
    // Sanity check: a default-initialized graphics pipeline must create
    // without any validation messages.
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.InitState();
    helper.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
5073
TEST_F(VkPositiveLayerTest, CreatePipelineRelaxedTypeMatch) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts the relaxed type matching rules set out in 14.1.3: fundamental type must match, and "
        "producer side must have at least as many components");
    m_errorMonitor->ExpectSuccess();

    // VK 1.0.8 Specification, 14.1.3 "Additionally,..." block

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // The vertex stage produces wider interface variables (vec3/ivec3/vec3)
    // than the fragment stage consumes (float/int/vec2); the relaxed rules
    // allow this as long as the fundamental types agree.
    char const *vsSource =
        "#version 450\n"
        "layout(location=0) out vec3 x;\n"
        "layout(location=1) out ivec3 y;\n"
        "layout(location=2) out vec3 z;\n"
        "void main(){\n"
        "   gl_Position = vec4(0);\n"
        "   x = vec3(0); y = ivec3(0); z = vec3(0);\n"
        "}\n";
    char const *fsSource =
        "#version 450\n"
        "\n"
        "layout(location=0) out vec4 color;\n"
        "layout(location=0) in float x;\n"
        "layout(location=1) flat in int y;\n"
        "layout(location=2) in vec2 z;\n"
        "void main(){\n"
        "   color = vec4(1 + x + y + z.x);\n"
        "}\n";

    VkShaderObj vert_shader(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj frag_shader(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.InitState();
    helper.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
5116
TEST_F(VkPositiveLayerTest, CreatePipelineTessPerVertex) {
    TEST_DESCRIPTION("Test that pipeline validation accepts per-vertex variables passed between the TCS and TES stages");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().tessellationShader) {
        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
        return;
    }

    // The control stage writes a per-vertex int at location 0 which the
    // evaluation stage reads back through the implicitly-sized input array.
    char const *tcsSource =
        "#version 450\n"
        "layout(location=0) out int x[];\n"
        "layout(vertices=3) out;\n"
        "void main(){\n"
        "   gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
        "   gl_TessLevelInner[0] = 1;\n"
        "   x[gl_InvocationID] = gl_InvocationID;\n"
        "}\n";
    char const *tesSource =
        "#version 450\n"
        "layout(triangles, equal_spacing, cw) in;\n"
        "layout(location=0) in int x[];\n"
        "void main(){\n"
        "   gl_Position.xyz = gl_TessCoord;\n"
        "   gl_Position.w = x[0] + x[1] + x[2];\n"
        "}\n";

    VkShaderObj vert_shader(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj tess_ctrl_shader(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj tess_eval_shader(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
    VkShaderObj frag_shader(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Tessellation requires patch-list topology plus an explicit patch size.
    VkPipelineInputAssemblyStateCreateInfo input_assembly_state{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
                                                                nullptr, 0, VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};

    VkPipelineTessellationStateCreateInfo tess_state{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.gp_ci_.pTessellationState = &tess_state;
    helper.gp_ci_.pInputAssemblyState = &input_assembly_state;
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), tess_ctrl_shader.GetStageCreateInfo(),
                             tess_eval_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.InitState();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
5166
TEST_F(VkPositiveLayerTest, CreatePipelineGeometryInputBlockPositive) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a user-defined interface block passed into the geometry shader. This is interesting "
        "because the 'extra' array level is not present on the member type, but on the block instance.");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().geometryShader) {
        printf("%s Device does not support geometry shaders; skipped.\n", kSkipPrefix);
        return;
    }

    // The per-vertex arrayness lives on the block instance (gs_in[]), not on
    // the member declaration inside the block.
    char const *gsSource =
        "#version 450\n"
        "layout(triangles) in;\n"
        "layout(triangle_strip, max_vertices=3) out;\n"
        "layout(location=0) in VertexData { vec4 x; } gs_in[];\n"
        "void main() {\n"
        "   gl_Position = gs_in[0].x;\n"
        "   EmitVertex();\n"
        "}\n";

    VkShaderObj vert_shader(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj geom_shader(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
    VkShaderObj frag_shader(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), geom_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.InitState();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
5202
TEST_F(VkPositiveLayerTest, CreatePipeline64BitAttributesPositive) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts basic use of 64bit vertex attributes. This is interesting because they consume "
        "multiple locations.");
    m_errorMonitor->ExpectSuccess();

    if (!EnableDeviceProfileLayer()) {
        printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().shaderFloat64) {
        printf("%s Device does not support 64bit vertex attributes; skipped.\n", kSkipPrefix);
        return;
    }
    // Set 64bit format to support VTX Buffer feature
    PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
    PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;

    // Load required functions
    if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
        return;
    }
    VkFormatProperties format_props;
    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, &format_props);
    format_props.bufferFeatures |= VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
    fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, format_props);

    VkVertexInputBindingDescription input_bindings[1];
    memset(input_bindings, 0, sizeof(input_bindings));

    // A dmat4 consumes two locations per column (0, 2, 4, 6); each column is
    // 32 bytes (four 64-bit components) apart in the vertex buffer.
    VkVertexInputAttributeDescription input_attribs[4];
    memset(input_attribs, 0, sizeof(input_attribs));
    for (uint32_t col = 0; col < 4; ++col) {
        input_attribs[col].location = col * 2;
        input_attribs[col].offset = col * 32;
        input_attribs[col].format = VK_FORMAT_R64G64B64A64_SFLOAT;
    }

    char const *vsSource =
        "#version 450\n"
        "\n"
        "layout(location=0) in dmat4 x;\n"
        "void main(){\n"
        "   gl_Position = vec4(x[0][0]);\n"
        "}\n";

    VkShaderObj vert_shader(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj frag_shader(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.vi_ci_.pVertexBindingDescriptions = input_bindings;
    helper.vi_ci_.vertexBindingDescriptionCount = 1;
    helper.vi_ci_.pVertexAttributeDescriptions = input_attribs;
    helper.vi_ci_.vertexAttributeDescriptionCount = 4;
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.InitState();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
5275
TEST_F(VkPositiveLayerTest, CreatePipelineInputAttachmentPositive) {
    TEST_DESCRIPTION("Positive test for a correctly matched input attachment");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // The fragment shader reads input attachment index 0, bound at set 0 /
    // binding 0 as an input-attachment descriptor.
    char const *fsSource =
        "#version 450\n"
        "\n"
        "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
        "layout(location=0) out vec4 color;\n"
        "void main() {\n"
        "   color = subpassLoad(x);\n"
        "}\n";

    VkShaderObj vert_shader(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj frag_shader(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipeline(m_device);
    pipeline.AddShader(&vert_shader);
    pipeline.AddShader(&frag_shader);
    pipeline.AddDefaultColorAttachment();
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkDescriptorSetLayoutBinding dsl_binding = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
    const VkDescriptorSetLayoutObj set_layout(m_device, {dsl_binding});
    const VkPipelineLayoutObj pipeline_layout(m_device, {&set_layout});

    // Attachment 0 is the color target; attachment 1 (GENERAL layout) is the
    // input attachment the shader samples from.
    VkAttachmentDescription attach_descs[2] = {
        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
         VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
         VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
    };
    VkAttachmentReference color_ref = {
        0,
        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
    };
    VkAttachmentReference input_ref = {
        1,
        VK_IMAGE_LAYOUT_GENERAL,
    };

    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input_ref, 1, &color_ref, nullptr, nullptr, 0, nullptr};

    VkRenderPassCreateInfo rp_info = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, attach_descs, 1, &subpass, 0,
                                      nullptr};
    VkRenderPass render_pass;
    const VkResult result = vkCreateRenderPass(m_device->device(), &rp_info, nullptr, &render_pass);
    ASSERT_VK_SUCCESS(result);

    // should be OK. would go wrong here if it's going to...
    pipeline.CreateVKPipeline(pipeline_layout.handle(), render_pass);

    m_errorMonitor->VerifyNotFound();

    vkDestroyRenderPass(m_device->device(), render_pass, nullptr);
}
5334
TEST_F(VkPositiveLayerTest, CreateComputePipelineMissingDescriptorUnusedPositive) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a compute pipeline which declares a descriptor-backed resource which is not "
        "provided, but the shader does not statically use it. This is interesting because it requires compute pipelines to have a "
        "proper descriptor use walk, which they didn't for some time.");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // The buffer at set 0 / binding 0 is declared but never referenced by the
    // shader body, so no descriptor needs to be bound for it.
    char const *csSource =
        "#version 450\n"
        "\n"
        "layout(local_size_x=1) in;\n"
        "layout(set=0, binding=0) buffer block { vec4 x; };\n"
        "void main(){\n"
        "   // x is not used.\n"
        "}\n";

    CreateComputePipelineHelper helper(*this);
    helper.InitInfo();
    helper.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    helper.InitState();
    helper.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
5361
TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsSampler) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a shader consuming only the sampler portion of a combined image + sampler");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // Binding 0 is a combined image+sampler; the shader consumes only its
    // sampler half (binding 1 provides the image).
    std::vector<VkDescriptorSetLayoutBinding> bindings = {
        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
    };

    char const *csSource =
        "#version 450\n"
        "\n"
        "layout(local_size_x=1) in;\n"
        "layout(set=0, binding=0) uniform sampler s;\n"
        "layout(set=0, binding=1) uniform texture2D t;\n"
        "layout(set=0, binding=2) buffer block { vec4 x; };\n"
        "void main() {\n"
        "   x = texture(sampler2D(t, s), vec2(0));\n"
        "}\n";
    CreateComputePipelineHelper pipe(*this);
    pipe.InitInfo();
    // Vector assignment replaces the previous resize+memcpy copy of the
    // layout bindings; the redundant second ExpectSuccess() call (already
    // issued at the top of the test) has also been removed.
    pipe.dsl_bindings_ = bindings;
    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    pipe.InitState();
    pipe.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
5396
TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsImage) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a shader consuming only the image portion of a combined image + sampler");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // Binding 0 is a combined image+sampler; the shader consumes only its
    // image half (binding 1 provides the sampler).
    std::vector<VkDescriptorSetLayoutBinding> bindings = {
        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
    };

    char const *csSource =
        "#version 450\n"
        "\n"
        "layout(local_size_x=1) in;\n"
        "layout(set=0, binding=0) uniform texture2D t;\n"
        "layout(set=0, binding=1) uniform sampler s;\n"
        "layout(set=0, binding=2) buffer block { vec4 x; };\n"
        "void main() {\n"
        "   x = texture(sampler2D(t, s), vec2(0));\n"
        "}\n";
    CreateComputePipelineHelper pipe(*this);
    pipe.InitInfo();
    // Vector assignment replaces the previous resize+memcpy copy of the
    // layout bindings; the redundant second ExpectSuccess() call (already
    // issued at the top of the test) has also been removed.
    pipe.dsl_bindings_ = bindings;
    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    pipe.InitState();
    pipe.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
5431
TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsBoth) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a shader consuming both the sampler and the image of a combined image+sampler but "
        "via separate variables");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // A single combined image+sampler at binding 0 backs BOTH shader
    // variables: the texture2D and the sampler are declared at the same
    // binding and consume the two halves separately.
    std::vector<VkDescriptorSetLayoutBinding> bindings = {
        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
    };

    char const *csSource =
        "#version 450\n"
        "\n"
        "layout(local_size_x=1) in;\n"
        "layout(set=0, binding=0) uniform texture2D t;\n"
        "layout(set=0, binding=0) uniform sampler s; // both binding 0!\n"
        "layout(set=0, binding=1) buffer block { vec4 x; };\n"
        "void main() {\n"
        "   x = texture(sampler2D(t, s), vec2(0));\n"
        "}\n";
    CreateComputePipelineHelper pipe(*this);
    pipe.InitInfo();
    // Vector assignment replaces the previous resize+memcpy copy of the
    // layout bindings; the redundant second ExpectSuccess() call (already
    // issued at the top of the test) has also been removed.
    pipe.dsl_bindings_ = bindings;
    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    pipe.InitState();
    pipe.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
5466
TEST_F(VkPositiveLayerTest, CreateDescriptorSetBindingWithIgnoredSamplers) {
    TEST_DESCRIPTION("Test that layers conditionally do ignore the pImmutableSamplers on vkCreateDescriptorSetLayout");

    bool has_properties2 = false;
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        has_properties2 = true;
    } else {
        printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    bool has_push_descriptors = false;
    if (has_properties2 && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);

        // In addition to the extension being supported we need to have at least one available
        // Some implementations report an invalid maxPushDescriptors of 0
        has_push_descriptors = GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0;
    } else {
        printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
               VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    // A deliberately bogus pattern pointer: if a layer dereferences
    // pImmutableSamplers for a non-sampler binding, it should crash/fault.
    const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
    const uint64_t fake_address_32 = 0xCDCDCDCD;
    const void *fake_pointer =
        sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
    const VkSampler *poison_sampler_ptr = reinterpret_cast<const VkSampler *>(fake_pointer);

    // regular descriptors
    m_errorMonitor->ExpectSuccess();
    {
        const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
            {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            {6, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            {7, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            {8, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
        };
        const VkDescriptorSetLayoutCreateInfo create_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr, 0,
                                                             static_cast<uint32_t>(size(non_sampler_bindings)),
                                                             non_sampler_bindings};
        VkDescriptorSetLayout layout;
        const VkResult result = vkCreateDescriptorSetLayout(m_device->device(), &create_info, nullptr, &layout);
        ASSERT_VK_SUCCESS(result);
        vkDestroyDescriptorSetLayout(m_device->device(), layout, nullptr);
    }
    m_errorMonitor->VerifyNotFound();

    if (has_push_descriptors) {
        // push descriptors
        m_errorMonitor->ExpectSuccess();
        {
            // Push descriptors disallow dynamic buffer types, so the list is
            // shorter than the regular-descriptor case above.
            const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
                {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
                {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
                {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
                {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
                {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
                {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
                {6, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, poison_sampler_ptr},
            };
            const VkDescriptorSetLayoutCreateInfo create_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr,
                                                                 VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
                                                                 static_cast<uint32_t>(size(non_sampler_bindings)),
                                                                 non_sampler_bindings};
            VkDescriptorSetLayout layout;
            const VkResult result = vkCreateDescriptorSetLayout(m_device->device(), &create_info, nullptr, &layout);
            ASSERT_VK_SUCCESS(result);
            vkDestroyDescriptorSetLayout(m_device->device(), layout, nullptr);
        }
        m_errorMonitor->VerifyNotFound();
    }
}
TEST_F(VkPositiveLayerTest, GpuValidationInlineUniformBlock) {
    TEST_DESCRIPTION("GPU validation: Make sure inline uniform blocks don't generate false validation errors");
    m_errorMonitor->ExpectSuccess();
    VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
    VkValidationFeaturesEXT features = {};
    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    features.enabledValidationFeatureCount = 1;
    features.pEnabledValidationFeatures = enables;
    bool descriptor_indexing = CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names,
                                                                              m_device_extension_names, &features, m_errorMonitor);
    if (DeviceIsMockICD() || DeviceSimulation()) {
        printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
        return;
    }
    VkPhysicalDeviceFeatures2KHR features2 = {};
    auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
    auto inline_uniform_block_features = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(&indexing_features);
    bool inline_uniform_block = DeviceExtensionSupported(gpu(), nullptr, VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
    if (!(descriptor_indexing && inline_uniform_block)) {
        printf("Descriptor indexing and/or inline uniform block not supported Skipping test\n");
        return;
    }
    m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    m_device_extension_names.push_back(VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&inline_uniform_block_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    if (!indexing_features.descriptorBindingPartiallyBound || !inline_uniform_block_features.inlineUniformBlock) {
        printf("Not all features supported, skipping test\n");
        return;
    }
    auto inline_uniform_props = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockPropertiesEXT>();
    auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&inline_uniform_props);
    vkGetPhysicalDeviceProperties2(gpu(), &prop2);

    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
    if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
        printf("%s GPU-Assisted validation test requires Vulkan 1.1+.\n", kSkipPrefix);
        return;
    }
    auto c_queue = m_device->GetDefaultComputeQueue();
    if (nullptr == c_queue) {
        printf("Compute not supported, skipping test\n");
        return;
    }

    // Host-visible storage buffer the shader writes its result into.
    uint32_t qfi = 0;
    VkBufferCreateInfo bci = {};
    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bci.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
    bci.size = 4;
    bci.queueFamilyIndexCount = 1;
    bci.pQueueFamilyIndices = &qfi;
    VkBufferObj buffer0;
    VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    buffer0.init(*m_device, bci, mem_props);

    // Binding 1 (the inline uniform block) is marked partially bound so GPU
    // validation must tolerate the unwritten leading bytes.
    VkDescriptorBindingFlagsEXT ds_binding_flags[2] = {};
    ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags[1] = {};
    layout_createinfo_binding_flags[0].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
    layout_createinfo_binding_flags[0].pNext = NULL;
    layout_createinfo_binding_flags[0].bindingCount = 2;
    layout_createinfo_binding_flags[0].pBindingFlags = ds_binding_flags;

    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                           {1, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, 20, VK_SHADER_STAGE_ALL,
                                            nullptr},  // 16 bytes for ivec4, 4 more for int
                                       },
                                       0, layout_createinfo_binding_flags, 0);
    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});

    VkDescriptorBufferInfo buffer_info[1] = {};
    buffer_info[0].buffer = buffer0.handle();
    buffer_info[0].offset = 0;
    buffer_info[0].range = sizeof(uint32_t);

    const uint32_t test_data = 0xdeadca7;
    VkWriteDescriptorSetInlineUniformBlockEXT write_inline_uniform = {};
    write_inline_uniform.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
    write_inline_uniform.dataSize = 4;
    write_inline_uniform.pData = &test_data;

    VkWriteDescriptorSet descriptor_writes[2] = {};
    descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_writes[0].dstSet = descriptor_set.set_;
    descriptor_writes[0].dstBinding = 0;
    descriptor_writes[0].descriptorCount = 1;
    descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
    descriptor_writes[0].pBufferInfo = buffer_info;

    descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_writes[1].dstSet = descriptor_set.set_;
    descriptor_writes[1].dstBinding = 1;
    descriptor_writes[1].dstArrayElement = 16;  // Skip first 16 bytes (dummy)
    descriptor_writes[1].descriptorCount = 4;   // Write 4 bytes to val
    descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
    descriptor_writes[1].pNext = &write_inline_uniform;
    vkUpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);

    // Shader copies the inline-uniform-block value out to the storage buffer.
    char const *csSource =
        "#version 450\n"
        "#extension GL_EXT_nonuniform_qualifier : enable\n "
        "layout(set = 0, binding = 0) buffer StorageBuffer { uint index; } u_index;"
        "layout(set = 0, binding = 1) uniform inlineubodef { ivec4 dummy; int val; } inlineubo;\n"

        "void main() {\n"
        "    u_index.index = inlineubo.val;\n"
        "}\n";

    // Stack-allocated so the VkShaderObj (and its module) is cleaned up by
    // RAII; the original heap-allocated it with `new` and leaked the object.
    VkShaderObj shader_module(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);

    VkPipelineShaderStageCreateInfo stage;
    stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stage.pNext = nullptr;
    stage.flags = 0;
    stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
    stage.module = shader_module.handle();
    stage.pName = "main";
    stage.pSpecializationInfo = nullptr;

    // CreateComputePipelines
    VkComputePipelineCreateInfo pipeline_info = {};
    pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
    pipeline_info.pNext = nullptr;
    pipeline_info.flags = 0;
    pipeline_info.layout = pipeline_layout.handle();
    pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
    pipeline_info.basePipelineIndex = -1;
    pipeline_info.stage = stage;

    VkPipeline c_pipeline;
    // Check creation succeeded before binding the pipeline (was previously ignored).
    ASSERT_VK_SUCCESS(vkCreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &c_pipeline));

    m_commandBuffer->begin();
    vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, c_pipeline);
    vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_layout.handle(), 0, 1,
                            &descriptor_set.set_, 0, nullptr);
    vkCmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
    m_commandBuffer->end();

    VkSubmitInfo submit_info = {};
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &m_commandBuffer->handle();
    vkQueueSubmit(c_queue->handle(), 1, &submit_info, VK_NULL_HANDLE);
    // Wait on the queue the work was actually submitted to; the original
    // waited on the default graphics queue, racing the buffer readback below.
    vkQueueWaitIdle(c_queue->handle());
    m_errorMonitor->VerifyNotFound();
    vkDestroyPipeline(m_device->handle(), c_pipeline, NULL);

    uint32_t *data = (uint32_t *)buffer0.memory().map();
    // Original used `=` (assignment) here, which always passed and clobbered
    // the result; `==` actually verifies the shader wrote the expected value.
    ASSERT_TRUE(*data == test_data);
    buffer0.memory().unmap();
}
5707
// Positive test: with VK_KHR_maintenance1 enabled, a negative-height viewport
// (used to flip the Y axis) is legal and must not trigger validation errors.
TEST_F(VkPositiveLayerTest, Maintenance1Tests) {
    TEST_DESCRIPTION("Validate various special cases for the Maintenance1_KHR extension");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
        printf("%s Maintenance1 Extension not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitState());

    m_errorMonitor->ExpectSuccess();

    VkCommandBufferObj command_buffer(m_device, m_commandPool);
    command_buffer.begin();
    // A negative viewport height would be an error without Maintenance1;
    // here the record must succeed silently.
    VkViewport flipped_viewport = {};
    flipped_viewport.x = 0;
    flipped_viewport.y = 0;
    flipped_viewport.width = 16;
    flipped_viewport.height = -16;
    flipped_viewport.minDepth = 0;
    flipped_viewport.maxDepth = 1;
    vkCmdSetViewport(command_buffer.handle(), 0, 1, &flipped_viewport);
    command_buffer.end();

    m_errorMonitor->VerifyNotFound();
}
5731
// Positive test: a well-formed pNext chain (VK_NV_dedicated_allocation structs
// on buffer creation and memory allocation) must be accepted by the
// parameter_validation and unique_objects layers without any error.
TEST_F(VkPositiveLayerTest, ValidStructPNext) {
    TEST_DESCRIPTION("Verify that a valid pNext value is handled correctly");

    // Positive test to check parameter_validation and unique_objects support for NV_dedicated_allocation
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME);
    } else {
        printf("%s VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME Extension not supported, skipping test\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    m_errorMonitor->ExpectSuccess();

    // Request a dedicated allocation for the buffer via the pNext chain.
    VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info = {};
    dedicated_buffer_create_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
    dedicated_buffer_create_info.pNext = nullptr;
    dedicated_buffer_create_info.dedicatedAllocation = VK_TRUE;

    uint32_t queue_family_index = 0;
    VkBufferCreateInfo buffer_create_info = {};
    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buffer_create_info.pNext = &dedicated_buffer_create_info;
    buffer_create_info.size = 1024;
    buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buffer_create_info.queueFamilyIndexCount = 1;
    buffer_create_info.pQueueFamilyIndices = &queue_family_index;

    VkBuffer buffer;
    VkResult err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
    ASSERT_VK_SUCCESS(err);

    VkMemoryRequirements memory_reqs;
    vkGetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);

    // The dedicated allocation names the buffer it is dedicated to; the image
    // member stays VK_NULL_HANDLE since this is a buffer allocation.
    VkDedicatedAllocationMemoryAllocateInfoNV dedicated_memory_info = {};
    dedicated_memory_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV;
    dedicated_memory_info.pNext = nullptr;
    dedicated_memory_info.buffer = buffer;
    dedicated_memory_info.image = VK_NULL_HANDLE;

    VkMemoryAllocateInfo memory_info = {};
    memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    memory_info.pNext = &dedicated_memory_info;
    memory_info.allocationSize = memory_reqs.size;

    // Pick any memory type compatible with the buffer's requirements.
    bool pass;
    pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
    ASSERT_TRUE(pass);

    VkDeviceMemory buffer_memory;
    err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
    ASSERT_VK_SUCCESS(err);

    err = vkBindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
    ASSERT_VK_SUCCESS(err);

    vkDestroyBuffer(m_device->device(), buffer, NULL);
    vkFreeMemory(m_device->device(), buffer_memory, NULL);

    m_errorMonitor->VerifyNotFound();
}
5795
// Positive test: VK_POLYGON_MODE_FILL is always legal, even on a device whose
// features report no support for non-solid fill modes.
TEST_F(VkPositiveLayerTest, PSOPolygonModeValid) {
    TEST_DESCRIPTION("Verify that using a solid polygon fill mode works correctly.");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Build a throwaway device with fillModeNonSolid artificially disabled, so
    // only FILL is a valid polygonMode on it.
    std::vector<const char *> no_extensions;
    auto limited_features = m_device->phy().features();
    limited_features.fillModeNonSolid = false;
    VkDeviceObj test_device(0, gpu(), no_extensions, &limited_features);

    VkRenderpassObj render_pass(&test_device);

    const VkPipelineLayoutObj pipeline_layout(&test_device);

    VkPipelineRasterizationStateCreateInfo raster_state = {};
    raster_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    raster_state.pNext = nullptr;
    raster_state.lineWidth = 1.0f;
    raster_state.rasterizerDiscardEnable = false;

    VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(&test_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Creating the pipeline with polygonMode = FILL must produce no errors.
    m_errorMonitor->ExpectSuccess();
    {
        VkPipelineObj pipe(&test_device);
        pipe.AddShader(&vs);
        pipe.AddShader(&fs);
        pipe.AddDefaultColorAttachment();
        raster_state.polygonMode = VK_POLYGON_MODE_FILL;
        pipe.SetRasterization(&raster_state);
        pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
    }
    m_errorMonitor->VerifyNotFound();
}
5836
// Positive test: a very long chain of submits, each waiting on the previous
// submit's semaphore and signaling a fresh one, must validate cleanly.
TEST_F(VkPositiveLayerTest, LongSemaphoreChain) {
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    VkResult result;

    std::vector<VkSemaphore> chain;

    const int kChainLength = 32768;
    VkPipelineStageFlags wait_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    for (int i = 0; i < kChainLength; i++) {
        VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
        VkSemaphore semaphore;
        result = vkCreateSemaphore(m_device->device(), &sci, nullptr, &semaphore);
        ASSERT_VK_SUCCESS(result);

        chain.push_back(semaphore);

        // Each submit waits on the previously signaled semaphore (none on the
        // first iteration) and signals the one just created.
        const bool has_predecessor = chain.size() > 1;
        VkSubmitInfo si = {};
        si.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        si.waitSemaphoreCount = has_predecessor ? 1u : 0u;
        si.pWaitSemaphores = has_predecessor ? &chain[chain.size() - 2] : nullptr;
        si.pWaitDstStageMask = &wait_stage;
        si.signalSemaphoreCount = 1;
        si.pSignalSemaphores = &chain[chain.size() - 1];
        result = vkQueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
        ASSERT_VK_SUCCESS(result);
    }

    // Fence off the end of the chain so the host can wait for completion.
    VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
    VkFence fence;
    result = vkCreateFence(m_device->device(), &fci, nullptr, &fence);
    ASSERT_VK_SUCCESS(result);
    VkSubmitInfo final_si = {};
    final_si.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    final_si.waitSemaphoreCount = 1;
    final_si.pWaitSemaphores = &chain.back();
    final_si.pWaitDstStageMask = &wait_stage;
    result = vkQueueSubmit(m_device->m_queue, 1, &final_si, fence);
    ASSERT_VK_SUCCESS(result);

    vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);

    for (auto semaphore : chain) vkDestroySemaphore(m_device->device(), semaphore, nullptr);

    vkDestroyFence(m_device->device(), fence, nullptr);

    m_errorMonitor->VerifyNotFound();
}
5885
// Positive test: export one semaphore's payload to a platform handle
// (Win32 KMT handle or POSIX fd), import it into a second semaphore, then
// exercise both through queue submit and sparse-bind signal/wait pairs.
// Because the two semaphores share a payload after import, signaling one
// satisfies a wait on the other.
TEST_F(VkPositiveLayerTest, ExternalSemaphore) {
#ifdef _WIN32
    const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME;
    const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR;
#else
    const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME;
    const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
#endif
    // Check for external semaphore instance extensions
    if (InstanceExtensionSupported(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // Check for external semaphore device extensions
    if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
        m_device_extension_names.push_back(extension_name);
        m_device_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
    } else {
        printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Check for external semaphore import and export capability
    VkPhysicalDeviceExternalSemaphoreInfoKHR esi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, nullptr,
                                                    handle_type};
    VkExternalSemaphorePropertiesKHR esp = {VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, nullptr};
    auto vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
        (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)vkGetInstanceProcAddr(
            instance(), "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(gpu(), &esi, &esp);

    if (!(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) ||
        !(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR)) {
        printf("%s External semaphore does not support importing and exporting, skipping test\n", kSkipPrefix);
        return;
    }

    VkResult err;
    m_errorMonitor->ExpectSuccess();

    // Create a semaphore to export payload from
    VkExportSemaphoreCreateInfoKHR esci = {VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, nullptr, handle_type};
    VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &esci, 0};

    VkSemaphore export_semaphore;
    err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &export_semaphore);
    ASSERT_VK_SUCCESS(err);

    // Create a semaphore to import payload into
    sci.pNext = nullptr;
    VkSemaphore import_semaphore;
    err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &import_semaphore);
    ASSERT_VK_SUCCESS(err);

#ifdef _WIN32
    // Export semaphore payload to an opaque handle
    HANDLE handle = nullptr;
    VkSemaphoreGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_semaphore,
                                            handle_type};
    auto vkGetSemaphoreWin32HandleKHR =
        (PFN_vkGetSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreWin32HandleKHR");
    err = vkGetSemaphoreWin32HandleKHR(m_device->device(), &ghi, &handle);
    ASSERT_VK_SUCCESS(err);

    // Import opaque handle exported above
    VkImportSemaphoreWin32HandleInfoKHR ihi = {
        VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, nullptr, import_semaphore, 0, handle_type, handle, nullptr};
    auto vkImportSemaphoreWin32HandleKHR =
        (PFN_vkImportSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreWin32HandleKHR");
    err = vkImportSemaphoreWin32HandleKHR(m_device->device(), &ihi);
    ASSERT_VK_SUCCESS(err);
#else
    // Export semaphore payload to an opaque handle
    int fd = 0;
    VkSemaphoreGetFdInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, nullptr, export_semaphore, handle_type};
    auto vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreFdKHR");
    err = vkGetSemaphoreFdKHR(m_device->device(), &ghi, &fd);
    ASSERT_VK_SUCCESS(err);

    // Import opaque handle exported above
    VkImportSemaphoreFdInfoKHR ihi = {
        VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, nullptr, import_semaphore, 0, handle_type, fd};
    auto vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreFdKHR");
    err = vkImportSemaphoreFdKHR(m_device->device(), &ihi);
    ASSERT_VK_SUCCESS(err);
#endif

    // Signal the exported semaphore and wait on the imported semaphore
    // (shared payload: each signal/wait pair operates on the same state).
    VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
    VkSubmitInfo si[] = {
        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
    };
    err = vkQueueSubmit(m_device->m_queue, 4, si, VK_NULL_HANDLE);
    ASSERT_VK_SUCCESS(err);

    // Sparse binding also consumes/signals semaphores; cover that path too
    // when the implementation supports it.
    if (m_device->phy().features().sparseBinding) {
        // Signal the imported semaphore and wait on the exported semaphore
        VkBindSparseInfo bi[] = {
            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &import_semaphore},
            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &export_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &import_semaphore},
            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &export_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
        };
        err = vkQueueBindSparse(m_device->m_queue, 4, bi, VK_NULL_HANDLE);
        ASSERT_VK_SUCCESS(err);
    }

    // Cleanup
    err = vkQueueWaitIdle(m_device->m_queue);
    ASSERT_VK_SUCCESS(err);
    vkDestroySemaphore(m_device->device(), export_semaphore, nullptr);
    vkDestroySemaphore(m_device->device(), import_semaphore, nullptr);

    m_errorMonitor->VerifyNotFound();
}
6010
// Positive test: export one fence's payload to a platform handle (Win32
// handle or POSIX fd), import it into a second fence, then signal/wait across
// the pair in both directions. After import the fences share a payload, so
// signaling either one satisfies a host wait on the other.
TEST_F(VkPositiveLayerTest, ExternalFence) {
#ifdef _WIN32
    const auto extension_name = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME;
    const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
#else
    const auto extension_name = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME;
    const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
#endif
    // Check for external fence instance extensions
    if (InstanceExtensionSupported(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // Check for external fence device extensions
    if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
        m_device_extension_names.push_back(extension_name);
        m_device_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
    } else {
        printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Check for external fence import and export capability
    VkPhysicalDeviceExternalFenceInfoKHR efi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, nullptr, handle_type};
    VkExternalFencePropertiesKHR efp = {VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, nullptr};
    auto vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)vkGetInstanceProcAddr(
        instance(), "vkGetPhysicalDeviceExternalFencePropertiesKHR");
    vkGetPhysicalDeviceExternalFencePropertiesKHR(gpu(), &efi, &efp);

    if (!(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR) ||
        !(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR)) {
        printf("%s External fence does not support importing and exporting, skipping test\n", kSkipPrefix);
        return;
    }

    VkResult err;
    m_errorMonitor->ExpectSuccess();

    // Create a fence to export payload from
    VkFence export_fence;
    {
        VkExportFenceCreateInfoKHR efci = {VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, nullptr, handle_type};
        VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, &efci, 0};
        err = vkCreateFence(m_device->device(), &fci, nullptr, &export_fence);
        ASSERT_VK_SUCCESS(err);
    }

    // Create a fence to import payload into
    VkFence import_fence;
    {
        VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
        err = vkCreateFence(m_device->device(), &fci, nullptr, &import_fence);
        ASSERT_VK_SUCCESS(err);
    }

#ifdef _WIN32
    // Export fence payload to an opaque handle
    HANDLE handle = nullptr;
    {
        VkFenceGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_fence, handle_type};
        auto vkGetFenceWin32HandleKHR =
            (PFN_vkGetFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceWin32HandleKHR");
        err = vkGetFenceWin32HandleKHR(m_device->device(), &ghi, &handle);
        ASSERT_VK_SUCCESS(err);
    }

    // Import opaque handle exported above
    {
        VkImportFenceWin32HandleInfoKHR ifi = {
            VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, nullptr, import_fence, 0, handle_type, handle, nullptr};
        auto vkImportFenceWin32HandleKHR =
            (PFN_vkImportFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceWin32HandleKHR");
        err = vkImportFenceWin32HandleKHR(m_device->device(), &ifi);
        ASSERT_VK_SUCCESS(err);
    }
#else
    // Export fence payload to an opaque handle
    int fd = 0;
    {
        VkFenceGetFdInfoKHR gfi = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, nullptr, export_fence, handle_type};
        auto vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceFdKHR");
        err = vkGetFenceFdKHR(m_device->device(), &gfi, &fd);
        ASSERT_VK_SUCCESS(err);
    }

    // Import opaque handle exported above
    {
        VkImportFenceFdInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, nullptr, import_fence, 0, handle_type, fd};
        auto vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceFdKHR");
        err = vkImportFenceFdKHR(m_device->device(), &ifi);
        ASSERT_VK_SUCCESS(err);
    }
#endif

    // Signal the exported fence and wait on the imported fence
    // (empty submits are used purely to signal the fence).
    vkQueueSubmit(m_device->m_queue, 0, nullptr, export_fence);
    vkWaitForFences(m_device->device(), 1, &import_fence, VK_TRUE, 1000000000);
    vkResetFences(m_device->device(), 1, &import_fence);
    vkQueueSubmit(m_device->m_queue, 0, nullptr, export_fence);
    vkWaitForFences(m_device->device(), 1, &import_fence, VK_TRUE, 1000000000);
    vkResetFences(m_device->device(), 1, &import_fence);

    // Signal the imported fence and wait on the exported fence
    vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
    vkWaitForFences(m_device->device(), 1, &export_fence, VK_TRUE, 1000000000);
    vkResetFences(m_device->device(), 1, &export_fence);
    vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
    vkWaitForFences(m_device->device(), 1, &export_fence, VK_TRUE, 1000000000);
    vkResetFences(m_device->device(), 1, &export_fence);

    // Cleanup
    err = vkQueueWaitIdle(m_device->m_queue);
    ASSERT_VK_SUCCESS(err);
    vkDestroyFence(m_device->device(), export_fence, nullptr);
    vkDestroyFence(m_device->device(), import_fence, nullptr);

    m_errorMonitor->VerifyNotFound();
}
6135
// Positive test: destroying VK_NULL_HANDLE fences concurrently from two
// threads must not be reported as a threading collision, since VK_NULL_HANDLE
// is not a real object.
TEST_F(VkPositiveLayerTest, ThreadNullFenceCollision) {
    test_platform_thread worker;

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");

    ASSERT_NO_FATAL_FAILURE(Init());

    struct thread_data_struct worker_data;
    worker_data.device = m_device->device();
    worker_data.bailout = false;
    m_errorMonitor->SetBailout(&worker_data.bailout);

    // Hammer vkDestroyFence(VK_NULL_HANDLE) from this thread while a second
    // thread (ReleaseNullFence) does the same; no error may be produced.
    test_platform_thread_create(&worker, ReleaseNullFence, (void *)&worker_data);
    for (int iteration = 0; iteration < 40000; ++iteration) {
        vkDestroyFence(m_device->device(), VK_NULL_HANDLE, NULL);
    }
    test_platform_thread_join(worker, NULL);

    m_errorMonitor->SetBailout(NULL);

    m_errorMonitor->VerifyNotFound();
}
6160
// Positive test: vkCmdClearColorImage with well-formed subresource ranges,
// both explicit counts and the VK_REMAINING_* sentinels.
TEST_F(VkPositiveLayerTest, ClearColorImageWithValidRange) {
    TEST_DESCRIPTION("Record clear color with a valid VkImageSubresourceRange");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkImageObj image(m_device);
    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
    ASSERT_TRUE(image.create_info().arrayLayers == 1);
    ASSERT_TRUE(image.initialized());
    image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

    const VkClearColorValue clear_color = {{0.0f, 0.0f, 0.0f, 1.0f}};

    m_commandBuffer->begin();
    const auto cb = m_commandBuffer->handle();

    // Explicit single-mip / single-layer range.
    {
        m_errorMonitor->ExpectSuccess();
        VkImageSubresourceRange explicit_range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
        vkCmdClearColorImage(cb, image.handle(), image.Layout(), &clear_color, 1, &explicit_range);
        m_errorMonitor->VerifyNotFound();
    }

    // VK_REMAINING_MIP_LEVELS / VK_REMAINING_ARRAY_LAYERS cover the rest of
    // the resource and are equally valid.
    {
        m_errorMonitor->ExpectSuccess();
        VkImageSubresourceRange remaining_range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0,
                                                   VK_REMAINING_ARRAY_LAYERS};
        vkCmdClearColorImage(cb, image.handle(), image.Layout(), &clear_color, 1, &remaining_range);
        m_errorMonitor->VerifyNotFound();
    }
}
6194
// Positive test: vkCmdClearDepthStencilImage with well-formed subresource
// ranges, both explicit counts and the VK_REMAINING_* sentinels.
TEST_F(VkPositiveLayerTest, ClearDepthStencilWithValidRange) {
    TEST_DESCRIPTION("Record clear depth with a valid VkImageSubresourceRange");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    auto depth_format = FindSupportedDepthStencilFormat(gpu());
    if (!depth_format) {
        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
        return;
    }

    VkImageObj image(m_device);
    image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
    ASSERT_TRUE(image.create_info().arrayLayers == 1);
    ASSERT_TRUE(image.initialized());
    const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
    image.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

    const VkClearDepthStencilValue clear_value = {};

    m_commandBuffer->begin();
    const auto cb = m_commandBuffer->handle();

    // Explicit single-mip / single-layer range.
    {
        m_errorMonitor->ExpectSuccess();
        VkImageSubresourceRange explicit_range = {ds_aspect, 0, 1, 0, 1};
        vkCmdClearDepthStencilImage(cb, image.handle(), image.Layout(), &clear_value, 1, &explicit_range);
        m_errorMonitor->VerifyNotFound();
    }

    // VK_REMAINING_MIP_LEVELS / VK_REMAINING_ARRAY_LAYERS cover the rest of
    // the resource and are equally valid.
    {
        m_errorMonitor->ExpectSuccess();
        VkImageSubresourceRange remaining_range = {ds_aspect, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
        vkCmdClearDepthStencilImage(cb, image.handle(), image.Layout(), &clear_value, 1, &remaining_range);
        m_errorMonitor->VerifyNotFound();
    }
}
6235
TEST_F(VkPositiveLayerTest,CreateGraphicsPipelineWithIgnoredPointers)6236 TEST_F(VkPositiveLayerTest, CreateGraphicsPipelineWithIgnoredPointers) {
6237 TEST_DESCRIPTION("Create Graphics Pipeline with pointers that must be ignored by layers");
6238
6239 ASSERT_NO_FATAL_FAILURE(Init());
6240
6241 m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
6242 ASSERT_TRUE(m_depth_stencil_fmt != 0);
6243
6244 m_depthStencil->Init(m_device, static_cast<int32_t>(m_width), static_cast<int32_t>(m_height), m_depth_stencil_fmt);
6245
6246 ASSERT_NO_FATAL_FAILURE(InitRenderTarget(m_depthStencil->BindInfo()));
6247
6248 const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
6249 const uint64_t fake_address_32 = 0xCDCDCDCD;
6250 void *hopefully_undereferencable_pointer =
6251 sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
6252
6253 VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
6254
6255 const VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info{
6256 VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
6257 nullptr, // pNext
6258 0, // flags
6259 0,
6260 nullptr, // bindings
6261 0,
6262 nullptr // attributes
6263 };
6264
6265 const VkPipelineInputAssemblyStateCreateInfo pipeline_input_assembly_state_create_info{
6266 VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
6267 nullptr, // pNext
6268 0, // flags
6269 VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
6270 VK_FALSE // primitive restart
6271 };
6272
6273 const VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info_template{
6274 VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
6275 nullptr, // pNext
6276 0, // flags
6277 VK_FALSE, // depthClamp
6278 VK_FALSE, // rasterizerDiscardEnable
6279 VK_POLYGON_MODE_FILL,
6280 VK_CULL_MODE_NONE,
6281 VK_FRONT_FACE_COUNTER_CLOCKWISE,
6282 VK_FALSE, // depthBias
6283 0.0f,
6284 0.0f,
6285 0.0f, // depthBias params
6286 1.0f // lineWidth
6287 };
6288
6289 VkPipelineLayout pipeline_layout;
6290 {
6291 VkPipelineLayoutCreateInfo pipeline_layout_create_info{
6292 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
6293 nullptr, // pNext
6294 0, // flags
6295 0,
6296 nullptr, // layouts
6297 0,
6298 nullptr // push constants
6299 };
6300
6301 VkResult err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_create_info, nullptr, &pipeline_layout);
6302 ASSERT_VK_SUCCESS(err);
6303 }
6304
6305 // try disabled rasterizer and no tessellation
6306 {
6307 m_errorMonitor->ExpectSuccess();
6308
6309 VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
6310 pipeline_rasterization_state_create_info_template;
6311 pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_TRUE;
6312
6313 VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
6314 VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
6315 nullptr, // pNext
6316 0, // flags
6317 1, // stageCount
6318 &vs.GetStageCreateInfo(),
6319 &pipeline_vertex_input_state_create_info,
6320 &pipeline_input_assembly_state_create_info,
6321 reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>(hopefully_undereferencable_pointer),
6322 reinterpret_cast<const VkPipelineViewportStateCreateInfo *>(hopefully_undereferencable_pointer),
6323 &pipeline_rasterization_state_create_info,
6324 reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>(hopefully_undereferencable_pointer),
6325 reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
6326 reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
6327 nullptr, // dynamic states
6328 pipeline_layout,
6329 m_renderPass,
6330 0, // subpass
6331 VK_NULL_HANDLE,
6332 0};
6333
6334 VkPipeline pipeline;
6335 vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
6336
6337 m_errorMonitor->VerifyNotFound();
6338
6339 vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
6340 }
6341
6342 const VkPipelineMultisampleStateCreateInfo pipeline_multisample_state_create_info{
6343 VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
6344 nullptr, // pNext
6345 0, // flags
6346 VK_SAMPLE_COUNT_1_BIT,
6347 VK_FALSE, // sample shading
6348 0.0f, // minSampleShading
6349 nullptr, // pSampleMask
6350 VK_FALSE, // alphaToCoverageEnable
6351 VK_FALSE // alphaToOneEnable
6352 };
6353
6354 // try enabled rasterizer but no subpass attachments
6355 {
6356 m_errorMonitor->ExpectSuccess();
6357
6358 VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
6359 pipeline_rasterization_state_create_info_template;
6360 pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;
6361
6362 VkViewport viewport = {0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f};
6363 VkRect2D scissor = {{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}};
6364
6365 const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
6366 VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
6367 nullptr, // pNext
6368 0, // flags
6369 1,
6370 &viewport,
6371 1,
6372 &scissor};
6373
6374 VkRenderPass render_pass;
6375 {
6376 VkSubpassDescription subpass_desc = {};
6377
6378 VkRenderPassCreateInfo render_pass_create_info{
6379 VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
6380 nullptr, // pNext
6381 0, // flags
6382 0,
6383 nullptr, // attachments
6384 1,
6385 &subpass_desc,
6386 0,
6387 nullptr // subpass dependencies
6388 };
6389
6390 VkResult err = vkCreateRenderPass(m_device->handle(), &render_pass_create_info, nullptr, &render_pass);
6391 ASSERT_VK_SUCCESS(err);
6392 }
6393
6394 VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
6395 VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
6396 nullptr, // pNext
6397 0, // flags
6398 1, // stageCount
6399 &vs.GetStageCreateInfo(),
6400 &pipeline_vertex_input_state_create_info,
6401 &pipeline_input_assembly_state_create_info,
6402 nullptr,
6403 &pipeline_viewport_state_create_info,
6404 &pipeline_rasterization_state_create_info,
6405 &pipeline_multisample_state_create_info,
6406 reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
6407 reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
6408 nullptr, // dynamic states
6409 pipeline_layout,
6410 render_pass,
6411 0, // subpass
6412 VK_NULL_HANDLE,
6413 0};
6414
6415 VkPipeline pipeline;
6416 vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
6417
6418 m_errorMonitor->VerifyNotFound();
6419
6420 vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
6421 vkDestroyRenderPass(m_device->handle(), render_pass, nullptr);
6422 }
6423
6424 // try dynamic viewport and scissor
6425 {
6426 m_errorMonitor->ExpectSuccess();
6427
6428 VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
6429 pipeline_rasterization_state_create_info_template;
6430 pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;
6431
6432 const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
6433 VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
6434 nullptr, // pNext
6435 0, // flags
6436 1,
6437 reinterpret_cast<const VkViewport *>(hopefully_undereferencable_pointer),
6438 1,
6439 reinterpret_cast<const VkRect2D *>(hopefully_undereferencable_pointer)};
6440
6441 const VkPipelineDepthStencilStateCreateInfo pipeline_depth_stencil_state_create_info{
6442 VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
6443 nullptr, // pNext
6444 0, // flags
6445 };
6446
6447 const VkPipelineColorBlendAttachmentState pipeline_color_blend_attachment_state = {};
6448
6449 const VkPipelineColorBlendStateCreateInfo pipeline_color_blend_state_create_info{
6450 VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
6451 nullptr, // pNext
6452 0, // flags
6453 VK_FALSE,
6454 VK_LOGIC_OP_CLEAR,
6455 1,
6456 &pipeline_color_blend_attachment_state,
6457 {0.0f, 0.0f, 0.0f, 0.0f}};
6458
6459 const VkDynamicState dynamic_states[2] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
6460
6461 const VkPipelineDynamicStateCreateInfo pipeline_dynamic_state_create_info{
6462 VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
6463 nullptr, // pNext
6464 0, // flags
6465 2, dynamic_states};
6466
6467 VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
6468 nullptr, // pNext
6469 0, // flags
6470 1, // stageCount
6471 &vs.GetStageCreateInfo(),
6472 &pipeline_vertex_input_state_create_info,
6473 &pipeline_input_assembly_state_create_info,
6474 nullptr,
6475 &pipeline_viewport_state_create_info,
6476 &pipeline_rasterization_state_create_info,
6477 &pipeline_multisample_state_create_info,
6478 &pipeline_depth_stencil_state_create_info,
6479 &pipeline_color_blend_state_create_info,
6480 &pipeline_dynamic_state_create_info, // dynamic states
6481 pipeline_layout,
6482 m_renderPass,
6483 0, // subpass
6484 VK_NULL_HANDLE,
6485 0};
6486
6487 VkPipeline pipeline;
6488 vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
6489
6490 m_errorMonitor->VerifyNotFound();
6491
6492 vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
6493 }
6494
6495 vkDestroyPipelineLayout(m_device->handle(), pipeline_layout, nullptr);
6496 }
6497
TEST_F(VkPositiveLayerTest, ExternalMemory) {
    TEST_DESCRIPTION("Perform a copy through a pair of buffers linked by external memory");

    // Platform-specific external-memory handle type: opaque Win32 handles on
    // Windows, opaque POSIX file descriptors everywhere else.
#ifdef _WIN32
    const auto ext_mem_extension_name = VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME;
    const auto handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
#else
    const auto ext_mem_extension_name = VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME;
    const auto handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
#endif

    // Check for external memory instance extensions
    std::vector<const char *> reqd_instance_extensions = {
        {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}};
    for (auto extension_name : reqd_instance_extensions) {
        if (InstanceExtensionSupported(extension_name)) {
            m_instance_extension_names.push_back(extension_name);
        } else {
            printf("%s Required instance extension %s not supported, skipping test\n", kSkipPrefix, extension_name);
            return;
        }
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // Check for import/export capability of a transfer src/dst buffer with the chosen handle type
    VkPhysicalDeviceExternalBufferInfoKHR ebi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, nullptr, 0,
                                                 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, handle_type};
    VkExternalBufferPropertiesKHR ebp = {VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, nullptr, {0, 0, 0}};
    auto vkGetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)vkGetInstanceProcAddr(
        instance(), "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
    ASSERT_TRUE(vkGetPhysicalDeviceExternalBufferPropertiesKHR != nullptr);
    vkGetPhysicalDeviceExternalBufferPropertiesKHR(gpu(), &ebi, &ebp);
    if (!(ebp.externalMemoryProperties.compatibleHandleTypes & handle_type) ||
        !(ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR) ||
        !(ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR)) {
        printf("%s External buffer does not support importing and exporting, skipping test\n", kSkipPrefix);
        return;
    }

    // Check if dedicated allocation is required
    bool dedicated_allocation =
        ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR;
    if (dedicated_allocation) {
        if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
            m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
            m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        } else {
            printf("%s Dedicated allocation extension not supported, skipping test\n", kSkipPrefix);
            return;
        }
    }

    // Check for external memory device extensions
    if (DeviceExtensionSupported(gpu(), nullptr, ext_mem_extension_name)) {
        m_device_extension_names.push_back(ext_mem_extension_name);
        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
    } else {
        printf("%s External memory extension not supported, skipping test\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);

    VkMemoryPropertyFlags mem_flags = 0;
    const VkDeviceSize buffer_size = 1024;

    // Create export and import buffers
    const VkExternalMemoryBufferCreateInfoKHR external_buffer_info = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
                                                                      nullptr, handle_type};
    auto buffer_info = VkBufferObj::create_info(buffer_size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
    buffer_info.pNext = &external_buffer_info;
    VkBufferObj buffer_export;
    buffer_export.init_no_mem(*m_device, buffer_info);
    VkBufferObj buffer_import;
    buffer_import.init_no_mem(*m_device, buffer_info);

    // Allocation info
    auto alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_export.memory_requirements(), mem_flags);

    // Add export allocation info to pNext chain
    VkExportMemoryAllocateInfoKHR export_info = {VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, nullptr, handle_type};
    alloc_info.pNext = &export_info;

    // Add dedicated allocation info to pNext chain if required
    VkMemoryDedicatedAllocateInfoKHR dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, nullptr,
                                                       VK_NULL_HANDLE, buffer_export.handle()};
    if (dedicated_allocation) {
        export_info.pNext = &dedicated_info;
    }

    // Allocate memory to be exported
    vk_testing::DeviceMemory memory_export;
    memory_export.init(*m_device, alloc_info);

    // Bind exported memory
    buffer_export.bind_memory(memory_export, 0);

#ifdef _WIN32
    // Export memory to handle
    // vkGetMemoryWin32HandleKHR is a device-level command of an enabled device
    // extension, so query it through vkGetDeviceProcAddr rather than relying on
    // a loader trampoline from vkGetInstanceProcAddr.
    auto vkGetMemoryWin32HandleKHR =
        (PFN_vkGetMemoryWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetMemoryWin32HandleKHR");
    ASSERT_TRUE(vkGetMemoryWin32HandleKHR != nullptr);
    VkMemoryGetWin32HandleInfoKHR mghi = {VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, nullptr, memory_export.handle(),
                                          handle_type};
    HANDLE handle;
    ASSERT_VK_SUCCESS(vkGetMemoryWin32HandleKHR(m_device->device(), &mghi, &handle));

    VkImportMemoryWin32HandleInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, nullptr, handle_type,
                                                    handle};
#else
    // Export memory to fd
    // vkGetMemoryFdKHR is device-level; query through vkGetDeviceProcAddr (see note above).
    auto vkGetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetMemoryFdKHR");
    ASSERT_TRUE(vkGetMemoryFdKHR != nullptr);
    VkMemoryGetFdInfoKHR mgfi = {VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, nullptr, memory_export.handle(), handle_type};
    int fd;
    ASSERT_VK_SUCCESS(vkGetMemoryFdKHR(m_device->device(), &mgfi, &fd));

    VkImportMemoryFdInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, nullptr, handle_type, fd};
#endif

    // Import memory
    alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_import.memory_requirements(), mem_flags);
    alloc_info.pNext = &import_info;
    vk_testing::DeviceMemory memory_import;
    memory_import.init(*m_device, alloc_info);

    // Bind imported memory
    buffer_import.bind_memory(memory_import, 0);

    // Create test buffers and fill input buffer
    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    VkBufferObj buffer_input;
    buffer_input.init_as_src_and_dst(*m_device, buffer_size, mem_prop);
    auto input_mem = (uint8_t *)buffer_input.memory().map();
    for (uint32_t i = 0; i < buffer_size; i++) {
        input_mem[i] = (i & 0xFF);
    }
    buffer_input.memory().unmap();
    VkBufferObj buffer_output;
    buffer_output.init_as_src_and_dst(*m_device, buffer_size, mem_prop);

    // Copy from input buffer to output buffer through the exported/imported memory
    m_commandBuffer->begin();
    VkBufferCopy copy_info = {0, 0, buffer_size};
    vkCmdCopyBuffer(m_commandBuffer->handle(), buffer_input.handle(), buffer_export.handle(), 1, &copy_info);
    // Insert memory barrier to guarantee copy order
    VkMemoryBarrier mem_barrier = {VK_STRUCTURE_TYPE_MEMORY_BARRIER, nullptr, VK_ACCESS_TRANSFER_WRITE_BIT,
                                   VK_ACCESS_TRANSFER_READ_BIT};
    vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1,
                         &mem_barrier, 0, nullptr, 0, nullptr);
    vkCmdCopyBuffer(m_commandBuffer->handle(), buffer_import.handle(), buffer_output.handle(), 1, &copy_info);
    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer();

    m_errorMonitor->VerifyNotFound();
}
6655
TEST_F(VkPositiveLayerTest, ParameterLayerFeatures2Capture) {
    TEST_DESCRIPTION("Ensure parameter_validation_layer correctly captures physical device features");
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    VkResult err;
    m_errorMonitor->ExpectSuccess();

    // Query the device's feature set; this struct is chained into
    // VkDeviceCreateInfo::pNext below, so the layers must capture features from
    // the pNext chain rather than from pEnabledFeatures (which stays null).
    VkPhysicalDeviceFeatures2KHR features2;
    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
    features2.pNext = nullptr;

    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);

    // We're not creating a valid m_device, but the phy wrapper is useful
    vk_testing::PhysicalDevice physical_device(gpu());
    vk_testing::QueueCreateInfoArray queue_info(physical_device.queue_properties());
    // Only request creation with queuefamilies that have at least one queue
    std::vector<VkDeviceQueueCreateInfo> create_queue_infos;
    auto qci = queue_info.data();
    for (uint32_t i = 0; i < queue_info.size(); ++i) {
        if (qci[i].queueCount) {
            create_queue_infos.push_back(qci[i]);
        }
    }

    // Create a device directly (bypassing the framework's m_device) with
    // features supplied only via the features2 pNext chain.
    VkDeviceCreateInfo dev_info = {};
    dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    dev_info.pNext = &features2;
    dev_info.flags = 0;
    dev_info.queueCreateInfoCount = create_queue_infos.size();
    dev_info.pQueueCreateInfos = create_queue_infos.data();
    dev_info.enabledLayerCount = 0;
    dev_info.ppEnabledLayerNames = nullptr;
    dev_info.enabledExtensionCount = 0;
    dev_info.ppEnabledExtensionNames = nullptr;
    dev_info.pEnabledFeatures = nullptr;

    VkDevice device;
    err = vkCreateDevice(gpu(), &dev_info, nullptr, &device);
    ASSERT_VK_SUCCESS(err);

    if (features2.features.samplerAnisotropy) {
        // Test that the parameter layer is caching the features correctly using CreateSampler
        VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
        // If the features were not captured correctly, this should cause an error
        sampler_ci.anisotropyEnable = VK_TRUE;
        sampler_ci.maxAnisotropy = physical_device.properties().limits.maxSamplerAnisotropy;

        VkSampler sampler = VK_NULL_HANDLE;
        err = vkCreateSampler(device, &sampler_ci, nullptr, &sampler);
        ASSERT_VK_SUCCESS(err);
        vkDestroySampler(device, sampler, nullptr);
    } else {
        printf("%s Feature samplerAnisotropy not enabled; parameter_layer check skipped.\n", kSkipPrefix);
    }

    // Verify the core validation layer has captured the physical device features by creating a query pool.
    if (features2.features.pipelineStatisticsQuery) {
        VkQueryPool query_pool;
        VkQueryPoolCreateInfo qpci{};
        qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
        qpci.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
        qpci.queryCount = 1;
        err = vkCreateQueryPool(device, &qpci, nullptr, &query_pool);
        ASSERT_VK_SUCCESS(err);

        vkDestroyQueryPool(device, query_pool, nullptr);
    } else {
        printf("%s Feature pipelineStatisticsQuery not enabled; core_validation_layer check skipped.\n", kSkipPrefix);
    }

    vkDestroyDevice(device, nullptr);

    m_errorMonitor->VerifyNotFound();
}
6742
TEST_F(VkPositiveLayerTest, GetMemoryRequirements2) {
    TEST_DESCRIPTION(
        "Get memory requirements with VK_KHR_get_memory_requirements2 instead of core entry points and verify layers do not emit "
        "errors when objects are bound and used");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // Check for the VK_KHR_get_memory_requirements2 extension
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    } else {
        printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);

    // Create a test buffer
    VkBufferObj buffer;
    buffer.init_no_mem(*m_device,
                       VkBufferObj::create_info(1024, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT));

    // Use extension to get buffer memory requirements
    auto vkGetBufferMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetBufferMemoryRequirements2KHR>(
        vkGetDeviceProcAddr(m_device->device(), "vkGetBufferMemoryRequirements2KHR"));
    ASSERT_TRUE(vkGetBufferMemoryRequirements2KHR != nullptr);
    VkBufferMemoryRequirementsInfo2KHR buffer_info = {VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, nullptr,
                                                      buffer.handle()};
    VkMemoryRequirements2KHR buffer_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
    vkGetBufferMemoryRequirements2KHR(m_device->device(), &buffer_info, &buffer_reqs);

    // Allocate and bind buffer memory
    vk_testing::DeviceMemory buffer_memory;
    buffer_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_reqs.memoryRequirements, 0));
    vkBindBufferMemory(m_device->device(), buffer.handle(), buffer_memory.handle(), 0);

    // Create a test image
    auto image_ci = vk_testing::Image::create_info();
    image_ci.imageType = VK_IMAGE_TYPE_2D;
    image_ci.extent.width = 32;
    image_ci.extent.height = 32;
    image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
    image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    vk_testing::Image image;
    image.init_no_mem(*m_device, image_ci);

    // Use extension to get image memory requirements
    auto vkGetImageMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetImageMemoryRequirements2KHR>(
        vkGetDeviceProcAddr(m_device->device(), "vkGetImageMemoryRequirements2KHR"));
    ASSERT_TRUE(vkGetImageMemoryRequirements2KHR != nullptr);
    VkImageMemoryRequirementsInfo2KHR image_info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, nullptr,
                                                    image.handle()};
    VkMemoryRequirements2KHR image_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
    vkGetImageMemoryRequirements2KHR(m_device->device(), &image_info, &image_reqs);

    // Allocate and bind image memory
    vk_testing::DeviceMemory image_memory;
    image_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image_reqs.memoryRequirements, 0));
    vkBindImageMemory(m_device->device(), image.handle(), image_memory.handle(), 0);

    // Now execute arbitrary commands that use the test buffer and image
    m_commandBuffer->begin();

    // Fill buffer with 0
    vkCmdFillBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_WHOLE_SIZE, 0);

    // Transition and clear image
    const auto subresource_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT);
    const auto barrier = image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
                                                    VK_IMAGE_LAYOUT_GENERAL, subresource_range);
    vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
                         nullptr, 0, nullptr, 1, &barrier);
    const VkClearColorValue color = {};
    vkCmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color, 1, &subresource_range);

    // Submit and verify no validation errors
    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer();
    m_errorMonitor->VerifyNotFound();
}
6826
TEST_F(VkPositiveLayerTest, BindMemory2) {
    TEST_DESCRIPTION(
        "Bind memory with VK_KHR_bind_memory2 instead of core entry points and verify layers do not emit errors when objects are "
        "used");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // Check for the VK_KHR_bind_memory2 extension
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    } else {
        printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);

    // Create a test buffer
    VkBufferObj buffer;
    buffer.init_no_mem(*m_device, VkBufferObj::create_info(1024, VK_BUFFER_USAGE_TRANSFER_DST_BIT));

    // Allocate buffer memory
    vk_testing::DeviceMemory buffer_memory;
    buffer_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer.memory_requirements(), 0));

    // Bind buffer memory with extension
    auto vkBindBufferMemory2KHR =
        reinterpret_cast<PFN_vkBindBufferMemory2KHR>(vkGetDeviceProcAddr(m_device->device(), "vkBindBufferMemory2KHR"));
    ASSERT_TRUE(vkBindBufferMemory2KHR != nullptr);
    VkBindBufferMemoryInfoKHR buffer_bind_info = {VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, nullptr, buffer.handle(),
                                                  buffer_memory.handle(), 0};
    vkBindBufferMemory2KHR(m_device->device(), 1, &buffer_bind_info);

    // Create a test image
    auto image_ci = vk_testing::Image::create_info();
    image_ci.imageType = VK_IMAGE_TYPE_2D;
    image_ci.extent.width = 32;
    image_ci.extent.height = 32;
    image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
    image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    vk_testing::Image image;
    image.init_no_mem(*m_device, image_ci);

    // Allocate image memory
    vk_testing::DeviceMemory image_memory;
    image_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image.memory_requirements(), 0));

    // Bind image memory with extension
    auto vkBindImageMemory2KHR =
        reinterpret_cast<PFN_vkBindImageMemory2KHR>(vkGetDeviceProcAddr(m_device->device(), "vkBindImageMemory2KHR"));
    ASSERT_TRUE(vkBindImageMemory2KHR != nullptr);
    VkBindImageMemoryInfoKHR image_bind_info = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, nullptr, image.handle(),
                                                image_memory.handle(), 0};
    vkBindImageMemory2KHR(m_device->device(), 1, &image_bind_info);

    // Now execute arbitrary commands that use the test buffer and image
    m_commandBuffer->begin();

    // Fill buffer with 0
    vkCmdFillBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_WHOLE_SIZE, 0);

    // Transition and clear image
    const auto subresource_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT);
    const auto barrier = image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
                                                    VK_IMAGE_LAYOUT_GENERAL, subresource_range);
    vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
                         nullptr, 0, nullptr, 1, &barrier);
    const VkClearColorValue color = {};
    vkCmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color, 1, &subresource_range);

    // Submit and verify no validation errors
    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer();
    m_errorMonitor->VerifyNotFound();
}
6905
TEST_F(VkPositiveLayerTest, CreatePipelineWithCoreChecksDisabled) {
    TEST_DESCRIPTION("Test CreatePipeline while the CoreChecks validation object is disabled");

    // Turn off the CoreChecks validation object via VK_EXT_validation_features
    // before the instance is created.
    VkValidationFeatureDisableEXT disabled_checks[] = {VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT};
    VkValidationFeaturesEXT validation_features = {};
    validation_features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    validation_features.pDisabledValidationFeatures = disabled_checks;
    validation_features.disabledValidationFeatureCount = 1;

    const VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, pool_flags, &validation_features));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // PATCH_LIST topology without tessellation stages — presumably a combination
    // CoreChecks would flag; with CoreChecks disabled no error should be emitted.
    VkPipelineInputAssemblyStateCreateInfo input_assembly_ci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr,
                                                             0, VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.gp_ci_.pInputAssemblyState = &input_assembly_ci;
    helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    helper.InitState();
    m_errorMonitor->ExpectSuccess();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
6933
TEST_F(VkPositiveLayerTest, CreatePipeineWithTessellationDomainOrigin) {
    TEST_DESCRIPTION(
        "Test CreatePipeline when VkPipelineTessellationStateCreateInfo.pNext include "
        "VkPipelineTessellationDomainOriginStateCreateInfo");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().tessellationShader) {
        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
        return;
    }

    // Full tessellation pipeline: vertex, tess control, tess eval, fragment.
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};

    // Chain the domain-origin struct into the tessellation state's pNext.
    // Note: pNext is a const void*, so use nullptr rather than VK_NULL_HANDLE
    // (the latter is reserved for Vulkan object handles).
    VkPipelineTessellationDomainOriginStateCreateInfo tessellationDomainOriginStateInfo = {
        VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, nullptr,
        VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT};

    // patchControlPoints = 3
    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
                                               &tessellationDomainOriginStateInfo, 0, 3};

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.gp_ci_.pTessellationState = &tsci;
    pipe.gp_ci_.pInputAssemblyState = &iasci;
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    m_errorMonitor->ExpectSuccess();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
6972
TEST_F(VkPositiveLayerTest, MultiplaneImageCopyBufferToImage) {
    TEST_DESCRIPTION("Positive test of multiplane copy buffer to image");
    // Enable KHR multiplane req'd extensions
    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
    if (mp_extensions) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    SetTargetApiVersion(VK_API_VERSION_1_1);
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    if (mp_extensions) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    } else {
        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkImageCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    ci.pNext = NULL;
    ci.flags = 0;
    ci.imageType = VK_IMAGE_TYPE_2D;
    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR;  // All planes of equal extent
    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
    ci.extent = {16, 16, 1};
    ci.mipLevels = 1;
    ci.arrayLayers = 1;
    ci.samples = VK_SAMPLE_COUNT_1_BIT;
    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
    if (!supported) {
        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
        return;  // Assume there's low ROI on searching for different mp formats
    }

    VkImageObj image(m_device);
    image.init(&ci);

    m_commandBuffer->reset();
    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    image.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_ACCESS_TRANSFER_WRITE_BIT,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

    // Copy each plane of the 3-plane image from its own staging buffer; each
    // plane of this 444 format is 16x16 single-byte texels (256 bytes).
    std::array<VkImageAspectFlagBits, 3> aspects = {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT,
                                                    VK_IMAGE_ASPECT_PLANE_2_BIT};
    std::array<VkBufferObj, 3> buffers;
    VkMemoryPropertyFlags reqs = 0;

    VkBufferImageCopy copy = {};
    copy.imageSubresource.layerCount = 1;
    copy.imageExtent.depth = 1;
    copy.imageExtent.height = 16;
    copy.imageExtent.width = 16;

    for (size_t i = 0; i < aspects.size(); ++i) {
        buffers[i].init_as_src(*m_device, (VkDeviceSize)16 * 16 * 1, reqs);
        copy.imageSubresource.aspectMask = aspects[i];
        // NOTE(review): fixed mangled "&copy" (was corrupted to an HTML entity).
        vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffers[i].handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                               1, &copy);
    }
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
7050
// Exercises multi-planar (YCbCr) image paths that must pass validation cleanly:
//  1) plane 0 -> plane 2 vkCmdCopyImage within a 3-plane image,
//  2) per-plane memory binding of a DISJOINT image via the KHR/core "2" entry points,
//  3) an ASPECT_COLOR layout transition applied before a buffer->plane copy,
//  4) layout tracking for an image view of a multi-planar image used as a
//     combined-image-sampler descriptor in a draw.
TEST_F(VkPositiveLayerTest, MultiplaneImageTests) {
    TEST_DESCRIPTION("Positive test of multiplane image operations");

    // Enable KHR multiplane req'd extensions
    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
    if (mp_extensions) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    SetTargetApiVersion(VK_API_VERSION_1_1);
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    // All three device extensions are required for the multiplane paths below.
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    if (mp_extensions) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    } else {
        printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
        return;
    }
    // Resettable command buffers are needed for the vkResetCommandBuffer() call below.
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Create aliased function pointers for 1.0 and 1.1 contexts

    PFN_vkBindImageMemory2KHR vkBindImageMemory2Function = nullptr;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2Function = nullptr;
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2Function = nullptr;

    if (DeviceValidationVersion() >= VK_API_VERSION_1_1) {
        // Core 1.1 entry points are available; use them directly.
        vkBindImageMemory2Function = vkBindImageMemory2;
        vkGetImageMemoryRequirements2Function = vkGetImageMemoryRequirements2;
        vkGetPhysicalDeviceMemoryProperties2Function = vkGetPhysicalDeviceMemoryProperties2;
    } else {
        // Fall back to the KHR-suffixed extension entry points on a 1.0 device.
        vkBindImageMemory2Function = (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_device->handle(), "vkBindImageMemory2KHR");
        vkGetImageMemoryRequirements2Function =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_device->handle(), "vkGetImageMemoryRequirements2KHR");
        vkGetPhysicalDeviceMemoryProperties2Function = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetDeviceProcAddr(
            m_device->handle(), "vkGetPhysicalDeviceMemoryProperties2KHR");
    }

    if (!vkBindImageMemory2Function || !vkGetImageMemoryRequirements2Function || !vkGetPhysicalDeviceMemoryProperties2Function) {
        printf("%s Did not find required device extension support; test skipped.\n", kSkipPrefix);
        return;
    }

    VkImageCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    ci.pNext = NULL;
    ci.flags = 0;
    ci.imageType = VK_IMAGE_TYPE_2D;
    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR;  // All planes of equal extent
    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
    ci.extent = {128, 128, 1};
    ci.mipLevels = 1;
    ci.arrayLayers = 1;
    ci.samples = VK_SAMPLE_COUNT_1_BIT;
    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    // Verify format
    VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
    if (!supported) {
        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
        return;  // Assume there's low ROI on searching for different mp formats
    }

    VkImage image;
    ASSERT_VK_SUCCESS(vkCreateImage(device(), &ci, NULL, &image));

    // Allocate & bind memory
    VkPhysicalDeviceMemoryProperties phys_mem_props;
    vkGetPhysicalDeviceMemoryProperties(gpu(), &phys_mem_props);
    VkMemoryRequirements mem_reqs;
    vkGetImageMemoryRequirements(device(), image, &mem_reqs);
    VkDeviceMemory mem_obj = VK_NULL_HANDLE;
    VkMemoryPropertyFlagBits mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    // Pick the first memory type that satisfies both the image's requirements and
    // the requested property flags.
    for (uint32_t type = 0; type < phys_mem_props.memoryTypeCount; type++) {
        if ((mem_reqs.memoryTypeBits & (1 << type)) &&
            ((phys_mem_props.memoryTypes[type].propertyFlags & mem_props) == mem_props)) {
            VkMemoryAllocateInfo alloc_info = {};
            alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
            alloc_info.allocationSize = mem_reqs.size;
            alloc_info.memoryTypeIndex = type;
            ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &mem_obj));
            break;
        }
    }

    if (VK_NULL_HANDLE == mem_obj) {
        printf("%s Unable to allocate image memory. Skipping test.\n", kSkipPrefix);
        vkDestroyImage(device(), image, NULL);
        return;
    }
    ASSERT_VK_SUCCESS(vkBindImageMemory(device(), image, mem_obj, 0));

    // Copy plane 0 to plane 2
    VkImageCopy copyRegion = {};
    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR;
    copyRegion.srcSubresource.mipLevel = 0;
    copyRegion.srcSubresource.baseArrayLayer = 0;
    copyRegion.srcSubresource.layerCount = 1;
    copyRegion.srcOffset = {0, 0, 0};
    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
    copyRegion.dstSubresource.mipLevel = 0;
    copyRegion.dstSubresource.baseArrayLayer = 0;
    copyRegion.dstSubresource.layerCount = 1;
    copyRegion.dstOffset = {0, 0, 0};
    copyRegion.extent.width = 128;
    copyRegion.extent.height = 128;
    copyRegion.extent.depth = 1;

    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    // Src and dst are the same image but different planes, which must be legal.
    m_commandBuffer->CopyImage(image, VK_IMAGE_LAYOUT_GENERAL, image, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();

    vkFreeMemory(device(), mem_obj, NULL);
    vkDestroyImage(device(), image, NULL);

    // Repeat bind test on a DISJOINT multi-planar image, with per-plane memory objects, using API2 variants
    //
    features |= VK_FORMAT_FEATURE_DISJOINT_BIT;
    ci.flags = VK_IMAGE_CREATE_DISJOINT_BIT;
    if (ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features)) {
        ASSERT_VK_SUCCESS(vkCreateImage(device(), &ci, NULL, &image));

        // Allocate & bind memory
        VkPhysicalDeviceMemoryProperties2 phys_mem_props2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2};
        vkGetPhysicalDeviceMemoryProperties2Function(gpu(), &phys_mem_props2);
        // Chain VkImagePlaneMemoryRequirementsInfo so requirements are queried per plane.
        VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
        VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
        mem_req_info2.pNext = &image_plane_req;
        mem_req_info2.image = image;
        VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};

        VkDeviceMemory p0_mem, p1_mem, p2_mem;
        mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};

        // Plane 0
        image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
        vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
        uint32_t mem_type = 0;
        for (mem_type = 0; mem_type < phys_mem_props2.memoryProperties.memoryTypeCount; mem_type++) {
            if ((mem_reqs2.memoryRequirements.memoryTypeBits & (1 << mem_type)) &&
                ((phys_mem_props2.memoryProperties.memoryTypes[mem_type].propertyFlags & mem_props) == mem_props)) {
                alloc_info.memoryTypeIndex = mem_type;
                break;
            }
        }
        alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
        ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p0_mem));

        // Plane 1 & 2 use same memory type
        image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
        vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
        alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
        ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p1_mem));

        image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
        vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
        alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
        ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p2_mem));

        // Set up 3-plane binding
        // NOTE(review): no VkBindImagePlaneMemoryInfo is chained to any bind_info
        // here; presumably the validation under test accepts that for this case —
        // confirm against the disjoint-image binding rules if this ever fails.
        VkBindImageMemoryInfo bind_info[3];
        for (int plane = 0; plane < 3; plane++) {
            bind_info[plane].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
            bind_info[plane].pNext = nullptr;
            bind_info[plane].image = image;
            bind_info[plane].memoryOffset = 0;
        }
        bind_info[0].memory = p0_mem;
        bind_info[1].memory = p1_mem;
        bind_info[2].memory = p2_mem;

        m_errorMonitor->ExpectSuccess();
        vkBindImageMemory2Function(device(), 3, bind_info);
        m_errorMonitor->VerifyNotFound();

        vkFreeMemory(device(), p0_mem, NULL);
        vkFreeMemory(device(), p1_mem, NULL);
        vkFreeMemory(device(), p2_mem, NULL);
        vkDestroyImage(device(), image, NULL);
    }

    // Test that changing the layout of ASPECT_COLOR also changes the layout of the individual planes
    VkBufferObj buffer;
    VkMemoryPropertyFlags reqs = 0;
    buffer.init_as_src(*m_device, (VkDeviceSize)128 * 128 * 3, reqs);
    VkImageObj mpimage(m_device);
    mpimage.Init(256, 256, 1, VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
                 VK_IMAGE_TILING_OPTIMAL, 0);
    VkBufferImageCopy copy_region = {};
    copy_region.bufferRowLength = 128;
    copy_region.bufferImageHeight = 128;
    copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
    copy_region.imageSubresource.layerCount = 1;
    copy_region.imageExtent.height = 64;
    copy_region.imageExtent.width = 64;
    copy_region.imageExtent.depth = 1;

    // NOTE(review): no ExpectSuccess() precedes this section; the VerifyNotFound()
    // below relies on the monitor state set earlier in the test — confirm intended.
    vkResetCommandBuffer(m_commandBuffer->handle(), 0);
    m_commandBuffer->begin();
    // Transition via ASPECT_COLOR; the copy below then targets PLANE_1, which only
    // passes if the color-aspect transition covered the individual planes.
    mpimage.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
    vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), mpimage.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
                           &copy_region);
    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer(false);
    m_errorMonitor->VerifyNotFound();

    // Test to verify that views of multiplanar images have layouts tracked correctly
    // by changing the image's layout then using a view of that image
    VkImageView view;
    VkImageViewCreateInfo ivci = {};
    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    ivci.image = mpimage.handle();
    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
    ivci.format = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR;
    ivci.subresourceRange.layerCount = 1;
    ivci.subresourceRange.baseMipLevel = 0;
    ivci.subresourceRange.levelCount = 1;
    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    vkCreateImageView(m_device->device(), &ivci, nullptr, &view);

    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                       });

    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    VkSampler sampler;

    VkResult err;
    err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
    ASSERT_VK_SUCCESS(err);

    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
    descriptor_set.WriteDescriptorImageInfo(0, view, sampler);
    descriptor_set.UpdateDescriptorSets();

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    VkPipelineObj pipe(m_device);
    pipe.AddShader(&vs);
    pipe.AddShader(&fs);
    pipe.AddDefaultColorAttachment();
    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    // Transition the whole image (COLOR aspect) to SHADER_READ_ONLY, then sample it
    // through the view created above; layout tracking for the view must match.
    VkImageMemoryBarrier img_barrier = {};
    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    img_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
    img_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    img_barrier.image = mpimage.handle();
    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    img_barrier.subresourceRange.baseArrayLayer = 0;
    img_barrier.subresourceRange.baseMipLevel = 0;
    img_barrier.subresourceRange.layerCount = 1;
    img_barrier.subresourceRange.levelCount = 1;
    vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                         VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
    vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                            &descriptor_set.set_, 0, nullptr);

    VkViewport viewport = {0, 0, 16, 16, 0, 1};
    VkRect2D scissor = {{0, 0}, {16, 16}};
    vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
    vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);

    m_commandBuffer->Draw(1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
    VkSubmitInfo submit_info = {};
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &m_commandBuffer->handle();
    vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    m_errorMonitor->VerifyNotFound();

    vkQueueWaitIdle(m_device->m_queue);
    vkDestroyImageView(m_device->device(), view, NULL);
    vkDestroySampler(m_device->device(), sampler, nullptr);
}
7348
// VkApplicationInfo::apiVersion == 0 is defined by the spec as valid (treated as
// "ignored"), so instance creation must succeed without validation errors.
TEST_F(VkPositiveLayerTest, ApiVersionZero) {
    TEST_DESCRIPTION("Check that apiVersion = 0 is valid.");
    m_errorMonitor->ExpectSuccess();
    // app_info is presumably consumed by InitFramework() when the instance is
    // created, so the assignment must precede it — confirm against the framework.
    app_info.apiVersion = 0U;
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    m_errorMonitor->VerifyNotFound();
}
7356
// Smoke test for VK_NV_ray_tracing: builds the helper's default ray tracing
// pipeline and expects no validation errors.
TEST_F(VkPositiveLayerTest, RayTracingPipelineNV) {
    TEST_DESCRIPTION("Test VK_NV_ray_tracing.");

    if (!CreateNVRayTracingPipelineHelper::InitInstanceExtensions(*this, m_instance_extension_names)) {
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    // The entry point must resolve; the helper's extension setup depends on
    // GetPhysicalDeviceProperties2 support being present.
    auto fpGetPhysicalDeviceFeatures2 = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
        vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR"));
    ASSERT_TRUE(fpGetPhysicalDeviceFeatures2 != nullptr);

    if (!CreateNVRayTracingPipelineHelper::InitDeviceExtensions(*this, m_device_extension_names)) {
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Leave the helper's default pipeline state untouched.
    auto leave_defaults = [](CreateNVRayTracingPipelineHelper &) {};
    CreateNVRayTracingPipelineHelper::OneshotPositiveTest(*this, leave_defaults);
}
7377
// Builds pipelines that write gl_ViewportMask[] (VK_NV_viewport_array2) from each
// of the allowed last-vertex-processing stages (vertex, tess eval, geometry) and
// expects pipeline creation to pass validation in every configuration.
TEST_F(VkPositiveLayerTest, ViewportArray2NV) {
    TEST_DESCRIPTION("Test to validate VK_NV_viewport_array2");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    VkPhysicalDeviceFeatures available_features = {};
    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));

    // The test needs two viewports plus tessellation and geometry stages.
    if (!available_features.multiViewport) {
        printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    if (!available_features.tessellationShader) {
        printf("%s VkPhysicalDeviceFeatures::tessellationShader is not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    if (!available_features.geometryShader) {
        printf("%s VkPhysicalDeviceFeatures::geometryShader is not supported, skipping tests\n", kSkipPrefix);
        return;
    }

    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    const char tcs_src[] = R"(
        #version 450
        layout(vertices = 3) out;

        void main() {
            gl_TessLevelOuter[0] = 4.0f;
            gl_TessLevelOuter[1] = 4.0f;
            gl_TessLevelOuter[2] = 4.0f;
            gl_TessLevelInner[0] = 3.0f;

            gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
        })";

    // Create tessellation control and fragment shader here since they will not be
    // modified by the different test cases.
    VkShaderObj tcs(m_device, tcs_src, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Two side-by-side viewports/scissors — gl_ViewportMask = 3 selects both.
    std::vector<VkViewport> vps = {{0.0f, 0.0f, m_width / 2.0f, m_height}, {m_width / 2.0f, 0.0f, m_width / 2.0f, m_height}};
    std::vector<VkRect2D> scs = {
        {{0, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}},
        {{static_cast<int32_t>(m_width) / 2, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}}};

    // Ordered so that `stage >= X` below means "stage X is enabled in the pipeline".
    enum class TestStage { VERTEX = 0, TESSELLATION_EVAL = 1, GEOMETRY = 2 };
    std::array<TestStage, 3> vertex_stages = {{TestStage::VERTEX, TestStage::TESSELLATION_EVAL, TestStage::GEOMETRY}};

    // Verify that the usage of gl_ViewportMask[] in the allowed vertex processing
    // stages does not cause any errors.
    for (auto stage : vertex_stages) {
        m_errorMonitor->ExpectSuccess();

        VkPipelineInputAssemblyStateCreateInfo iaci = {VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO};
        // Tessellation requires patch topology.
        iaci.topology = (stage != TestStage::VERTEX) ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

        VkPipelineTessellationStateCreateInfo tsci = {VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO};
        tsci.patchControlPoints = 3;

        const VkPipelineLayoutObj pl(m_device);

        VkPipelineObj pipe(m_device);
        pipe.AddDefaultColorAttachment();
        pipe.SetInputAssembly(&iaci);
        pipe.SetViewport(vps);
        pipe.SetScissor(scs);
        pipe.AddShader(&fs);

        std::stringstream vs_src, tes_src, geom_src;

        vs_src << R"(
            #version 450
            #extension GL_NV_viewport_array2 : require

            vec2 positions[3] = { vec2( 0.0f, -0.5f),
                                  vec2( 0.5f,  0.5f),
                                  vec2(-0.5f,  0.5f)
                                };
            void main() {)";
        // Write viewportMask if the vertex shader is the last vertex processing stage.
        if (stage == TestStage::VERTEX) {
            vs_src << "gl_ViewportMask[0] = 3;\n";
        }
        vs_src << R"(
                gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
            })";

        VkShaderObj vs(m_device, vs_src.str().c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
        pipe.AddShader(&vs);

        std::unique_ptr<VkShaderObj> tes, geom;

        if (stage >= TestStage::TESSELLATION_EVAL) {
            tes_src << R"(
                #version 450
                #extension GL_NV_viewport_array2 : require
                layout(triangles) in;

                void main() {
                   gl_Position = (gl_in[0].gl_Position * gl_TessCoord.x +
                                  gl_in[1].gl_Position * gl_TessCoord.y +
                                  gl_in[2].gl_Position * gl_TessCoord.z);)";
            // Write viewportMask if the tess eval shader is the last vertex processing stage.
            if (stage == TestStage::TESSELLATION_EVAL) {
                tes_src << "gl_ViewportMask[0] = 3;\n";
            }
            tes_src << "}";

            tes = std::unique_ptr<VkShaderObj>(
                new VkShaderObj(m_device, tes_src.str().c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this));
            pipe.AddShader(tes.get());
            pipe.AddShader(&tcs);
            pipe.SetTessellation(&tsci);
        }

        if (stage >= TestStage::GEOMETRY) {
            geom_src << R"(
                #version 450
                #extension GL_NV_viewport_array2 : require
                layout(triangles) in;
                layout(triangle_strip, max_vertices = 3) out;

                void main() {
                   gl_ViewportMask[0] = 3;
                   for(int i = 0; i < 3; ++i) {
                       gl_Position = gl_in[i].gl_Position;
                       EmitVertex();
                   }
                })";

            geom =
                std::unique_ptr<VkShaderObj>(new VkShaderObj(m_device, geom_src.str().c_str(), VK_SHADER_STAGE_GEOMETRY_BIT, this));
            pipe.AddShader(geom.get());
        }

        pipe.CreateVKPipeline(pl.handle(), renderPass());
        m_errorMonitor->VerifyNotFound();
    }
}
7526
// Positive test: resetting a query pool from the host via vkResetQueryPoolEXT,
// with the hostQueryReset feature enabled, must raise no validation errors.
TEST_F(VkPositiveLayerTest, HostQueryResetSuccess) {
    // This is a positive test. No failures are expected.
    TEST_DESCRIPTION("Use vkResetQueryPoolEXT normally");

    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);

    // Enable the hostQueryReset feature through the features2 pNext chain.
    VkPhysicalDeviceHostQueryResetFeaturesEXT reset_features{};
    reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
    reset_features.hostQueryReset = VK_TRUE;

    VkPhysicalDeviceFeatures2 features2{};
    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    features2.pNext = &reset_features;

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    auto reset_query_pool_fn = (PFN_vkResetQueryPoolEXT)vkGetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");

    m_errorMonitor->ExpectSuccess();

    // Create a one-query timestamp pool, reset it from the host, and destroy it.
    VkQueryPoolCreateInfo pool_ci{};
    pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
    pool_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
    pool_ci.queryCount = 1;
    VkQueryPool pool;
    vkCreateQueryPool(m_device->device(), &pool_ci, nullptr, &pool);
    reset_query_pool_fn(m_device->device(), pool, 0, 1);
    vkDestroyQueryPool(m_device->device(), pool, nullptr);

    m_errorMonitor->VerifyNotFound();
}
7572
// With alphaToCoverage enabled, a fragment-shader write to a color output that has
// no corresponding attachment (attachmentCount == 0) must not produce a warning.
TEST_F(VkPositiveLayerTest, CreatePipelineFragmentOutputNotConsumedButAlphaToCoverageEnabled) {
    TEST_DESCRIPTION(
        "Test that no warning is produced when writing to non-existing color attachment if alpha to coverage is enabled.");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));

    VkPipelineMultisampleStateCreateInfo multisample_ci = {};
    multisample_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    multisample_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    multisample_ci.alphaToCoverageEnable = VK_TRUE;

    // Install the multisample state and strip all color attachments from the
    // color-blend state before the helper creates the pipeline.
    const auto apply_state = [&multisample_ci](CreatePipelineHelper &helper) {
        helper.pipe_ms_state_ci_ = multisample_ci;
        helper.cb_ci_.attachmentCount = 0;
    };
    CreatePipelineHelper::OneshotTest(*this, apply_state, VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, "",
                                      true);
}
7591
// Creating a device on queue family 0 with a single queue, without first calling
// vkGetPhysicalDeviceQueueFamilyProperties, must not trigger validation errors.
TEST_F(VkPositiveLayerTest, UseFirstQueueUnqueried) {
    TEST_DESCRIPTION("Use first queue family and one queue without first querying with vkGetPhysicalDeviceQueueFamilyProperties");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    const float priority[] = {1.0f};
    VkDeviceQueueCreateInfo queue_info = {};
    queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queue_info.queueFamilyIndex = 0;
    queue_info.queueCount = 1;
    queue_info.pQueuePriorities = priority;

    VkDeviceCreateInfo dev_info = {};
    dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    dev_info.queueCreateInfoCount = 1;
    dev_info.pQueueCreateInfos = &queue_info;

    m_errorMonitor->ExpectSuccess();
    VkDevice second_device;
    vkCreateDevice(gpu(), &dev_info, nullptr, &second_device);
    m_errorMonitor->VerifyNotFound();

    vkDestroyDevice(second_device, nullptr);
}
7616
7617 // Android loader returns an error in this case
7618 #if !defined(ANDROID)
// vkGetDeviceProcAddr must return NULL for instance-level entry points such as
// vkDestroySurfaceKHR; querying one through the device should not be an error,
// and the result must be null.
TEST_F(VkPositiveLayerTest, GetDevProcAddrNullPtr) {
    TEST_DESCRIPTION("Call GetDeviceProcAddr on an enabled instance extension expecting nullptr");

    // Fix: enable VK_KHR_surface *before* the instance is created. Previously the
    // name was pushed after InitFramework(), so the extension was never actually
    // enabled, contradicting the test description.
    if (InstanceExtensionSupported(VK_KHR_SURFACE_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    } else {
        printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_SURFACE_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    ASSERT_NO_FATAL_FAILURE(InitState());

    m_errorMonitor->ExpectSuccess();
    // Fix: cast to the matching PFN type. The original cast to the unrelated
    // PFN_vkCreateValidationCacheEXT compiled only because the result is merely
    // null-checked, but it was a type mismatch waiting to mislead a maintainer.
    auto fpDestroySurface = (PFN_vkDestroySurfaceKHR)vkGetDeviceProcAddr(m_device->device(), "vkDestroySurfaceKHR");
    if (fpDestroySurface) {
        m_errorMonitor->SetError("Null was expected!");
    }
    m_errorMonitor->VerifyNotFound();
}
7638 #endif
7639
// Records a vkCmdCopyImage from a regular image into an image bound to swapchain
// memory (via VkImageSwapchainCreateInfoKHR + VkBindImageMemorySwapchainInfoKHR)
// and expects no validation errors.
TEST_F(VkPositiveLayerTest, CmdCopySwapchainImage) {
    TEST_DESCRIPTION("Run vkCmdCopyImage with a swapchain image");

#if defined(VK_USE_PLATFORM_ANDROID_KHR)
    printf(
        "%s According to VUID-01631, VkBindImageMemoryInfo-memory should be NULL. But Android will crash if memory is NULL, "
        "skipping CmdCopySwapchainImage test\n",
        kSkipPrefix);
    return;
#endif

    SetTargetApiVersion(VK_API_VERSION_1_1);

    if (!AddSurfaceInstanceExtension()) {
        printf("%s surface extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    if (!AddSwapchainDeviceExtension()) {
        printf("%s swapchain extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
        return;
    }

    // vkBindImageMemory2 (core) and VkBindImageMemoryInfo are used below.
    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    if (!InitSwapchain()) {
        printf("%s Cannot create surface or swapchain, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
        return;
    }

    // Ordinary transfer-source image used as the copy source.
    auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
    image_create_info.imageType = VK_IMAGE_TYPE_2D;
    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
    image_create_info.extent.width = 64;
    image_create_info.extent.height = 64;
    image_create_info.extent.depth = 1;
    image_create_info.mipLevels = 1;
    image_create_info.arrayLayers = 1;
    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VkImageObj srcImage(m_device);
    srcImage.init(&image_create_info);

    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    // Tie the destination image to the swapchain rather than to ordinary memory.
    auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
    image_swapchain_create_info.swapchain = m_swapchain;
    image_create_info.pNext = &image_swapchain_create_info;

    VkImage image_from_swapchain;
    vkCreateImage(device(), &image_create_info, NULL, &image_from_swapchain);

    // Bind to swapchain image 0; memory stays VK_NULL_HANDLE per VUID-01631
    // (see the Android note at the top of this test).
    auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>();
    bind_swapchain_info.swapchain = m_swapchain;
    bind_swapchain_info.imageIndex = 0;

    auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>(&bind_swapchain_info);
    bind_info.image = image_from_swapchain;
    bind_info.memory = VK_NULL_HANDLE;
    bind_info.memoryOffset = 0;

    vkBindImageMemory2(m_device->device(), 1, &bind_info);

    VkImageCopy copy_region = {};
    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.srcSubresource.mipLevel = 0;
    copy_region.dstSubresource.mipLevel = 0;
    copy_region.srcSubresource.baseArrayLayer = 0;
    copy_region.dstSubresource.baseArrayLayer = 0;
    copy_region.srcSubresource.layerCount = 1;
    copy_region.dstSubresource.layerCount = 1;
    copy_region.srcOffset = {0, 0, 0};
    copy_region.dstOffset = {0, 0, 0};
    copy_region.extent = {10, 10, 1};

    // NOTE(review): the command buffer is begun but never ended or submitted;
    // presumably recording alone is enough to exercise the validation — confirm.
    m_commandBuffer->begin();

    m_errorMonitor->ExpectSuccess();
    vkCmdCopyImage(m_commandBuffer->handle(), srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, image_from_swapchain,
                   VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
    m_errorMonitor->VerifyNotFound();

    vkDestroyImage(m_device->device(), image_from_swapchain, NULL);
    DestroySwapchain();
}
7737
TEST_F(VkPositiveLayerTest, TransferImageToSwapchainDeviceGroup) {
    TEST_DESCRIPTION("Transfer an image to a swapchain's image between device group");

#if defined(VK_USE_PLATFORM_ANDROID_KHR)
    printf(
        "%s According to VUID-01631, VkBindImageMemoryInfo-memory should be NULL. But Android will crash if memory is NULL, "
        "skipping test\n",
        kSkipPrefix);
    return;
#endif

    SetTargetApiVersion(VK_API_VERSION_1_1);

    if (!AddSurfaceInstanceExtension()) {
        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    if (!AddSwapchainDeviceExtension()) {
        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
        return;
    }

    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }

    // Build the logical device on the first enumerated physical device group so the
    // device-group swapchain bind below is legal.
    uint32_t physical_device_group_count = 0;
    vkEnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, nullptr);

    if (physical_device_group_count == 0) {
        printf("%s physical_device_group_count is 0, skipping test\n", kSkipPrefix);
        return;
    }

    std::vector<VkPhysicalDeviceGroupProperties> physical_device_group(physical_device_group_count,
                                                                      {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES});
    vkEnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, physical_device_group.data());
    VkDeviceGroupDeviceCreateInfo create_device_pnext = {};
    create_device_pnext.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
    create_device_pnext.physicalDeviceCount = physical_device_group[0].physicalDeviceCount;
    create_device_pnext.pPhysicalDevices = physical_device_group[0].physicalDevices;
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &create_device_pnext, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    if (!InitSwapchain(VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
        printf("%s Cannot create surface or swapchain, skipping test\n", kSkipPrefix);
        return;
    }

    // Source of the copy: an ordinary transfer-src image.
    auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
    image_create_info.imageType = VK_IMAGE_TYPE_2D;
    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
    image_create_info.extent.width = 64;
    image_create_info.extent.height = 64;
    image_create_info.extent.depth = 1;
    image_create_info.mipLevels = 1;
    image_create_info.arrayLayers = 1;
    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VkImageObj src_Image(m_device);
    src_Image.init(&image_create_info);

    // Destination: a "peer" image that aliases a swapchain image, created with
    // VK_IMAGE_CREATE_ALIAS_BIT and VkImageSwapchainCreateInfoKHR chained in.
    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    image_create_info.flags = VK_IMAGE_CREATE_ALIAS_BIT;

    auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
    image_swapchain_create_info.swapchain = m_swapchain;
    image_create_info.pNext = &image_swapchain_create_info;

    VkImage peer_image;
    // Fail fast on creation errors; a garbage handle would produce unrelated errors below.
    ASSERT_VK_SUCCESS(vkCreateImage(device(), &image_create_info, NULL, &peer_image));

    // Bind the peer image to swapchain image 0's memory.  Per
    // VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01634 a non-zero
    // deviceIndexCount must equal the device group's physicalDeviceCount, so size the
    // index array from the enumerated group instead of hard-coding 2 (which would make
    // this positive test emit a validation error on single-GPU groups).
    std::vector<uint32_t> deviceIndices(physical_device_group[0].physicalDeviceCount, 0);
    auto bind_devicegroup_info = lvl_init_struct<VkBindImageMemoryDeviceGroupInfo>();
    bind_devicegroup_info.deviceIndexCount = static_cast<uint32_t>(deviceIndices.size());
    bind_devicegroup_info.pDeviceIndices = deviceIndices.data();
    bind_devicegroup_info.splitInstanceBindRegionCount = 0;
    bind_devicegroup_info.pSplitInstanceBindRegions = nullptr;

    auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>(&bind_devicegroup_info);
    bind_swapchain_info.swapchain = m_swapchain;
    bind_swapchain_info.imageIndex = 0;

    auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>(&bind_swapchain_info);
    bind_info.image = peer_image;
    bind_info.memory = VK_NULL_HANDLE;  // memory comes from the swapchain, not a VkDeviceMemory
    bind_info.memoryOffset = 0;

    vkBindImageMemory2(m_device->device(), 1, &bind_info);

    uint32_t swapchain_images_count = 0;
    vkGetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, nullptr);
    std::vector<VkImage> swapchain_images;
    swapchain_images.resize(swapchain_images_count);
    vkGetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, swapchain_images.data());

    // Transition swapchain image 0 to TRANSFER_DST_OPTIMAL so the copy below targets a
    // correctly laid-out image.
    m_commandBuffer->begin();

    auto img_barrier = lvl_init_struct<VkImageMemoryBarrier>();
    img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    img_barrier.image = swapchain_images[0];
    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    img_barrier.subresourceRange.baseArrayLayer = 0;
    img_barrier.subresourceRange.baseMipLevel = 0;
    img_barrier.subresourceRange.layerCount = 1;
    img_barrier.subresourceRange.levelCount = 1;
    vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
                         nullptr, 0, nullptr, 1, &img_barrier);

    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer();

    m_commandBuffer->reset();
    m_commandBuffer->begin();

    // The operation under test: copy into the swapchain-aliased peer image.  This must
    // not trigger any validation errors.
    VkImageCopy copy_region = {};
    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.srcSubresource.mipLevel = 0;
    copy_region.dstSubresource.mipLevel = 0;
    copy_region.srcSubresource.baseArrayLayer = 0;
    copy_region.dstSubresource.baseArrayLayer = 0;
    copy_region.srcSubresource.layerCount = 1;
    copy_region.dstSubresource.layerCount = 1;
    copy_region.srcOffset = {0, 0, 0};
    copy_region.dstOffset = {0, 0, 0};
    copy_region.extent = {10, 10, 1};
    vkCmdCopyImage(m_commandBuffer->handle(), src_Image.handle(), VK_IMAGE_LAYOUT_GENERAL, peer_image,
                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);

    m_commandBuffer->end();
    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->QueueCommandBuffer();
    m_errorMonitor->VerifyNotFound();

    vkDestroyImage(m_device->device(), peer_image, NULL);
    DestroySwapchain();
}
7885
TEST_F(VkPositiveLayerTest, RenderPassValidStages) {
    TEST_DESCRIPTION("Create render pass with valid stages");

    bool rp2_supported = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    if (rp2_supported) m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
    if (rp2_supported) rp2_supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Two graphics subpasses joined by a single dependency whose fields are varied below.
    VkSubpassDescription subpasses[2] = {};
    subpasses[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpasses[1].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;

    VkSubpassDependency dependency = {};

    VkRenderPassCreateInfo rpci = {};
    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    rpci.subpassCount = 2;
    rpci.pSubpasses = subpasses;
    rpci.dependencyCount = 1;
    rpci.pDependencies = &dependency;

    // Every graphics-capable pipeline stage -- always legal for a graphics dependency.
    const VkPipelineStageFlags kGraphicsStages =
        VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT |
        VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
        VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT |
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    // {srcSubpass, dstSubpass, srcStageMask, dstStageMask}.  HOST_BIT is only valid on
    // the VK_SUBPASS_EXTERNAL side of a dependency; the last two cases exercise that.
    const struct {
        uint32_t src_subpass;
        uint32_t dst_subpass;
        VkPipelineStageFlags src_stages;
        VkPipelineStageFlags dst_stages;
    } dependency_cases[] = {
        {0, 1, kGraphicsStages, kGraphicsStages},
        {VK_SUBPASS_EXTERNAL, 0, kGraphicsStages | VK_PIPELINE_STAGE_HOST_BIT, kGraphicsStages},
        {0, VK_SUBPASS_EXTERNAL, kGraphicsStages, VK_PIPELINE_STAGE_HOST_BIT},
    };

    for (const auto &test_case : dependency_cases) {
        dependency.srcSubpass = test_case.src_subpass;
        dependency.dstSubpass = test_case.dst_subpass;
        dependency.srcStageMask = test_case.src_stages;
        dependency.dstStageMask = test_case.dst_stages;
        PositiveTestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported);
    }
}
7934
TEST_F(VkPositiveLayerTest, SampleMaskOverrideCoverageNV) {
    TEST_DESCRIPTION("Test to validate VK_NV_sample_mask_override_coverage");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    // Trivial passthrough vertex shader.
    const char vs_src[] = R"(
        #version 450
        layout(location=0) out vec4  fragColor;

        const vec2 pos[3] = { vec2( 0.0f, -0.5f),
                              vec2( 0.5f,  0.5f),
                              vec2(-0.5f,  0.5f)
                            };
        void main()
        {
            gl_Position = vec4(pos[gl_VertexIndex % 3], 0.0f, 1.0f);
            fragColor = vec4(0.0f, 1.0f, 0.0f, 1.0f);
        })";

    // Fragment shader that redeclares gl_SampleMask with the override_coverage layout
    // from GL_NV_sample_mask_override_coverage -- the construct under test.
    const char fs_src[] = R"(
        #version 450
        #extension GL_NV_sample_mask_override_coverage : require

        layout(location = 0) in  vec4 fragColor;
        layout(location = 0) out vec4 outColor;

        layout(override_coverage) out int gl_SampleMask[];

        void main()
        {
            gl_SampleMask[0] = 0xff;
            outColor = fragColor;
        })";

    m_errorMonitor->ExpectSuccess();

    const VkSampleCountFlagBits sampleCount = VK_SAMPLE_COUNT_8_BIT;

    // Single multisampled color attachment render pass.
    VkAttachmentDescription cAttachment = {};
    cAttachment.format = VK_FORMAT_B8G8R8A8_UNORM;
    cAttachment.samples = sampleCount;
    cAttachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    cAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    cAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    cAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
    cAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    cAttachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    VkAttachmentReference cAttachRef = {};
    cAttachRef.attachment = 0;
    cAttachRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    VkSubpassDescription subpass = {};
    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.colorAttachmentCount = 1;
    subpass.pColorAttachments = &cAttachRef;

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
    rpci.attachmentCount = 1;
    rpci.pAttachments = &cAttachment;
    rpci.subpassCount = 1;
    rpci.pSubpasses = &subpass;

    VkRenderPass rp;
    // Fail fast if render pass creation fails; the pipeline build below dereferences rp,
    // so a silently-invalid handle would crash instead of exercising the extension path.
    ASSERT_VK_SUCCESS(vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp));

    const VkPipelineLayoutObj pl(m_device);

    VkSampleMask sampleMask = 0x01;
    VkPipelineMultisampleStateCreateInfo msaa = {VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO};
    msaa.rasterizationSamples = sampleCount;
    msaa.sampleShadingEnable = VK_FALSE;
    msaa.pSampleMask = &sampleMask;

    VkPipelineObj pipe(m_device);
    pipe.AddDefaultColorAttachment();
    pipe.SetMSAA(&msaa);

    VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
    pipe.AddShader(&vs);

    VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    pipe.AddShader(&fs);

    // Create pipeline and make sure that the usage of NV_sample_mask_override_coverage
    // in the fragment shader does not cause any errors.
    pipe.CreateVKPipeline(pl.handle(), rp);

    vkDestroyRenderPass(m_device->device(), rp, nullptr);

    m_errorMonitor->VerifyNotFound();
}
8036
TEST_F(VkPositiveLayerTest, TestRasterizationDiscardEnableTrue) {
    TEST_DESCRIPTION("Ensure it doesn't crash and trigger error msg when rasterizerDiscardEnable = true");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Minimal single-subpass render pass with one multisampled color attachment.
    VkAttachmentDescription att[1] = {{}};
    att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
    att[0].samples = VK_SAMPLE_COUNT_4_BIT;
    att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
    VkSubpassDescription sp = {};
    sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    sp.colorAttachmentCount = 1;
    sp.pColorAttachments = &cr;
    VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
    rpi.attachmentCount = 1;
    rpi.pAttachments = att;
    rpi.subpassCount = 1;
    rpi.pSubpasses = &sp;
    VkRenderPass rp;
    // Fail fast on creation errors: the pipeline below uses this handle, and a garbage
    // handle would crash instead of exercising the validation path under test.
    ASSERT_VK_SUCCESS(vkCreateRenderPass(m_device->device(), &rpi, nullptr, &rp));

    // With rasterizerDiscardEnable == VK_TRUE the spec allows all four of these
    // sub-states to be NULL; validation must accept that without crashing.
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.gp_ci_.pViewportState = nullptr;
    pipe.gp_ci_.pMultisampleState = nullptr;
    pipe.gp_ci_.pDepthStencilState = nullptr;
    pipe.gp_ci_.pColorBlendState = nullptr;
    pipe.gp_ci_.renderPass = rp;

    m_errorMonitor->ExpectSuccess();
    // Skip the test in NexusPlayer. The driver crashes when pViewportState, pMultisampleState, pDepthStencilState,
    // pColorBlendState are NULL.
    pipe.rs_state_ci_.rasterizerDiscardEnable = VK_TRUE;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
}
8077
TEST_F(VkPositiveLayerTest, TestSamplerDataForCombinedImageSampler) {
    TEST_DESCRIPTION("Shader code uses sampler data for CombinedImageSampler");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // SPIR-V fragment shader declaring an image and a sampler that share set 0 /
    // binding 0 -- the two halves of a combined image sampler -- and loading only
    // the sampler half.  Validation must treat the binding as in-use.
    const std::string fsSource = R"(
                   OpCapability Shader
                   OpMemoryModel Logical GLSL450
                   OpEntryPoint Fragment %main "main"
                   OpExecutionMode %main OriginUpperLeft

                   OpDecorate %InputData DescriptorSet 0
                   OpDecorate %InputData Binding 0
                   OpDecorate %SamplerData DescriptorSet 0
                   OpDecorate %SamplerData Binding 0

               %void = OpTypeVoid
                %f32 = OpTypeFloat 32
              %Image = OpTypeImage %f32 2D 0 0 0 1 Rgba32f
           %ImagePtr = OpTypePointer UniformConstant %Image
          %InputData = OpVariable %ImagePtr UniformConstant
            %Sampler = OpTypeSampler
         %SamplerPtr = OpTypePointer UniformConstant %Sampler
        %SamplerData = OpVariable %SamplerPtr UniformConstant
       %SampledImage = OpTypeSampledImage %Image

               %func = OpTypeFunction %void
               %main = OpFunction %void None %func
                 %40 = OpLabel
           %call_smp = OpLoad %Sampler %SamplerData
                   OpReturn
                   OpFunctionEnd)";

    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.dsl_bindings_ = {
        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
    };
    pipe.shader_stages_ = {fs.GetStageCreateInfo(), pipe.vs_->GetStageCreateInfo()};
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    // Backing resources for the combined image sampler descriptor.
    VkImageObj image(m_device);
    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);

    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    VkSampler sampler;
    // Fail fast on creation errors; writing a garbage sampler handle into the
    // descriptor set would produce unrelated validation errors.
    ASSERT_VK_SUCCESS(vkCreateSampler(m_device->device(), &sampler_ci, nullptr, &sampler));

    // (Removed a storage buffer and its create-info that were built here but never
    // referenced by the descriptor set, pipeline, or command buffer -- dead code.)

    pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    pipe.descriptor_set_->UpdateDescriptorSets();

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
                            &pipe.descriptor_set_->set_, 0, NULL);

    // The draw is the operation under test: it must not report a missing/unused sampler.
    m_errorMonitor->ExpectSuccess();
    vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    m_errorMonitor->VerifyNotFound();

    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    vkDestroySampler(m_device->device(), sampler, NULL);
}
8158
TEST_F(VkPositiveLayerTest, NotPointSizeGeometryShaderSuccess) {
    TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST, but geometry shader doesn't include PointSize.");

    ASSERT_NO_FATAL_FAILURE(Init());

    const bool has_geometry_shader = m_device->phy().features().geometryShader;
    if (!has_geometry_shader) {
        printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    ASSERT_NO_FATAL_FAILURE(InitViewport());

    // A geometry shader stage that does not write PointSize; with POINT_LIST topology
    // this pipeline must still be accepted.
    VkShaderObj geom_stage(m_device, bindStateGeomShaderText, VK_SHADER_STAGE_GEOMETRY_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), geom_stage.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
    helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    helper.InitState();

    m_errorMonitor->ExpectSuccess();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
8183
TEST_F(VkPositiveLayerTest, SubpassWithReadOnlyLayoutWithoutDependency) {
    TEST_DESCRIPTION("When both subpasses' attachments are the same and layouts are read-only, they don't need dependency.");
    ASSERT_NO_FATAL_FAILURE(Init());

    auto depth_format = FindSupportedDepthStencilFormat(gpu());
    if (!depth_format) {
        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
        return;
    }

    // A depth/stencil attachment that ends in (and is referenced in) the read-only
    // layout.  Both subpasses reference it read-only, so no subpass dependency is
    // required between them; this positive test must produce no validation errors.
    VkAttachmentDescription attachment = {0,
                                          depth_format,
                                          VK_SAMPLE_COUNT_1_BIT,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_STORE,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                          VK_IMAGE_LAYOUT_UNDEFINED,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
    const int size = 2;
    std::array<VkAttachmentDescription, size> attachments = {attachment, attachment};

    VkAttachmentReference att_ref_depth_stencil = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};

    // Two graphics subpasses, each using attachment 0 as its depth/stencil attachment.
    std::array<VkSubpassDescription, size> subpasses;
    subpasses[0] = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, 0, 0, nullptr, nullptr, &att_ref_depth_stencil, 0, nullptr};
    subpasses[1] = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, 0, 0, nullptr, nullptr, &att_ref_depth_stencil, 0, nullptr};

    // dependencyCount == 0: the absence of a dependency is what is being validated.
    VkRenderPassCreateInfo rpci = {
        VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, size, attachments.data(), size, subpasses.data(), 0, nullptr};

    VkRenderPass rp;
    VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);

    // A compatible framebuffer.
    VkImageObj image(m_device);
    image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_LINEAR, 0);
    ASSERT_TRUE(image.initialized());

    VkImageViewCreateInfo ivci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                                  nullptr,
                                  0,
                                  image.handle(),
                                  VK_IMAGE_VIEW_TYPE_2D,
                                  depth_format,
                                  {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
                                   VK_COMPONENT_SWIZZLE_IDENTITY},
                                  {VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, 0, 1, 0, 1}};

    VkImageView view;
    err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
    ASSERT_VK_SUCCESS(err);
    // The same view backs both framebuffer attachment slots.
    std::array<VkImageView, size> views = {view, view};

    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, size, views.data(), 32, 32, 1};
    VkFramebuffer fb;
    err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    // Record the pass, stepping through both subpasses; no errors are expected.
    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
    m_commandBuffer->begin();
    vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
    vkCmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();

    vkDestroyFramebuffer(m_device->device(), fb, nullptr);
    vkDestroyRenderPass(m_device->device(), rp, nullptr);
    vkDestroyImageView(m_device->device(), view, nullptr);
}
8256
TEST_F(VkPositiveLayerTest, GeometryShaderPassthroughNV) {
    TEST_DESCRIPTION("Test to validate VK_NV_geometry_shader_passthrough");

    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));

    VkPhysicalDeviceFeatures available_features = {};
    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));

    if (!available_features.geometryShader) {
        printf("%s VkPhysicalDeviceFeatures::geometryShader is not supported, skipping test\n", kSkipPrefix);
        return;
    }

    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader emitting position plus a per-vertex color block.
    const char vs_src[] = R"(
        #version 450

        out gl_PerVertex {
            vec4 gl_Position;
        };

        layout(location = 0) out ColorBlock {vec4 vertexColor;};

        const vec2 positions[3] = { vec2( 0.0f, -0.5f),
                                    vec2( 0.5f,  0.5f),
                                    vec2(-0.5f,  0.5f)
                                  };

        const vec4 colors[3] = { vec4(1.0f, 0.0f, 0.0f, 1.0f),
                                 vec4(0.0f, 1.0f, 0.0f, 1.0f),
                                 vec4(0.0f, 0.0f, 1.0f, 1.0f)
                               };
        void main()
        {
            vertexColor = colors[gl_VertexIndex % 3];
            gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
        })";

    // Geometry shader using layout(passthrough) from GL_NV_geometry_shader_passthrough
    // on its inputs -- the construct under test.  It forwards position and color
    // without emitting vertices itself.
    const char gs_src[] = R"(
        #version 450
        #extension GL_NV_geometry_shader_passthrough: require

        layout(triangles) in;
        layout(triangle_strip, max_vertices = 3) out;

        layout(passthrough) in gl_PerVertex {vec4 gl_Position;};
        layout(location = 0, passthrough) in ColorBlock {vec4 vertexColor;};

        void main()
        {
           gl_Layer = 0;
        })";

    // Plain fragment shader consuming the passed-through color block.
    const char fs_src[] = R"(
        #version 450

        layout(location = 0) in ColorBlock {vec4 vertexColor;};
        layout(location = 0) out vec4 outColor;

        void main() {
            outColor = vertexColor;
        })";

    m_errorMonitor->ExpectSuccess();

    const VkPipelineLayoutObj pl(m_device);

    VkPipelineObj pipe(m_device);
    pipe.AddDefaultColorAttachment();

    VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
    pipe.AddShader(&vs);

    VkShaderObj gs(m_device, gs_src, VK_SHADER_STAGE_GEOMETRY_BIT, this);
    pipe.AddShader(&gs);

    VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    pipe.AddShader(&fs);

    // Create pipeline and make sure that the usage of NV_geometry_shader_passthrough
    // in the geometry shader does not cause any errors.
    pipe.CreateVKPipeline(pl.handle(), renderPass());

    m_errorMonitor->VerifyNotFound();
}
8351