1 /*
2  * Copyright (c) 2015-2019 The Khronos Group Inc.
3  * Copyright (c) 2015-2019 Valve Corporation
4  * Copyright (c) 2015-2019 LunarG, Inc.
5  * Copyright (c) 2015-2019 Google, Inc.
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  *     http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Author: Chia-I Wu <olvaffe@gmail.com>
14  * Author: Chris Forbes <chrisf@ijw.co.nz>
15  * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
16  * Author: Mark Lobodzinski <mark@lunarg.com>
17  * Author: Mike Stroyan <mike@LunarG.com>
18  * Author: Tobin Ehlis <tobine@google.com>
19  * Author: Tony Barbour <tony@LunarG.com>
20  * Author: Cody Northrop <cnorthrop@google.com>
21  * Author: Dave Houlton <daveh@lunarg.com>
22  * Author: Jeremy Kniager <jeremyk@lunarg.com>
23  * Author: Shannon McPherson <shannon@lunarg.com>
24  * Author: John Zulauf <jzulauf@lunarg.com>
25  */
26 #include "cast_utils.h"
27 #include "layer_validation_tests.h"
28 
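// Returns the first packed depth/stencil format that the physical device supports as an
// optimal-tiling depth/stencil attachment, or VK_FORMAT_UNDEFINED if none of the candidates qualify.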
29 VkFormat FindSupportedDepthStencilFormat(VkPhysicalDevice phy) {
30     const VkFormat ds_formats[] = {VK_FORMAT_D16_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT};
31     for (uint32_t i = 0; i < size(ds_formats); ++i) {
32         VkFormatProperties format_props;
33         vkGetPhysicalDeviceFormatProperties(phy, ds_formats[i], &format_props);
34 
35         if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
36             return ds_formats[i];
37         }
38     }
39     return VK_FORMAT_UNDEFINED;
40 }
41 
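// True if the format supports at least one of the requested feature flags for the given tiling.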
42 bool ImageFormatIsSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features) {
43     VkFormatProperties format_props;
44     vkGetPhysicalDeviceFormatProperties(phy, format, &format_props);
45     VkFormatFeatureFlags phy_features =
46         (VK_IMAGE_TILING_OPTIMAL == tiling ? format_props.optimalTilingFeatures : format_props.linearTilingFeatures);
47     return (0 != (phy_features & features));
48 }
49 
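// True only if the format supports every one of the requested feature flags for the given tiling.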
50 bool ImageFormatAndFeaturesSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features) {
51     VkFormatProperties format_props;
52     vkGetPhysicalDeviceFormatProperties(phy, format, &format_props);
53     VkFormatFeatureFlags phy_features =
54         (VK_IMAGE_TILING_OPTIMAL == tiling ? format_props.optimalTilingFeatures : format_props.linearTilingFeatures);
55     return (features == (phy_features & features));
56 }
57 
58 bool ImageFormatAndFeaturesSupported(const VkInstance inst, const VkPhysicalDevice phy, const VkImageCreateInfo info,
59                                      const VkFormatFeatureFlags features) {
60     // Verify physical device support of format features
61     if (!ImageFormatAndFeaturesSupported(phy, info.format, info.tiling, features)) {
62         return false;
63     }
64 
65     // Verify that PhysDevImageFormatProp() also claims support for the specific usage
66     VkImageFormatProperties props;
67     VkResult err =
68         vkGetPhysicalDeviceImageFormatProperties(phy, info.format, info.imageType, info.tiling, info.usage, info.flags, &props);
69     if (VK_SUCCESS != err) {
70         return false;
71     }
72 
73 #if 0  // Convinced this chunk doesn't currently add any additional info, but leaving in place because it may be
74        // necessary with future extensions
75 
76     // Verify again using version 2, if supported, which *can* return more property data than the original...
77     // (It's not clear that this is any more definitive than using the original version - but no harm)
78     PFN_vkGetPhysicalDeviceImageFormatProperties2KHR p_GetPDIFP2KHR =
79         (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)vkGetInstanceProcAddr(inst,
80                                                                                 "vkGetPhysicalDeviceImageFormatProperties2KHR");
81     if (NULL != p_GetPDIFP2KHR) {
82         VkPhysicalDeviceImageFormatInfo2KHR fmt_info{};
83         fmt_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR;
84         fmt_info.pNext = nullptr;
85         fmt_info.format = info.format;
86         fmt_info.type = info.imageType;
87         fmt_info.tiling = info.tiling;
88         fmt_info.usage = info.usage;
89         fmt_info.flags = info.flags;
90 
91         VkImageFormatProperties2KHR fmt_props = {};
92         fmt_props.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR;
93         err = p_GetPDIFP2KHR(phy, &fmt_info, &fmt_props);
94         if (VK_SUCCESS != err) {
95             return false;
96         }
97     }
98 #endif
99 
100     return true;
101 }
102 
103 VKAPI_ATTR VkBool32 VKAPI_CALL myDbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location,
104                                          int32_t msgCode, const char *pLayerPrefix, const char *pMsg, void *pUserData) {
105     ErrorMonitor *errMonitor = (ErrorMonitor *)pUserData;
106     if (msgFlags & errMonitor->GetMessageFlags()) {
107         return errMonitor->CheckForDesiredMsg(pMsg);
108     }
109     return VK_FALSE;
110 }
111 
112 VkPhysicalDevicePushDescriptorPropertiesKHR GetPushDescriptorProperties(VkInstance instance, VkPhysicalDevice gpu) {
113     // Find address of extension call and make the call -- assumes needed extensions are enabled.
114     PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
115         (PFN_vkGetPhysicalDeviceProperties2KHR)vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties2KHR");
116     assert(vkGetPhysicalDeviceProperties2KHR != nullptr);
117 
118     // Get the push descriptor limits
119     auto push_descriptor_prop = lvl_init_struct<VkPhysicalDevicePushDescriptorPropertiesKHR>();
120     auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&push_descriptor_prop);
121     vkGetPhysicalDeviceProperties2KHR(gpu, &prop2);
122     return push_descriptor_prop;
123 }
124 
125 VkPhysicalDeviceSubgroupProperties GetSubgroupProperties(VkInstance instance, VkPhysicalDevice gpu) {
126     auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
127 
128     auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2>(&subgroup_prop);
129     vkGetPhysicalDeviceProperties2(gpu, &prop2);
130     return subgroup_prop;
131 }
132 
133 bool operator==(const VkDebugUtilsLabelEXT &rhs, const VkDebugUtilsLabelEXT &lhs) {
134     bool is_equal = (rhs.color[0] == lhs.color[0]) && (rhs.color[1] == lhs.color[1]) && (rhs.color[2] == lhs.color[2]) &&
135                     (rhs.color[3] == lhs.color[3]);
136     if (is_equal) {
137         if (rhs.pLabelName && lhs.pLabelName) {
138             is_equal = (0 == strcmp(rhs.pLabelName, lhs.pLabelName));
139         } else {
140             is_equal = (rhs.pLabelName == nullptr) && (lhs.pLabelName == nullptr);
141         }
142     }
143     return is_equal;
144 }
145 
146 VKAPI_ATTR VkBool32 VKAPI_CALL DebugUtilsCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
147                                                   VkDebugUtilsMessageTypeFlagsEXT messageTypes,
148                                                   const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData) {
149     auto *data = reinterpret_cast<DebugUtilsLabelCheckData *>(pUserData);
150     data->callback(pCallbackData, data);
151     return VK_FALSE;
152 }
153 
154 #if GTEST_IS_THREADSAFE
155 extern "C" void *AddToCommandBuffer(void *arg) {
156     struct thread_data_struct *data = (struct thread_data_struct *)arg;
157 
158     for (int i = 0; i < 80000; i++) {
159         vkCmdSetEvent(data->commandBuffer, data->event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
160         if (data->bailout) {
161             break;
162         }
163     }
164     return NULL;
165 }
166 #endif  // GTEST_IS_THREADSAFE
167 
168 extern "C" void *ReleaseNullFence(void *arg) {
169     struct thread_data_struct *data = (struct thread_data_struct *)arg;
170 
171     for (int i = 0; i < 40000; i++) {
172         vkDestroyFence(data->device, VK_NULL_HANDLE, NULL);
173         if (data->bailout) {
174             break;
175         }
176     }
177     return NULL;
178 }
179 
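// Negative-test helper: attempts to create a render pass that is expected to fail validation.
// Checks rp1_vuid against vkCreateRenderPass and, when the create_renderpass2 path is supported,
// rp2_vuid against vkCreateRenderPass2KHR; passing a null VUID skips that variant.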
180 void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
181                           bool rp2_supported, const char *rp1_vuid, const char *rp2_vuid) {
182     VkRenderPass render_pass = VK_NULL_HANDLE;
183     VkResult err;
184 
185     if (rp1_vuid) {
186         error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp1_vuid);
187         err = vkCreateRenderPass(device, create_info, nullptr, &render_pass);
188         if (err == VK_SUCCESS) vkDestroyRenderPass(device, render_pass, nullptr);
189         error_monitor->VerifyFound();
190     }
191 
192     if (rp2_supported && rp2_vuid) {
193         PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
194             (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
195         safe_VkRenderPassCreateInfo2KHR create_info2;
196         ConvertVkRenderPassCreateInfoToV2KHR(create_info, &create_info2);
197 
198         error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp2_vuid);
199         err = vkCreateRenderPass2KHR(device, create_info2.ptr(), nullptr, &render_pass);
200         if (err == VK_SUCCESS) vkDestroyRenderPass(device, render_pass, nullptr);
201         error_monitor->VerifyFound();
202     }
203 }
204 
205 void PositiveTestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
206                                   bool rp2_supported) {
207     VkRenderPass render_pass = VK_NULL_HANDLE;
208     VkResult err;
209 
210     error_monitor->ExpectSuccess();
211     err = vkCreateRenderPass(device, create_info, nullptr, &render_pass);
212     if (err == VK_SUCCESS) vkDestroyRenderPass(device, render_pass, nullptr);
213     error_monitor->VerifyNotFound();
214 
215     if (rp2_supported) {
216         PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
217             (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
218         safe_VkRenderPassCreateInfo2KHR create_info2;
219         ConvertVkRenderPassCreateInfoToV2KHR(create_info, &create_info2);
220 
221         error_monitor->ExpectSuccess();
222         err = vkCreateRenderPass2KHR(device, create_info2.ptr(), nullptr, &render_pass);
223         if (err == VK_SUCCESS) vkDestroyRenderPass(device, render_pass, nullptr);
224         error_monitor->VerifyNotFound();
225     }
226 }
227 
228 void TestRenderPass2KHRCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo2KHR *create_info,
229                               const char *rp2_vuid) {
230     VkRenderPass render_pass = VK_NULL_HANDLE;
231     VkResult err;
232     PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
233         (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
234 
235     error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp2_vuid);
236     err = vkCreateRenderPass2KHR(device, create_info, nullptr, &render_pass);
237     if (err == VK_SUCCESS) vkDestroyRenderPass(device, render_pass, nullptr);
238     error_monitor->VerifyFound();
239 }
240 
241 void TestRenderPassBegin(ErrorMonitor *error_monitor, const VkDevice device, const VkCommandBuffer command_buffer,
242                          const VkRenderPassBeginInfo *begin_info, bool rp2Supported, const char *rp1_vuid, const char *rp2_vuid) {
243     VkCommandBufferBeginInfo cmd_begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
244                                                VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
245 
246     if (rp1_vuid) {
247         vkBeginCommandBuffer(command_buffer, &cmd_begin_info);
248         error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp1_vuid);
249         vkCmdBeginRenderPass(command_buffer, begin_info, VK_SUBPASS_CONTENTS_INLINE);
250         error_monitor->VerifyFound();
251         vkResetCommandBuffer(command_buffer, 0);
252     }
253     if (rp2Supported && rp2_vuid) {
254         PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR =
255             (PFN_vkCmdBeginRenderPass2KHR)vkGetDeviceProcAddr(device, "vkCmdBeginRenderPass2KHR");
256         VkSubpassBeginInfoKHR subpass_begin_info = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
257         vkBeginCommandBuffer(command_buffer, &cmd_begin_info);
258         error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp2_vuid);
259         vkCmdBeginRenderPass2KHR(command_buffer, begin_info, &subpass_begin_info);
260         error_monitor->VerifyFound();
261         vkResetCommandBuffer(command_buffer, 0);
262     }
263 }
264 
265 void ValidOwnershipTransferOp(ErrorMonitor *monitor, VkCommandBufferObj *cb, VkPipelineStageFlags src_stages,
266                               VkPipelineStageFlags dst_stages, const VkBufferMemoryBarrier *buf_barrier,
267                               const VkImageMemoryBarrier *img_barrier) {
268     monitor->ExpectSuccess();
269     cb->begin();
270     uint32_t num_buf_barrier = (buf_barrier) ? 1 : 0;
271     uint32_t num_img_barrier = (img_barrier) ? 1 : 0;
272     cb->PipelineBarrier(src_stages, dst_stages, 0, 0, nullptr, num_buf_barrier, buf_barrier, num_img_barrier, img_barrier);
273     cb->end();
274     cb->QueueCommandBuffer();  // Implicitly waits
275     monitor->VerifyNotFound();
276 }
277 
278 void ValidOwnershipTransfer(ErrorMonitor *monitor, VkCommandBufferObj *cb_from, VkCommandBufferObj *cb_to,
279                             VkPipelineStageFlags src_stages, VkPipelineStageFlags dst_stages,
280                             const VkBufferMemoryBarrier *buf_barrier, const VkImageMemoryBarrier *img_barrier) {
281     ValidOwnershipTransferOp(monitor, cb_from, src_stages, dst_stages, buf_barrier, img_barrier);
282     ValidOwnershipTransferOp(monitor, cb_to, src_stages, dst_stages, buf_barrier, img_barrier);
283 }
284 
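// Thin wrapper over vkGetPhysicalDeviceImageFormatProperties; callers that only care about the
// VkResult may pass nullptr for limits.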
285 VkResult GPDIFPHelper(VkPhysicalDevice dev, const VkImageCreateInfo *ci, VkImageFormatProperties *limits) {
286     VkImageFormatProperties tmp_limits;
287     limits = limits ? limits : &tmp_limits;
288     return vkGetPhysicalDeviceImageFormatProperties(dev, ci->format, ci->imageType, ci->tiling, ci->usage, ci->flags, limits);
289 }
290 
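// Scans the low-numbered core formats for one that is usable with linear tiling but reports
// maxMipLevels == 1, returning VK_FORMAT_UNDEFINED if no such format exists on this device.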
291 VkFormat FindFormatLinearWithoutMips(VkPhysicalDevice gpu, VkImageCreateInfo image_ci) {
292     image_ci.tiling = VK_IMAGE_TILING_LINEAR;
293 
294     const VkFormat first_vk_format = static_cast<VkFormat>(1);
295     const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184
296 
297     for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
298         image_ci.format = format;
299 
300         // WORKAROUND for dev_sim and mock_icd not containing valid format limits yet
301         VkFormatProperties format_props;
302         vkGetPhysicalDeviceFormatProperties(gpu, format, &format_props);
303         const VkFormatFeatureFlags core_filter = 0x1FFF;
304         const auto features = (image_ci.tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
305                                                                           : format_props.optimalTilingFeatures & core_filter;
306         if (!(features & core_filter)) continue;
307 
308         VkImageFormatProperties img_limits;
309         if (VK_SUCCESS == GPDIFPHelper(gpu, &image_ci, &img_limits) && img_limits.maxMipLevels == 1) return format;
310     }
311 
312     return VK_FORMAT_UNDEFINED;
313 }
314 
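// Searches core formats and sample counts for a combination the device does not support,
// writing the found format and sample count back into image_ci; returns false if none is found.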
315 bool FindFormatWithoutSamples(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci) {
316     const VkFormat first_vk_format = static_cast<VkFormat>(1);
317     const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184
318 
319     for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
320         image_ci.format = format;
321 
322         // WORKAROUND for dev_sim and mock_icd not containing valid format limits yet
323         VkFormatProperties format_props;
324         vkGetPhysicalDeviceFormatProperties(gpu, format, &format_props);
325         const VkFormatFeatureFlags core_filter = 0x1FFF;
326         const auto features = (image_ci.tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
327                                                                           : format_props.optimalTilingFeatures & core_filter;
328         if (!(features & core_filter)) continue;
329 
330         for (VkSampleCountFlagBits samples = VK_SAMPLE_COUNT_64_BIT; samples > 0;
331              samples = static_cast<VkSampleCountFlagBits>(samples >> 1)) {
332             image_ci.samples = samples;
333             VkImageFormatProperties img_limits;
334             if (VK_SUCCESS == GPDIFPHelper(gpu, &image_ci, &img_limits) && !(img_limits.sampleCounts & samples)) return true;
335         }
336     }
337 
338     return false;
339 }
340 
341 bool FindUnsupportedImage(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci) {
342     const VkFormat first_vk_format = static_cast<VkFormat>(1);
343     const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184
344 
345     const std::vector<VkImageTiling> tilings = {VK_IMAGE_TILING_LINEAR, VK_IMAGE_TILING_OPTIMAL};
346     for (const auto tiling : tilings) {
347         image_ci.tiling = tiling;
348 
349         for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
350             image_ci.format = format;
351 
352             VkFormatProperties format_props;
353             vkGetPhysicalDeviceFormatProperties(gpu, format, &format_props);
354 
355             const VkFormatFeatureFlags core_filter = 0x1FFF;
356             const auto features = (tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
357                                                                      : format_props.optimalTilingFeatures & core_filter;
358             if (!(features & core_filter)) continue;  // We want a format supported by format features, but not by ImageFormatProperties
359 
360             // get as many usage flags as possible
361             image_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
362             if (features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) image_ci.usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
363             if (features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) image_ci.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
364             if (features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) image_ci.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
365             if (features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
366                 image_ci.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
367 
368             VkImageFormatProperties img_limits;
369             if (VK_ERROR_FORMAT_NOT_SUPPORTED == GPDIFPHelper(gpu, &image_ci, &img_limits)) {
370                 return true;
371             }
372         }
373     }
374 
375     return false;
376 }
377 
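// Returns a core format whose supported features (for the given tiling) do not include any of
// undesired_features; pass UINT32_MAX to request a format with no core features at all.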
378 VkFormat FindFormatWithoutFeatures(VkPhysicalDevice gpu, VkImageTiling tiling, VkFormatFeatureFlags undesired_features) {
379     const VkFormat first_vk_format = static_cast<VkFormat>(1);
380     const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184
381 
382     for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
383         VkFormatProperties format_props;
384         vkGetPhysicalDeviceFormatProperties(gpu, format, &format_props);
385 
386         const VkFormatFeatureFlags core_filter = 0x1FFF;
387         const auto features = (tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
388                                                                  : format_props.optimalTilingFeatures & core_filter;
389 
390         const auto valid_features = features & core_filter;
391         if (undesired_features == UINT32_MAX) {
392             if (!valid_features) return format;
393         } else {
394             if (valid_features && !(valid_features & undesired_features)) return format;
395         }
396     }
397 
398     return VK_FORMAT_UNDEFINED;
399 }
400 
401 void NegHeightViewportTests(VkDeviceObj *m_device, VkCommandBufferObj *m_commandBuffer, ErrorMonitor *m_errorMonitor) {
402     const auto &limits = m_device->props.limits;
403 
404     m_commandBuffer->begin();
405 
406     using std::vector;
407     struct TestCase {
408         VkViewport vp;
409         vector<std::string> vuids;
410     };
411 
412     // not necessarily boundary values (unspecified cast rounding), but guaranteed to be over limit
413     const auto one_before_min_h = NearestSmaller(-static_cast<float>(limits.maxViewportDimensions[1]));
414     const auto one_past_max_h = NearestGreater(static_cast<float>(limits.maxViewportDimensions[1]));
415 
416     const auto min_bound = limits.viewportBoundsRange[0];
417     const auto max_bound = limits.viewportBoundsRange[1];
418     const auto one_before_min_bound = NearestSmaller(min_bound);
419     const auto one_past_max_bound = NearestGreater(max_bound);
420 
421     const vector<TestCase> test_cases = {{{0.0, 0.0, 64.0, one_before_min_h, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
422                                          {{0.0, 0.0, 64.0, one_past_max_h, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
423                                          {{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
424                                          {{0.0, one_before_min_bound, 64.0, 1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01775"}},
425                                          {{0.0, one_past_max_bound, 64.0, -1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01776"}},
426                                          {{0.0, min_bound, 64.0, -1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01777"}},
427                                          {{0.0, max_bound, 64.0, 1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01233"}}};
428 
429     for (const auto &test_case : test_cases) {
430         for (const auto vuid : test_case.vuids) {
431             if (vuid == "VUID-Undefined")
432                 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
433                                                      "is less than VkPhysicalDeviceLimits::viewportBoundsRange[0]");
434             else
435                 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
436         }
437         vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &test_case.vp);
438         m_errorMonitor->VerifyFound();
439     }
440 }
441 
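// The Create*Test helpers below create the given Vulkan object, expecting the supplied VUID
// string to be reported (or expecting success when the string is empty), then destroy the object
// if creation succeeded. Illustrative call (the VUID string is a placeholder, not a real identifier):
//     CreateSamplerTest(*this, &sampler_create_info, "VUID-VkSamplerCreateInfo-...");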
442 void CreateSamplerTest(VkLayerTest &test, const VkSamplerCreateInfo *pCreateInfo, std::string code) {
443     VkResult err;
444     VkSampler sampler = VK_NULL_HANDLE;
445     if (code.length())
446         test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, code);
447     else
448         test.Monitor()->ExpectSuccess();
449 
450     err = vkCreateSampler(test.device(), pCreateInfo, NULL, &sampler);
451     if (code.length())
452         test.Monitor()->VerifyFound();
453     else
454         test.Monitor()->VerifyNotFound();
455 
456     if (VK_SUCCESS == err) {
457         vkDestroySampler(test.device(), sampler, NULL);
458     }
459 }
460 
461 void CreateBufferTest(VkLayerTest &test, const VkBufferCreateInfo *pCreateInfo, std::string code) {
462     VkResult err;
463     VkBuffer buffer = VK_NULL_HANDLE;
464     if (code.length())
465         test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
466     else
467         test.Monitor()->ExpectSuccess();
468 
469     err = vkCreateBuffer(test.device(), pCreateInfo, NULL, &buffer);
470     if (code.length())
471         test.Monitor()->VerifyFound();
472     else
473         test.Monitor()->VerifyNotFound();
474 
475     if (VK_SUCCESS == err) {
476         vkDestroyBuffer(test.device(), buffer, NULL);
477     }
478 }
479 
480 void CreateImageTest(VkLayerTest &test, const VkImageCreateInfo *pCreateInfo, std::string code) {
481     VkResult err;
482     VkImage image = VK_NULL_HANDLE;
483     if (code.length())
484         test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
485     else
486         test.Monitor()->ExpectSuccess();
487 
488     err = vkCreateImage(test.device(), pCreateInfo, NULL, &image);
489     if (code.length())
490         test.Monitor()->VerifyFound();
491     else
492         test.Monitor()->VerifyNotFound();
493 
494     if (VK_SUCCESS == err) {
495         vkDestroyImage(test.device(), image, NULL);
496     }
497 }
498 
499 void CreateBufferViewTest(VkLayerTest &test, const VkBufferViewCreateInfo *pCreateInfo, const std::vector<std::string> &codes) {
500     VkResult err;
501     VkBufferView view = VK_NULL_HANDLE;
502     if (codes.size())
503         std::for_each(codes.begin(), codes.end(),
504                       [&](const std::string &s) { test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, s); });
505     else
506         test.Monitor()->ExpectSuccess();
507 
508     err = vkCreateBufferView(test.device(), pCreateInfo, NULL, &view);
509     if (codes.size())
510         test.Monitor()->VerifyFound();
511     else
512         test.Monitor()->VerifyNotFound();
513 
514     if (VK_SUCCESS == err) {
515         vkDestroyBufferView(test.device(), view, NULL);
516     }
517 }
518 
519 void CreateImageViewTest(VkLayerTest &test, const VkImageViewCreateInfo *pCreateInfo, std::string code) {
520     VkResult err;
521     VkImageView view = VK_NULL_HANDLE;
522     if (code.length())
523         test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
524     else
525         test.Monitor()->ExpectSuccess();
526 
527     err = vkCreateImageView(test.device(), pCreateInfo, NULL, &view);
528     if (code.length())
529         test.Monitor()->VerifyFound();
530     else
531         test.Monitor()->VerifyNotFound();
532 
533     if (VK_SUCCESS == err) {
534         vkDestroyImageView(test.device(), view, NULL);
535     }
536 }
537 
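// Returns a fully initialized, validation-clean sampler create info that individual tests can tweak.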
538 VkSamplerCreateInfo SafeSaneSamplerCreateInfo() {
539     VkSamplerCreateInfo sampler_create_info = {};
540     sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
541     sampler_create_info.pNext = nullptr;
542     sampler_create_info.magFilter = VK_FILTER_NEAREST;
543     sampler_create_info.minFilter = VK_FILTER_NEAREST;
544     sampler_create_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
545     sampler_create_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
546     sampler_create_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
547     sampler_create_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
548     sampler_create_info.mipLodBias = 0.0;
549     sampler_create_info.anisotropyEnable = VK_FALSE;
550     sampler_create_info.maxAnisotropy = 1.0;
551     sampler_create_info.compareEnable = VK_FALSE;
552     sampler_create_info.compareOp = VK_COMPARE_OP_NEVER;
553     sampler_create_info.minLod = 0.0;
554     sampler_create_info.maxLod = 16.0;
555     sampler_create_info.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
556     sampler_create_info.unnormalizedCoordinates = VK_FALSE;
557 
558     return sampler_create_info;
559 }
560 
561 VkImageViewCreateInfo SafeSaneImageViewCreateInfo(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask) {
562     VkImageViewCreateInfo image_view_create_info = {};
563     image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
564     image_view_create_info.image = image;
565     image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
566     image_view_create_info.format = format;
567     image_view_create_info.subresourceRange.layerCount = 1;
568     image_view_create_info.subresourceRange.baseMipLevel = 0;
569     image_view_create_info.subresourceRange.levelCount = 1;
570     image_view_create_info.subresourceRange.aspectMask = aspect_mask;
571 
572     return image_view_create_info;
573 }
574 
575 VkImageViewCreateInfo SafeSaneImageViewCreateInfo(const VkImageObj &image, VkFormat format, VkImageAspectFlags aspect_mask) {
576     return SafeSaneImageViewCreateInfo(image.handle(), format, aspect_mask);
577 }
578 
579 bool CheckCreateRenderPass2Support(VkRenderFramework *renderFramework, std::vector<const char *> &device_extension_names) {
580     if (renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
581         device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
582         device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
583         device_extension_names.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
584         return true;
585     }
586     return false;
587 }
588 
589 bool CheckDescriptorIndexingSupportAndInitFramework(VkRenderFramework *renderFramework,
590                                                     std::vector<const char *> &instance_extension_names,
591                                                     std::vector<const char *> &device_extension_names,
592                                                     VkValidationFeaturesEXT *features, void *userData) {
593     bool descriptor_indexing = renderFramework->InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
594     if (descriptor_indexing) {
595         instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
596     }
597     renderFramework->InitFramework(myDbgFunc, userData, features);
598     descriptor_indexing = descriptor_indexing && renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr,
599                                                                                            VK_KHR_MAINTENANCE3_EXTENSION_NAME);
600     descriptor_indexing = descriptor_indexing && renderFramework->DeviceExtensionSupported(
601                                                      renderFramework->gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
602     if (descriptor_indexing) {
603         device_extension_names.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
604         device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
605         return true;
606     }
607     return false;
608 }
609 
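// ErrorMonitor drives the negative/positive test pattern used throughout this file:
//
//     monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-...");  // expect an error
//     /* make the offending Vulkan call */
//     monitor->VerifyFound();
//
//     monitor->ExpectSuccess();                                                  // expect no errors
//     /* make the valid Vulkan call */
//     monitor->VerifyNotFound();
//
// The VUID string above is a placeholder; real tests pass the exact identifier they expect.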
610 ErrorMonitor::ErrorMonitor() {
611     test_platform_thread_create_mutex(&mutex_);
612     test_platform_thread_lock_mutex(&mutex_);
613     Reset();
614     test_platform_thread_unlock_mutex(&mutex_);
615 }
616 
617 ErrorMonitor::~ErrorMonitor() { test_platform_thread_delete_mutex(&mutex_); }
618 
619 void ErrorMonitor::Reset() {
620     message_flags_ = VK_DEBUG_REPORT_ERROR_BIT_EXT;
621     bailout_ = NULL;
622     message_found_ = VK_FALSE;
623     failure_message_strings_.clear();
624     desired_message_strings_.clear();
625     ignore_message_strings_.clear();
626     other_messages_.clear();
627 }
628 
629 void ErrorMonitor::SetDesiredFailureMsg(const VkFlags msgFlags, const std::string msg) {
630     SetDesiredFailureMsg(msgFlags, msg.c_str());
631 }
632 
633 void ErrorMonitor::SetDesiredFailureMsg(const VkFlags msgFlags, const char *const msgString) {
634     test_platform_thread_lock_mutex(&mutex_);
635     desired_message_strings_.insert(msgString);
636     message_flags_ |= msgFlags;
637     test_platform_thread_unlock_mutex(&mutex_);
638 }
639 
640 void ErrorMonitor::SetUnexpectedError(const char *const msg) {
641     test_platform_thread_lock_mutex(&mutex_);
642 
643     ignore_message_strings_.emplace_back(msg);
644 
645     test_platform_thread_unlock_mutex(&mutex_);
646 }
647 
648 VkBool32 ErrorMonitor::CheckForDesiredMsg(const char *const msgString) {
649     VkBool32 result = VK_FALSE;
650     test_platform_thread_lock_mutex(&mutex_);
651     if (bailout_ != nullptr) {
652         *bailout_ = true;
653     }
654     string errorString(msgString);
655     bool found_expected = false;
656 
657     if (!IgnoreMessage(errorString)) {
658         for (auto desired_msg_it = desired_message_strings_.begin(); desired_msg_it != desired_message_strings_.end();
659              ++desired_msg_it) {
660             if ((*desired_msg_it).length() == 0) {
661                 // An empty desired_msg string "" indicates a positive test - not expecting an error.
662                 // Return true to avoid calling layers/driver with this error.
663                 // And don't erase the "" string, so it remains if another error is found.
664                 result = VK_TRUE;
665                 found_expected = true;
666                 message_found_ = true;
667                 failure_message_strings_.insert(errorString);
668             } else if (errorString.find(*desired_msg_it) != string::npos) {
669                 found_expected = true;
670                 failure_message_strings_.insert(errorString);
671                 message_found_ = true;
672                 result = VK_TRUE;
673                 // Remove a maximum of one failure message from the set
674                 // Multiset mutation is acceptable because `break` causes flow of control to exit the for loop
675                 desired_message_strings_.erase(desired_msg_it);
676                 break;
677             }
678         }
679 
680         if (!found_expected) {
681             printf("Unexpected: %s\n", msgString);
682             other_messages_.push_back(errorString);
683         }
684     }
685 
686     test_platform_thread_unlock_mutex(&mutex_);
687     return result;
688 }
689 
690 vector<string> ErrorMonitor::GetOtherFailureMsgs() const { return other_messages_; }
691 
692 VkDebugReportFlagsEXT ErrorMonitor::GetMessageFlags() const { return message_flags_; }
693 
694 bool ErrorMonitor::AnyDesiredMsgFound() const { return message_found_; }
695 
696 bool ErrorMonitor::AllDesiredMsgsFound() const { return desired_message_strings_.empty(); }
697 
698 void ErrorMonitor::SetError(const char *const errorString) {
699     message_found_ = true;
700     failure_message_strings_.insert(errorString);
701 }
702 
703 void ErrorMonitor::SetBailout(bool *bailout) { bailout_ = bailout; }
704 
705 void ErrorMonitor::DumpFailureMsgs() const {
706     vector<string> otherMsgs = GetOtherFailureMsgs();
707     if (otherMsgs.size()) {
708         cout << "Other error messages logged for this test were:" << endl;
709         for (auto iter = otherMsgs.begin(); iter != otherMsgs.end(); iter++) {
710             cout << "     " << *iter << endl;
711         }
712     }
713 }
714 
715 void ErrorMonitor::ExpectSuccess(VkDebugReportFlagsEXT const message_flag_mask) {
716     // Match ANY message matching specified type
717     SetDesiredFailureMsg(message_flag_mask, "");
718     message_flags_ = message_flag_mask;  // override mask handling in SetDesired...
719 }
720 
721 void ErrorMonitor::VerifyFound() {
722     // Not receiving expected message(s) is a failure. /Before/ throwing, dump any other messages
723     if (!AllDesiredMsgsFound()) {
724         DumpFailureMsgs();
725         for (const auto desired_msg : desired_message_strings_) {
726             ADD_FAILURE() << "Did not receive expected error '" << desired_msg << "'";
727         }
728     } else if (GetOtherFailureMsgs().size() > 0) {
729         // Fail test case for any unexpected errors
730 #if defined(ANDROID)
731         // This will get unexpected errors into the adb log
732         for (auto msg : other_messages_) {
733             __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "[ UNEXPECTED_ERR ] '%s'", msg.c_str());
734         }
735 #else
736         ADD_FAILURE() << "Received unexpected error(s).";
737 #endif
738     }
739     Reset();
740 }
741 
742 void ErrorMonitor::VerifyNotFound() {
743     // ExpectSuccess() configured us to match anything. Any error is a failure.
744     if (AnyDesiredMsgFound()) {
745         DumpFailureMsgs();
746         for (const auto msg : failure_message_strings_) {
747             ADD_FAILURE() << "Expected to succeed but got error: " << msg;
748         }
749     } else if (GetOtherFailureMsgs().size() > 0) {
750         // Fail test case for any unexpected errors
751 #if defined(ANDROID)
752         // This will get unexpected errors into the adb log
753         for (auto msg : other_messages_) {
754             __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "[ UNEXPECTED_ERR ] '%s'", msg.c_str());
755         }
756 #else
757         ADD_FAILURE() << "Received unexpected error(s).";
758 #endif
759     }
760     Reset();
761 }
762 
763 bool ErrorMonitor::IgnoreMessage(std::string const &msg) const {
764     if (ignore_message_strings_.empty()) {
765         return false;
766     }
767 
768     return std::find_if(ignore_message_strings_.begin(), ignore_message_strings_.end(), [&msg](std::string const &str) {
769                return msg.find(str) != std::string::npos;
770            }) != ignore_message_strings_.end();
771 }
772 
773 void VkLayerTest::VKTriangleTest(BsoFailSelect failCase) {
774     ASSERT_TRUE(m_device && m_device->initialized());  // VKTriangleTest assumes Init() has finished
775 
776     ASSERT_NO_FATAL_FAILURE(InitViewport());
777 
778     VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
779     VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
780 
781     VkPipelineObj pipelineobj(m_device);
782     pipelineobj.AddDefaultColorAttachment();
783     pipelineobj.AddShader(&vs);
784     pipelineobj.AddShader(&ps);
785 
786     bool failcase_needs_depth = false;  // to mark cases that need depth attachment
787 
788     VkBufferObj index_buffer;
789 
790     switch (failCase) {
791         case BsoFailLineWidth: {
792             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_WIDTH);
793             VkPipelineInputAssemblyStateCreateInfo ia_state = {};
794             ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
795             ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
796             pipelineobj.SetInputAssembly(&ia_state);
797             break;
798         }
799         case BsoFailLineStipple: {
800             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_STIPPLE_EXT);
801             VkPipelineInputAssemblyStateCreateInfo ia_state = {};
802             ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
803             ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
804             pipelineobj.SetInputAssembly(&ia_state);
805 
806             VkPipelineRasterizationLineStateCreateInfoEXT line_state = {};
807             line_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
808             line_state.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
809             line_state.stippledLineEnable = VK_TRUE;
810             line_state.lineStippleFactor = 0;
811             line_state.lineStipplePattern = 0;
812             pipelineobj.SetLineState(&line_state);
813             break;
814         }
815         case BsoFailDepthBias: {
816             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BIAS);
817             VkPipelineRasterizationStateCreateInfo rs_state = {};
818             rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
819             rs_state.depthBiasEnable = VK_TRUE;
820             rs_state.lineWidth = 1.0f;
821             pipelineobj.SetRasterization(&rs_state);
822             break;
823         }
824         case BsoFailViewport: {
825             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
826             break;
827         }
828         case BsoFailScissor: {
829             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
830             break;
831         }
832         case BsoFailBlend: {
833             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_BLEND_CONSTANTS);
834             VkPipelineColorBlendAttachmentState att_state = {};
835             att_state.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
836             att_state.blendEnable = VK_TRUE;
837             pipelineobj.AddColorAttachment(0, att_state);
838             break;
839         }
840         case BsoFailDepthBounds: {
841             failcase_needs_depth = true;
842             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BOUNDS);
843             break;
844         }
845         case BsoFailStencilReadMask: {
846             failcase_needs_depth = true;
847             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK);
848             break;
849         }
850         case BsoFailStencilWriteMask: {
851             failcase_needs_depth = true;
852             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK);
853             break;
854         }
855         case BsoFailStencilReference: {
856             failcase_needs_depth = true;
857             pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_REFERENCE);
858             break;
859         }
860 
861         case BsoFailIndexBuffer:
862             break;
863         case BsoFailIndexBufferBadSize:
864         case BsoFailIndexBufferBadOffset:
865         case BsoFailIndexBufferBadMapSize:
866         case BsoFailIndexBufferBadMapOffset: {
867             // Create an index buffer for these tests.
868             // There is no need to populate it because we should bail before trying to draw.
869             uint32_t const indices[] = {0};
870             VkBufferCreateInfo buffer_info = {};
871             buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
872             buffer_info.size = 1024;
873             buffer_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
874             buffer_info.queueFamilyIndexCount = 1;
875             buffer_info.pQueueFamilyIndices = indices;
876             index_buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
877         } break;
878         case BsoFailCmdClearAttachments:
879             break;
880         case BsoFailNone:
881             break;
882         default:
883             break;
884     }
885 
886     VkDescriptorSetObj descriptorSet(m_device);
887 
888     VkImageView *depth_attachment = nullptr;
889     if (failcase_needs_depth) {
890         m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
891         ASSERT_TRUE(m_depth_stencil_fmt != VK_FORMAT_UNDEFINED);
892 
893         m_depthStencil->Init(m_device, static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height), m_depth_stencil_fmt,
894                              VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
895         depth_attachment = m_depthStencil->BindInfo();
896     }
897 
898     ASSERT_NO_FATAL_FAILURE(InitRenderTarget(1, depth_attachment));
899     m_commandBuffer->begin();
900 
901     GenericDrawPreparation(m_commandBuffer, pipelineobj, descriptorSet, failCase);
902 
903     m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
904 
905     // render triangle
906     if (failCase == BsoFailIndexBuffer) {
907         // Use DrawIndexed w/o an index buffer bound
908         m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
909     } else if (failCase == BsoFailIndexBufferBadSize) {
910         // Bind the index buffer and draw one too many indices
911         m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
912         m_commandBuffer->DrawIndexed(513, 1, 0, 0, 0);
913     } else if (failCase == BsoFailIndexBufferBadOffset) {
914         // Bind the index buffer and draw one past the end of the buffer using the offset
915         m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
916         m_commandBuffer->DrawIndexed(512, 1, 1, 0, 0);
917     } else if (failCase == BsoFailIndexBufferBadMapSize) {
918         // Bind the index buffer at the middle point and draw one too many indices
919         m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
920         m_commandBuffer->DrawIndexed(257, 1, 0, 0, 0);
921     } else if (failCase == BsoFailIndexBufferBadMapOffset) {
922         // Bind the index buffer at the middle point and draw one past the end of the buffer
923         m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
924         m_commandBuffer->DrawIndexed(256, 1, 1, 0, 0);
925     } else {
926         m_commandBuffer->Draw(3, 1, 0, 0);
927     }
928 
929     if (failCase == BsoFailCmdClearAttachments) {
930         VkClearAttachment color_attachment = {};
931         color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
932         color_attachment.colorAttachment = 2000000000;  // Intentionally out-of-range attachment index; a correct test would use 0
933         VkClearRect clear_rect = {{{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}}, 0, 1};
934 
935         vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
936     }
937 
938     // finalize recording of the command buffer
939     m_commandBuffer->EndRenderPass();
940     m_commandBuffer->end();
941     m_commandBuffer->QueueCommandBuffer(true);
942     DestroyRenderTarget();
943 }
944 
945 void VkLayerTest::GenericDrawPreparation(VkCommandBufferObj *commandBuffer, VkPipelineObj &pipelineobj,
946                                          VkDescriptorSetObj &descriptorSet, BsoFailSelect failCase) {
947     commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color, m_stencil_clear_color);
948 
949     commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
950     // Make sure depthWriteEnable is set so that Depth fail test will work
951     // correctly
952     // Make sure stencilTestEnable is set so that Stencil fail test will work
953     // correctly
954     VkStencilOpState stencil = {};
955     stencil.failOp = VK_STENCIL_OP_KEEP;
956     stencil.passOp = VK_STENCIL_OP_KEEP;
957     stencil.depthFailOp = VK_STENCIL_OP_KEEP;
958     stencil.compareOp = VK_COMPARE_OP_NEVER;
959 
960     VkPipelineDepthStencilStateCreateInfo ds_ci = {};
961     ds_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
962     ds_ci.pNext = NULL;
963     ds_ci.depthTestEnable = VK_FALSE;
964     ds_ci.depthWriteEnable = VK_TRUE;
965     ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
966     ds_ci.depthBoundsTestEnable = VK_FALSE;
967     if (failCase == BsoFailDepthBounds) {
968         ds_ci.depthBoundsTestEnable = VK_TRUE;
969         ds_ci.maxDepthBounds = 0.0f;
970         ds_ci.minDepthBounds = 0.0f;
971     }
972     ds_ci.stencilTestEnable = VK_TRUE;
973     ds_ci.front = stencil;
974     ds_ci.back = stencil;
975 
976     pipelineobj.SetDepthStencil(&ds_ci);
977     pipelineobj.SetViewport(m_viewports);
978     pipelineobj.SetScissor(m_scissors);
979     descriptorSet.CreateVKDescriptorSet(commandBuffer);
980     VkResult err = pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
981     ASSERT_VK_SUCCESS(err);
982     vkCmdBindPipeline(commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineobj.handle());
983     commandBuffer->BindDescriptorSet(descriptorSet);
984 }
985 
986 void VkLayerTest::Init(VkPhysicalDeviceFeatures *features, VkPhysicalDeviceFeatures2 *features2,
987                        const VkCommandPoolCreateFlags flags, void *instance_pnext) {
988     InitFramework(myDbgFunc, m_errorMonitor, instance_pnext);
989     InitState(features, features2, flags);
990 }
991 
992 ErrorMonitor *VkLayerTest::Monitor() { return m_errorMonitor; }
993 
994 VkCommandBufferObj *VkLayerTest::CommandBuffer() { return m_commandBuffer; }
995 
996 VkLayerTest::VkLayerTest() {
997     m_enableWSI = false;
998 
999     m_instance_layer_names.clear();
1000     m_instance_extension_names.clear();
1001     m_device_extension_names.clear();
1002 
1003     // Add default instance extensions to the list
1004     m_instance_extension_names.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
1005 
1006     if (VkTestFramework::m_khronos_layer_disable) {
1007         m_instance_layer_names.push_back("VK_LAYER_GOOGLE_threading");
1008         m_instance_layer_names.push_back("VK_LAYER_LUNARG_parameter_validation");
1009         m_instance_layer_names.push_back("VK_LAYER_LUNARG_object_tracker");
1010         m_instance_layer_names.push_back("VK_LAYER_LUNARG_core_validation");
1011         m_instance_layer_names.push_back("VK_LAYER_GOOGLE_unique_objects");
1012     } else {
1013         m_instance_layer_names.push_back("VK_LAYER_KHRONOS_validation");
1014     }
1015     if (VkTestFramework::m_devsim_layer) {
1016         if (InstanceLayerSupported("VK_LAYER_LUNARG_device_simulation")) {
1017             m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_simulation");
1018         } else {
1019             VkTestFramework::m_devsim_layer = false;
1020             printf("             Did not find VK_LAYER_LUNARG_device_simulation layer so it will not be enabled.\n");
1021         }
1022     }
1023 
1024     this->app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
1025     this->app_info.pNext = NULL;
1026     this->app_info.pApplicationName = "layer_tests";
1027     this->app_info.applicationVersion = 1;
1028     this->app_info.pEngineName = "unittest";
1029     this->app_info.engineVersion = 1;
1030     this->app_info.apiVersion = VK_API_VERSION_1_0;
1031 
1032     m_errorMonitor = new ErrorMonitor;
1033 
1034     // Find out what version the instance supports and record the default target instance version
1035     auto enumerateInstanceVersion = (PFN_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion");
1036     if (enumerateInstanceVersion) {
1037         enumerateInstanceVersion(&m_instance_api_version);
1038     } else {
1039         m_instance_api_version = VK_API_VERSION_1_0;
1040     }
1041     m_target_api_version = app_info.apiVersion;
1042 }
1043 
1044 bool VkLayerTest::AddSurfaceInstanceExtension() {
1045     m_enableWSI = true;
1046     if (!InstanceExtensionSupported(VK_KHR_SURFACE_EXTENSION_NAME)) {
1047         printf("%s VK_KHR_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
1048         return false;
1049     }
1050     m_instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
1051 
1052     bool bSupport = false;
1053 #if defined(VK_USE_PLATFORM_WIN32_KHR)
1054     if (!InstanceExtensionSupported(VK_KHR_WIN32_SURFACE_EXTENSION_NAME)) {
1055         printf("%s VK_KHR_WIN32_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
1056         return false;
1057     }
1058     m_instance_extension_names.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
1059     bSupport = true;
1060 #endif
1061 
1062 #if defined(VK_USE_PLATFORM_ANDROID_KHR) && defined(VALIDATION_APK)
1063     if (!InstanceExtensionSupported(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME)) {
1064         printf("%s VK_KHR_ANDROID_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
1065         return false;
1066     }
1067     m_instance_extension_names.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
1068     bSupport = true;
1069 #endif
1070 
1071 #if defined(VK_USE_PLATFORM_XLIB_KHR)
1072     if (!InstanceExtensionSupported(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
1073         printf("%s VK_KHR_XLIB_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
1074         return false;
1075     }
1076     if (XOpenDisplay(NULL)) {
1077         m_instance_extension_names.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
1078         bSupport = true;
1079     }
1080 #endif
1081 
1082 #if defined(VK_USE_PLATFORM_XCB_KHR)
1083     if (!InstanceExtensionSupported(VK_KHR_XCB_SURFACE_EXTENSION_NAME)) {
1084         printf("%s VK_KHR_XCB_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
1085         return false;
1086     }
1087     if (!bSupport && xcb_connect(NULL, NULL)) {
1088         m_instance_extension_names.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
1089         bSupport = true;
1090     }
1091 #endif
1092 
1093     if (bSupport) return true;
1094     printf("%s No platform's surface extension supported\n", kSkipPrefix);
1095     return false;
1096 }
1097 
1098 bool VkLayerTest::AddSwapchainDeviceExtension() {
1099     if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
1100         printf("%s VK_KHR_SWAPCHAIN_EXTENSION_NAME extension not supported\n", kSkipPrefix);
1101         return false;
1102     }
1103     m_device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1104     return true;
1105 }
1106 
1107 uint32_t VkLayerTest::SetTargetApiVersion(uint32_t target_api_version) {
1108     if (target_api_version == 0) target_api_version = VK_API_VERSION_1_0;
1109     if (target_api_version <= m_instance_api_version) {
1110         m_target_api_version = target_api_version;
1111         app_info.apiVersion = m_target_api_version;
1112     }
1113     return m_target_api_version;
1114 }
1115 uint32_t VkLayerTest::DeviceValidationVersion() {
1116     // The validation layers assume the version being validated against is the apiVersion, unless the device apiVersion is lower
1117     VkPhysicalDeviceProperties props;
1118     GetPhysicalDeviceProperties(&props);
1119     return std::min(m_target_api_version, props.apiVersion);
1120 }
1121 
1122 bool VkLayerTest::LoadDeviceProfileLayer(
1123     PFN_vkSetPhysicalDeviceFormatPropertiesEXT &fpvkSetPhysicalDeviceFormatPropertiesEXT,
1124     PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT &fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT) {
1125     // Load required functions
1126     fpvkSetPhysicalDeviceFormatPropertiesEXT =
1127         (PFN_vkSetPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatPropertiesEXT");
1128     fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = (PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(
1129         instance(), "vkGetOriginalPhysicalDeviceFormatPropertiesEXT");
1130 
1131     if (!(fpvkSetPhysicalDeviceFormatPropertiesEXT) || !(fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
1132         printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
1133         return false;
1134     }
1135 
1136     return true;
1137 }
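// Illustrative sketch (not part of the framework): tests typically use the device_profile_api entry
// points loaded above to fake format support, along these lines:
//   PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
//   PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
//   if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) return;
//   VkFormatProperties props;
//   fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R8G8B8A8_UNORM, &props);
//   props.optimalTilingFeatures &= ~VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;  // drop a feature to provoke an error
//   fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R8G8B8A8_UNORM, props);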
1138 
1139 VkLayerTest::~VkLayerTest() {
1140     // Clean up resources before we reset
1141     delete m_errorMonitor;
1142 }
1143 
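// For the invalid-offset test flags, the test is only meaningful when the fixed test offset
// (eOffsetAlignment) is smaller than the relevant limit: the buffer's memory alignment for
// eInvalidMemoryOffset, or the min*OffsetAlignment device limit matching the buffer usage for
// eInvalidDeviceOffset.  All other test flags are always considered valid.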
1144 bool VkBufferTest::GetTestConditionValid(VkDeviceObj *aVulkanDevice, eTestEnFlags aTestFlag, VkBufferUsageFlags aBufferUsage) {
1145     if (eInvalidDeviceOffset != aTestFlag && eInvalidMemoryOffset != aTestFlag) {
1146         return true;
1147     }
1148     VkDeviceSize offset_limit = 0;
1149     if (eInvalidMemoryOffset == aTestFlag) {
1150         VkBuffer vulkanBuffer;
1151         VkBufferCreateInfo buffer_create_info = {};
1152         buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
1153         buffer_create_info.size = 32;
1154         buffer_create_info.usage = aBufferUsage;
1155 
1156         vkCreateBuffer(aVulkanDevice->device(), &buffer_create_info, nullptr, &vulkanBuffer);
1157         VkMemoryRequirements memory_reqs = {};
1158 
1159         vkGetBufferMemoryRequirements(aVulkanDevice->device(), vulkanBuffer, &memory_reqs);
1160         vkDestroyBuffer(aVulkanDevice->device(), vulkanBuffer, nullptr);
1161         offset_limit = memory_reqs.alignment;
1162     } else if ((VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) & aBufferUsage) {
1163         offset_limit = aVulkanDevice->props.limits.minTexelBufferOffsetAlignment;
1164     } else if (VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT & aBufferUsage) {
1165         offset_limit = aVulkanDevice->props.limits.minUniformBufferOffsetAlignment;
1166     } else if (VK_BUFFER_USAGE_STORAGE_BUFFER_BIT & aBufferUsage) {
1167         offset_limit = aVulkanDevice->props.limits.minStorageBufferOffsetAlignment;
1168     }
1169     return eOffsetAlignment < offset_limit;
1170 }
1171 
1172 VkBufferTest::VkBufferTest(VkDeviceObj *aVulkanDevice, VkBufferUsageFlags aBufferUsage, eTestEnFlags aTestFlag)
1173     : AllocateCurrent(true),
1174       BoundCurrent(false),
1175       CreateCurrent(false),
1176       InvalidDeleteEn(false),
1177       VulkanDevice(aVulkanDevice->device()) {
1178     if (eBindNullBuffer == aTestFlag || eBindFakeBuffer == aTestFlag) {
1179         VkMemoryAllocateInfo memory_allocate_info = {};
1180         memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1181         memory_allocate_info.allocationSize = 1;   // fake size -- shouldn't matter for the test
1182         memory_allocate_info.memoryTypeIndex = 0;  // fake type -- shouldn't matter for the test
1183         vkAllocateMemory(VulkanDevice, &memory_allocate_info, nullptr, &VulkanMemory);
1184 
1185         VulkanBuffer = (aTestFlag == eBindNullBuffer) ? VK_NULL_HANDLE : (VkBuffer)0xCDCDCDCDCDCDCDCD;
1186 
1187         vkBindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, 0);
1188     } else {
1189         VkBufferCreateInfo buffer_create_info = {};
1190         buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
1191         buffer_create_info.size = 32;
1192         buffer_create_info.usage = aBufferUsage;
1193 
1194         vkCreateBuffer(VulkanDevice, &buffer_create_info, nullptr, &VulkanBuffer);
1195 
1196         CreateCurrent = true;
1197 
1198         VkMemoryRequirements memory_requirements;
1199         vkGetBufferMemoryRequirements(VulkanDevice, VulkanBuffer, &memory_requirements);
1200 
1201         VkMemoryAllocateInfo memory_allocate_info = {};
1202         memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1203         memory_allocate_info.allocationSize = memory_requirements.size + eOffsetAlignment;
1204         bool pass = aVulkanDevice->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info,
1205                                                          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1206         if (!pass) {
1207             CreateCurrent = false;
1208             vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
1209             return;
1210         }
1211 
1212         vkAllocateMemory(VulkanDevice, &memory_allocate_info, NULL, &VulkanMemory);
1213         // NB: 1 is intentionally an invalid offset value
1214         const bool offset_en = eInvalidDeviceOffset == aTestFlag || eInvalidMemoryOffset == aTestFlag;
1215         vkBindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, offset_en ? eOffsetAlignment : 0);
1216         BoundCurrent = true;
1217 
1218         InvalidDeleteEn = (eFreeInvalidHandle == aTestFlag);
1219     }
1220 }
1221 
1222 VkBufferTest::~VkBufferTest() {
1223     if (CreateCurrent) {
1224         vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
1225     }
1226     if (AllocateCurrent) {
1227         if (InvalidDeleteEn) {
1228             auto bad_memory = CastFromUint64<VkDeviceMemory>(CastToUint64(VulkanMemory) + 1);
1229             vkFreeMemory(VulkanDevice, bad_memory, nullptr);
1230         }
1231         vkFreeMemory(VulkanDevice, VulkanMemory, nullptr);
1232     }
1233 }
1234 
1235 bool VkBufferTest::GetBufferCurrent() { return AllocateCurrent && BoundCurrent && CreateCurrent; }
1236 
1237 const VkBuffer &VkBufferTest::GetBuffer() { return VulkanBuffer; }
1238 
1239 void VkBufferTest::TestDoubleDestroy() {
1240     // Destroy the buffer but leave the flag set, which will cause
1241     // the buffer to be destroyed again in the destructor.
1242     vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
1243 }
1244 
1245 uint32_t VkVerticesObj::BindIdGenerator;
1246 
1247 VkVerticesObj::VkVerticesObj(VkDeviceObj *aVulkanDevice, unsigned aAttributeCount, unsigned aBindingCount, unsigned aByteStride,
1248                              VkDeviceSize aVertexCount, const float *aVerticies)
1249     : BoundCurrent(false),
1250       AttributeCount(aAttributeCount),
1251       BindingCount(aBindingCount),
1252       BindId(BindIdGenerator),
1253       PipelineVertexInputStateCreateInfo(),
1254       VulkanMemoryBuffer(aVulkanDevice, static_cast<int>(aByteStride * aVertexCount), reinterpret_cast<const void *>(aVerticies),
1255                          VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) {
1256     BindIdGenerator++;  // NB: This can wrap w/misuse
1257 
1258     VertexInputAttributeDescription = new VkVertexInputAttributeDescription[AttributeCount];
1259     VertexInputBindingDescription = new VkVertexInputBindingDescription[BindingCount];
1260 
1261     PipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions = VertexInputAttributeDescription;
1262     PipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount = AttributeCount;
1263     PipelineVertexInputStateCreateInfo.pVertexBindingDescriptions = VertexInputBindingDescription;
1264     PipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount = BindingCount;
1265     PipelineVertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
1266 
1267     unsigned i = 0;
1268     do {
1269         VertexInputAttributeDescription[i].binding = BindId;
1270         VertexInputAttributeDescription[i].location = i;
1271         VertexInputAttributeDescription[i].format = VK_FORMAT_R32G32B32_SFLOAT;
1272         VertexInputAttributeDescription[i].offset = sizeof(float) * aByteStride;
1273         i++;
1274     } while (i < AttributeCount);
1275 
1276     i = 0;
1277     do {
1278         VertexInputBindingDescription[i].binding = BindId;
1279         VertexInputBindingDescription[i].stride = aByteStride;
1280         VertexInputBindingDescription[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
1281         i++;
1282     } while (i < BindingCount);
1283 }
1284 
1285 VkVerticesObj::~VkVerticesObj() {
1286     if (VertexInputAttributeDescription) {
1287         delete[] VertexInputAttributeDescription;
1288     }
1289     if (VertexInputBindingDescription) {
1290         delete[] VertexInputBindingDescription;
1291     }
1292 }
1293 
1294 bool VkVerticesObj::AddVertexInputToPipe(VkPipelineObj &aPipelineObj) {
1295     aPipelineObj.AddVertexInputAttribs(VertexInputAttributeDescription, AttributeCount);
1296     aPipelineObj.AddVertexInputBindings(VertexInputBindingDescription, BindingCount);
1297     return true;
1298 }
1299 
1300 bool VkVerticesObj::AddVertexInputToPipeHelpr(CreatePipelineHelper *pipelineHelper) {
1301     pipelineHelper->vi_ci_.pVertexBindingDescriptions = VertexInputBindingDescription;
1302     pipelineHelper->vi_ci_.vertexBindingDescriptionCount = BindingCount;
1303     pipelineHelper->vi_ci_.pVertexAttributeDescriptions = VertexInputAttributeDescription;
1304     pipelineHelper->vi_ci_.vertexAttributeDescriptionCount = AttributeCount;
1305     return true;
1306 }
1307 
1308 void VkVerticesObj::BindVertexBuffers(VkCommandBuffer aCommandBuffer, unsigned aOffsetCount, VkDeviceSize *aOffsetList) {
1309     VkDeviceSize *offsetList;
1310     unsigned offsetCount;
1311 
1312     if (aOffsetCount) {
1313         offsetList = aOffsetList;
1314         offsetCount = aOffsetCount;
1315     } else {
1316         offsetList = new VkDeviceSize[1]();
1317         offsetCount = 1;
1318     }
1319 
1320     vkCmdBindVertexBuffers(aCommandBuffer, BindId, offsetCount, &VulkanMemoryBuffer.handle(), offsetList);
1321     BoundCurrent = true;
1322 
1323     if (!aOffsetCount) {
1324         delete[] offsetList;
1325     }
1326 }
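// Illustrative sketch (typical test usage; vertex_data is a hypothetical array of 3 vertices of 3 floats each):
//   VkVerticesObj vertices(m_device, 1, 1, sizeof(float) * 3, 3, vertex_data);
//   vertices.AddVertexInputToPipe(pipeline);
//   m_commandBuffer->begin();
//   vertices.BindVertexBuffers(m_commandBuffer->handle(), 0, nullptr);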
1327 
1328 OneOffDescriptorSet::OneOffDescriptorSet(VkDeviceObj *device, const Bindings &bindings,
1329                                          VkDescriptorSetLayoutCreateFlags layout_flags, void *layout_pnext,
1330                                          VkDescriptorPoolCreateFlags poolFlags, void *allocate_pnext)
1331     : device_{device}, pool_{}, layout_(device, bindings, layout_flags, layout_pnext), set_{} {
1332     VkResult err;
1333 
1334     std::vector<VkDescriptorPoolSize> sizes;
1335     for (const auto &b : bindings) sizes.push_back({b.descriptorType, std::max(1u, b.descriptorCount)});
1336 
1337     VkDescriptorPoolCreateInfo dspci = {
1338         VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr, poolFlags, 1, uint32_t(sizes.size()), sizes.data()};
1339     err = vkCreateDescriptorPool(device_->handle(), &dspci, nullptr, &pool_);
1340     if (err != VK_SUCCESS) return;
1341 
1342     VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, allocate_pnext, pool_, 1,
1343                                               &layout_.handle()};
1344     err = vkAllocateDescriptorSets(device_->handle(), &alloc_info, &set_);
1345 }
1346 
1347 OneOffDescriptorSet::~OneOffDescriptorSet() {
1348     // No need to destroy set-- it's going away with the pool.
1349     vkDestroyDescriptorPool(device_->handle(), pool_, nullptr);
1350 }
1351 
1352 bool OneOffDescriptorSet::Initialized() { return pool_ != VK_NULL_HANDLE && layout_.initialized() && set_ != VK_NULL_HANDLE; }
1353 
1354 void OneOffDescriptorSet::WriteDescriptorBufferInfo(int binding, VkBuffer buffer, VkDeviceSize size,
1355                                                     VkDescriptorType descriptorType) {
1356     VkDescriptorBufferInfo buffer_info = {};
1357     buffer_info.buffer = buffer;
1358     buffer_info.offset = 0;
1359     buffer_info.range = size;
1360     buffer_infos.emplace_back(buffer_info);
1361     size_t index = buffer_infos.size() - 1;
1362 
1363     VkWriteDescriptorSet descriptor_write;
1364     memset(&descriptor_write, 0, sizeof(descriptor_write));
1365     descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1366     descriptor_write.dstSet = set_;
1367     descriptor_write.dstBinding = binding;
1368     descriptor_write.descriptorCount = 1;
1369     descriptor_write.descriptorType = descriptorType;
1370     descriptor_write.pBufferInfo = &buffer_infos[index];
1371     descriptor_write.pImageInfo = nullptr;
1372     descriptor_write.pTexelBufferView = nullptr;
1373 
1374     descriptor_writes.emplace_back(descriptor_write);
1375 }
1376 
1377 void OneOffDescriptorSet::WriteDescriptorBufferView(int binding, VkBufferView &buffer_view, VkDescriptorType descriptorType) {
1378     VkWriteDescriptorSet descriptor_write;
1379     memset(&descriptor_write, 0, sizeof(descriptor_write));
1380     descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1381     descriptor_write.dstSet = set_;
1382     descriptor_write.dstBinding = binding;
1383     descriptor_write.descriptorCount = 1;
1384     descriptor_write.descriptorType = descriptorType;
1385     descriptor_write.pTexelBufferView = &buffer_view;
1386     descriptor_write.pImageInfo = nullptr;
1387     descriptor_write.pBufferInfo = nullptr;
1388 
1389     descriptor_writes.emplace_back(descriptor_write);
1390 }
1391 
1392 void OneOffDescriptorSet::WriteDescriptorImageInfo(int binding, VkImageView image_view, VkSampler sampler,
1393                                                    VkDescriptorType descriptorType) {
1394     VkDescriptorImageInfo image_info = {};
1395     image_info.imageView = image_view;
1396     image_info.sampler = sampler;
1397     image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1398     image_infos.emplace_back(image_info);
1399     size_t index = image_infos.size() - 1;
1400 
1401     VkWriteDescriptorSet descriptor_write;
1402     memset(&descriptor_write, 0, sizeof(descriptor_write));
1403     descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1404     descriptor_write.dstSet = set_;
1405     descriptor_write.dstBinding = binding;
1406     descriptor_write.descriptorCount = 1;
1407     descriptor_write.descriptorType = descriptorType;
1408     descriptor_write.pImageInfo = &image_infos[index];
1409     descriptor_write.pBufferInfo = nullptr;
1410     descriptor_write.pTexelBufferView = nullptr;
1411 
1412     descriptor_writes.emplace_back(descriptor_write);
1413 }
1414 
1415 void OneOffDescriptorSet::UpdateDescriptorSets() {
1416     vkUpdateDescriptorSets(device_->handle(), descriptor_writes.size(), descriptor_writes.data(), 0, NULL);
1417 }
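// Illustrative sketch (typical test usage):
//   OneOffDescriptorSet descriptor_set(m_device, {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}},
//                                      0, nullptr, 0, nullptr);
//   descriptor_set.WriteDescriptorBufferInfo(0, buffer.handle(), 1024, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
//   descriptor_set.UpdateDescriptorSets();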
1418 
1419 CreatePipelineHelper::CreatePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
1420 
1421 CreatePipelineHelper::~CreatePipelineHelper() {
1422     VkDevice device = layer_test_.device();
1423     vkDestroyPipelineCache(device, pipeline_cache_, nullptr);
1424     vkDestroyPipeline(device, pipeline_, nullptr);
1425 }
1426 
1427 void CreatePipelineHelper::InitDescriptorSetInfo() {
1428     dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
1429 }
1430 
1431 void CreatePipelineHelper::InitInputAndVertexInfo() {
1432     vi_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
1433 
1434     ia_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
1435     ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
1436 }
1437 
1438 void CreatePipelineHelper::InitMultisampleInfo() {
1439     pipe_ms_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
1440     pipe_ms_state_ci_.pNext = nullptr;
1441     pipe_ms_state_ci_.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
1442     pipe_ms_state_ci_.sampleShadingEnable = VK_FALSE;
1443     pipe_ms_state_ci_.minSampleShading = 1.0;
1444     pipe_ms_state_ci_.pSampleMask = NULL;
1445 }
1446 
1447 void CreatePipelineHelper::InitPipelineLayoutInfo() {
1448     pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
1449     pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
1450     pipeline_layout_ci_.pSetLayouts = nullptr;  // must be bound after it is created
1451 }
1452 
1453 void CreatePipelineHelper::InitViewportInfo() {
1454     viewport_ = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
1455     scissor_ = {{0, 0}, {64, 64}};
1456 
1457     vp_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
1458     vp_state_ci_.pNext = nullptr;
1459     vp_state_ci_.viewportCount = 1;
1460     vp_state_ci_.pViewports = &viewport_;  // ignored if dynamic
1461     vp_state_ci_.scissorCount = 1;
1462     vp_state_ci_.pScissors = &scissor_;  // ignored if dynamic
1463 }
1464 
1465 void CreatePipelineHelper::InitDynamicStateInfo() {
1466     // Use a "validity" check on the {} initialized structure to detect initialization
1467     // during late bind
1468 }
1469 
1470 void CreatePipelineHelper::InitShaderInfo() {
1471     vs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, &layer_test_));
1472     fs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, &layer_test_));
1473     // We shouldn't need a fragment shader but add it to be able to run on more devices
1474     shader_stages_ = {vs_->GetStageCreateInfo(), fs_->GetStageCreateInfo()};
1475 }
1476 
1477 void CreatePipelineHelper::InitRasterizationInfo() {
1478     rs_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
1479     rs_state_ci_.pNext = &line_state_ci_;
1480     rs_state_ci_.flags = 0;
1481     rs_state_ci_.depthClampEnable = VK_FALSE;
1482     rs_state_ci_.rasterizerDiscardEnable = VK_FALSE;
1483     rs_state_ci_.polygonMode = VK_POLYGON_MODE_FILL;
1484     rs_state_ci_.cullMode = VK_CULL_MODE_BACK_BIT;
1485     rs_state_ci_.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
1486     rs_state_ci_.depthBiasEnable = VK_FALSE;
1487     rs_state_ci_.lineWidth = 1.0F;
1488 }
1489 
1490 void CreatePipelineHelper::InitLineRasterizationInfo() {
1491     line_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
1492     line_state_ci_.pNext = nullptr;
1493     line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
1494     line_state_ci_.stippledLineEnable = VK_FALSE;
1495     line_state_ci_.lineStippleFactor = 0;
1496     line_state_ci_.lineStipplePattern = 0;
1497 }
1498 
1499 void CreatePipelineHelper::InitBlendStateInfo() {
1500     cb_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
1501     cb_ci_.logicOpEnable = VK_FALSE;
1502     cb_ci_.logicOp = VK_LOGIC_OP_COPY;  // ignored if enable is VK_FALSE above
1503     cb_ci_.attachmentCount = layer_test_.RenderPassInfo().subpassCount;
1504     ASSERT_TRUE(IsValidVkStruct(layer_test_.RenderPassInfo()));
1505     cb_ci_.pAttachments = &cb_attachments_;
1506     for (int i = 0; i < 4; i++) {
1507         cb_ci_.blendConstants[i] = 1.0F;
1508     }
1509 }
1510 
1511 void CreatePipelineHelper::InitGraphicsPipelineInfo() {
1512     // Color-only rendering in a subpass with no depth/stencil attachment
1513     // Active Pipeline Shader Stages
1514     //    Vertex Shader
1515     //    Fragment Shader
1516     // Required: Fixed-Function Pipeline Stages
1517     //    VkPipelineVertexInputStateCreateInfo
1518     //    VkPipelineInputAssemblyStateCreateInfo
1519     //    VkPipelineViewportStateCreateInfo
1520     //    VkPipelineRasterizationStateCreateInfo
1521     //    VkPipelineMultisampleStateCreateInfo
1522     //    VkPipelineColorBlendStateCreateInfo
1523     gp_ci_.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
1524     gp_ci_.pNext = nullptr;
1525     gp_ci_.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
1526     gp_ci_.pVertexInputState = &vi_ci_;
1527     gp_ci_.pInputAssemblyState = &ia_ci_;
1528     gp_ci_.pTessellationState = nullptr;
1529     gp_ci_.pViewportState = &vp_state_ci_;
1530     gp_ci_.pRasterizationState = &rs_state_ci_;
1531     gp_ci_.pMultisampleState = &pipe_ms_state_ci_;
1532     gp_ci_.pDepthStencilState = nullptr;
1533     gp_ci_.pColorBlendState = &cb_ci_;
1534     gp_ci_.pDynamicState = nullptr;
1535     gp_ci_.renderPass = layer_test_.renderPass();
1536 }
1537 
1538 void CreatePipelineHelper::InitPipelineCacheInfo() {
1539     pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
1540     pc_ci_.pNext = nullptr;
1541     pc_ci_.flags = 0;
1542     pc_ci_.initialDataSize = 0;
1543     pc_ci_.pInitialData = nullptr;
1544 }
1545 
1546 void CreatePipelineHelper::InitTesselationState() {
1547     // TBD -- add shaders and create_info
1548 }
1549 
1550 void CreatePipelineHelper::InitInfo() {
1551     InitDescriptorSetInfo();
1552     InitInputAndVertexInfo();
1553     InitMultisampleInfo();
1554     InitPipelineLayoutInfo();
1555     InitViewportInfo();
1556     InitDynamicStateInfo();
1557     InitShaderInfo();
1558     InitRasterizationInfo();
1559     InitLineRasterizationInfo();
1560     InitBlendStateInfo();
1561     InitGraphicsPipelineInfo();
1562     InitPipelineCacheInfo();
1563 }
1564 
1565 void CreatePipelineHelper::InitState() {
1566     VkResult err;
1567     descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
1568     ASSERT_TRUE(descriptor_set_->Initialized());
1569 
1570     const std::vector<VkPushConstantRange> push_ranges(
1571         pipeline_layout_ci_.pPushConstantRanges,
1572         pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
1573     pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);
1574 
1575     err = vkCreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
1576     ASSERT_VK_SUCCESS(err);
1577 }
1578 
1579 void CreatePipelineHelper::LateBindPipelineInfo() {
1580     // By value or dynamically located items must be late bound
1581     gp_ci_.layout = pipeline_layout_.handle();
1582     gp_ci_.stageCount = shader_stages_.size();
1583     gp_ci_.pStages = shader_stages_.data();
1584     if ((gp_ci_.pTessellationState == nullptr) && IsValidVkStruct(tess_ci_)) {
1585         gp_ci_.pTessellationState = &tess_ci_;
1586     }
1587     if ((gp_ci_.pDynamicState == nullptr) && IsValidVkStruct(dyn_state_ci_)) {
1588         gp_ci_.pDynamicState = &dyn_state_ci_;
1589     }
1590 }
1591 
1592 VkResult CreatePipelineHelper::CreateGraphicsPipeline(bool implicit_destroy, bool do_late_bind) {
1593     VkResult err;
1594     if (do_late_bind) {
1595         LateBindPipelineInfo();
1596     }
1597     if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
1598         vkDestroyPipeline(layer_test_.device(), pipeline_, nullptr);
1599         pipeline_ = VK_NULL_HANDLE;
1600     }
1601     err = vkCreateGraphicsPipelines(layer_test_.device(), pipeline_cache_, 1, &gp_ci_, NULL, &pipeline_);
1602     return err;
1603 }
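// Illustrative sketch: most graphics-pipeline tests drive this helper with the same pattern:
//   CreatePipelineHelper pipe(*this);
//   pipe.InitInfo();
//   // ...perturb individual create-info fields here to provoke the condition under test...
//   pipe.InitState();
//   pipe.CreateGraphicsPipeline(true, true);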
1604 
1605 CreateComputePipelineHelper::CreateComputePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
1606 
1607 CreateComputePipelineHelper::~CreateComputePipelineHelper() {
1608     VkDevice device = layer_test_.device();
1609     vkDestroyPipelineCache(device, pipeline_cache_, nullptr);
1610     vkDestroyPipeline(device, pipeline_, nullptr);
1611 }
1612 
1613 void CreateComputePipelineHelper::InitDescriptorSetInfo() {
1614     dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
1615 }
1616 
1617 void CreateComputePipelineHelper::InitPipelineLayoutInfo() {
1618     pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
1619     pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
1620     pipeline_layout_ci_.pSetLayouts = nullptr;  // must be bound after it is created
1621 }
1622 
1623 void CreateComputePipelineHelper::InitShaderInfo() {
1624     cs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateMinimalShaderText, VK_SHADER_STAGE_COMPUTE_BIT, &layer_test_));
1625     // A compute pipeline needs only this single compute shader stage
1626 }
1627 
1628 void CreateComputePipelineHelper::InitComputePipelineInfo() {
1629     cp_ci_.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
1630     cp_ci_.pNext = nullptr;
1631     cp_ci_.flags = 0;
1632 }
1633 
1634 void CreateComputePipelineHelper::InitPipelineCacheInfo() {
1635     pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
1636     pc_ci_.pNext = nullptr;
1637     pc_ci_.flags = 0;
1638     pc_ci_.initialDataSize = 0;
1639     pc_ci_.pInitialData = nullptr;
1640 }
1641 
1642 void CreateComputePipelineHelper::InitInfo() {
1643     InitDescriptorSetInfo();
1644     InitPipelineLayoutInfo();
1645     InitShaderInfo();
1646     InitComputePipelineInfo();
1647     InitPipelineCacheInfo();
1648 }
1649 
1650 void CreateComputePipelineHelper::InitState() {
1651     VkResult err;
1652     descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
1653     ASSERT_TRUE(descriptor_set_->Initialized());
1654 
1655     const std::vector<VkPushConstantRange> push_ranges(
1656         pipeline_layout_ci_.pPushConstantRanges,
1657         pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
1658     pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);
1659 
1660     err = vkCreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
1661     ASSERT_VK_SUCCESS(err);
1662 }
1663 
1664 void CreateComputePipelineHelper::LateBindPipelineInfo() {
1665     // By value or dynamically located items must be late bound
1666     cp_ci_.layout = pipeline_layout_.handle();
1667     cp_ci_.stage = cs_.get()->GetStageCreateInfo();
1668 }
1669 
1670 VkResult CreateComputePipelineHelper::CreateComputePipeline(bool implicit_destroy, bool do_late_bind) {
1671     VkResult err;
1672     if (do_late_bind) {
1673         LateBindPipelineInfo();
1674     }
1675     if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
1676         vkDestroyPipeline(layer_test_.device(), pipeline_, nullptr);
1677         pipeline_ = VK_NULL_HANDLE;
1678     }
1679     err = vkCreateComputePipelines(layer_test_.device(), pipeline_cache_, 1, &cp_ci_, NULL, &pipeline_);
1680     return err;
1681 }
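// Illustrative sketch: the compute helper mirrors CreatePipelineHelper:
//   CreateComputePipelineHelper pipe(*this);
//   pipe.InitInfo();
//   pipe.InitState();
//   pipe.CreateComputePipeline(true, true);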
1682 
1683 CreateNVRayTracingPipelineHelper::CreateNVRayTracingPipelineHelper(VkLayerTest &test) : layer_test_(test) {}
1684 CreateNVRayTracingPipelineHelper::~CreateNVRayTracingPipelineHelper() {
1685     VkDevice device = layer_test_.device();
1686     vkDestroyPipelineCache(device, pipeline_cache_, nullptr);
1687     vkDestroyPipeline(device, pipeline_, nullptr);
1688 }
1689 
1690 bool CreateNVRayTracingPipelineHelper::InitInstanceExtensions(VkLayerTest &test,
1691                                                               std::vector<const char *> &instance_extension_names) {
1692     if (test.InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1693         instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
1694     } else {
1695         printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
1696                VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
1697         return false;
1698     }
1699     return true;
1700 }
1701 
1702 bool CreateNVRayTracingPipelineHelper::InitDeviceExtensions(VkLayerTest &test, std::vector<const char *> &device_extension_names) {
1703     std::array<const char *, 2> required_device_extensions = {
1704         {VK_NV_RAY_TRACING_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}};
1705     for (auto device_extension : required_device_extensions) {
1706         if (test.DeviceExtensionSupported(test.gpu(), nullptr, device_extension)) {
1707             device_extension_names.push_back(device_extension);
1708         } else {
1709             printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
1710             return false;
1711         }
1712     }
1713     return true;
1714 }
1715 
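// Builds the minimal NV ray tracing shader group layout used by these tests: a general group for the
// ray generation shader (stage index 0), a triangles hit group whose closest-hit shader is stage index 1,
// and a general group for the miss shader (stage index 2).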
1716 void CreateNVRayTracingPipelineHelper::InitShaderGroups() {
1717     {
1718         VkRayTracingShaderGroupCreateInfoNV group = {};
1719         group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
1720         group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
1721         group.generalShader = 0;
1722         group.closestHitShader = VK_SHADER_UNUSED_NV;
1723         group.anyHitShader = VK_SHADER_UNUSED_NV;
1724         group.intersectionShader = VK_SHADER_UNUSED_NV;
1725         groups_.push_back(group);
1726     }
1727     {
1728         VkRayTracingShaderGroupCreateInfoNV group = {};
1729         group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
1730         group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
1731         group.generalShader = VK_SHADER_UNUSED_NV;
1732         group.closestHitShader = 1;
1733         group.anyHitShader = VK_SHADER_UNUSED_NV;
1734         group.intersectionShader = VK_SHADER_UNUSED_NV;
1735         groups_.push_back(group);
1736     }
1737     {
1738         VkRayTracingShaderGroupCreateInfoNV group = {};
1739         group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
1740         group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
1741         group.generalShader = 2;
1742         group.closestHitShader = VK_SHADER_UNUSED_NV;
1743         group.anyHitShader = VK_SHADER_UNUSED_NV;
1744         group.intersectionShader = VK_SHADER_UNUSED_NV;
1745         groups_.push_back(group);
1746     }
1747 }
1748 
1749 void CreateNVRayTracingPipelineHelper::InitDescriptorSetInfo() {
1750     dsl_bindings_ = {
1751         {0, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_RAYGEN_BIT_NV, nullptr},
1752         {1, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, VK_SHADER_STAGE_RAYGEN_BIT_NV, nullptr},
1753     };
1754 }
1755 
1756 void CreateNVRayTracingPipelineHelper::InitPipelineLayoutInfo() {
1757     pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
1758     pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
1759     pipeline_layout_ci_.pSetLayouts = nullptr;  // must be bound after it is created
1760 }
1761 
1762 void CreateNVRayTracingPipelineHelper::InitShaderInfo() {
1763     static const char rayGenShaderText[] =
1764         "#version 460 core                                                \n"
1765         "#extension GL_NV_ray_tracing : require                           \n"
1766         "layout(set = 0, binding = 0, rgba8) uniform image2D image;       \n"
1767         "layout(set = 0, binding = 1) uniform accelerationStructureNV as; \n"
1768         "                                                                 \n"
1769         "layout(location = 0) rayPayloadNV float payload;                 \n"
1770         "                                                                 \n"
1771         "void main()                                                      \n"
1772         "{                                                                \n"
1773         "   vec4 col = vec4(0, 0, 0, 1);                                  \n"
1774         "                                                                 \n"
1775         "   vec3 origin = vec3(float(gl_LaunchIDNV.x)/float(gl_LaunchSizeNV.x), "
1776         "float(gl_LaunchIDNV.y)/float(gl_LaunchSizeNV.y), "
1777         "1.0); \n"
1778         "   vec3 dir = vec3(0.0, 0.0, -1.0);                              \n"
1779         "                                                                 \n"
1780         "   payload = 0.5;                                                \n"
1781         "   traceNV(as, gl_RayFlagsCullBackFacingTrianglesNV, 0xff, 0, 1, 0, origin, 0.0, dir, 1000.0, 0); \n"
1782         "                                                                 \n"
1783         "   col.y = payload;                                              \n"
1784         "                                                                 \n"
1785         "   imageStore(image, ivec2(gl_LaunchIDNV.xy), col);              \n"
1786         "}\n";
1787 
1788     static char const closestHitShaderText[] =
1789         "#version 460 core                              \n"
1790         "#extension GL_NV_ray_tracing : require         \n"
1791         "layout(location = 0) rayPayloadInNV float hitValue;             \n"
1792         "                                               \n"
1793         "void main() {                                  \n"
1794         "    hitValue = 1.0;                            \n"
1795         "}                                              \n";
1796 
1797     static char const missShaderText[] =
1798         "#version 460 core                              \n"
1799         "#extension GL_NV_ray_tracing : require         \n"
1800         "layout(location = 0) rayPayloadInNV float hitValue; \n"
1801         "                                               \n"
1802         "void main() {                                  \n"
1803         "    hitValue = 0.0;                            \n"
1804         "}                                              \n";
1805 
1806     rgs_.reset(new VkShaderObj(layer_test_.DeviceObj(), rayGenShaderText, VK_SHADER_STAGE_RAYGEN_BIT_NV, &layer_test_));
1807     chs_.reset(new VkShaderObj(layer_test_.DeviceObj(), closestHitShaderText, VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, &layer_test_));
1808     mis_.reset(new VkShaderObj(layer_test_.DeviceObj(), missShaderText, VK_SHADER_STAGE_MISS_BIT_NV, &layer_test_));
1809 
1810     shader_stages_ = {rgs_->GetStageCreateInfo(), chs_->GetStageCreateInfo(), mis_->GetStageCreateInfo()};
1811 }
1812 
1813 void CreateNVRayTracingPipelineHelper::InitNVRayTracingPipelineInfo() {
1814     rp_ci_.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
1815 
1816     rp_ci_.stageCount = shader_stages_.size();
1817     rp_ci_.pStages = shader_stages_.data();
1818     rp_ci_.groupCount = groups_.size();
1819     rp_ci_.pGroups = groups_.data();
1820 }
1821 
1822 void CreateNVRayTracingPipelineHelper::InitPipelineCacheInfo() {
1823     pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
1824     pc_ci_.pNext = nullptr;
1825     pc_ci_.flags = 0;
1826     pc_ci_.initialDataSize = 0;
1827     pc_ci_.pInitialData = nullptr;
1828 }
1829 
1830 void CreateNVRayTracingPipelineHelper::InitInfo() {
1831     InitShaderGroups();
1832     InitDescriptorSetInfo();
1833     InitPipelineLayoutInfo();
1834     InitShaderInfo();
1835     InitNVRayTracingPipelineInfo();
1836     InitPipelineCacheInfo();
1837 }
1838 
1839 void CreateNVRayTracingPipelineHelper::InitState() {
1840     VkResult err;
1841     descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
1842     ASSERT_TRUE(descriptor_set_->Initialized());
1843 
1844     pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_});
1845 
1846     err = vkCreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
1847     ASSERT_VK_SUCCESS(err);
1848 }
1849 
1850 void CreateNVRayTracingPipelineHelper::LateBindPipelineInfo() {
1851     // By value or dynamically located items must be late bound
1852     rp_ci_.layout = pipeline_layout_.handle();
1853     rp_ci_.stageCount = shader_stages_.size();
1854     rp_ci_.pStages = shader_stages_.data();
1855 }
1856 
1857 VkResult CreateNVRayTracingPipelineHelper::CreateNVRayTracingPipeline(bool implicit_destroy, bool do_late_bind) {
1858     VkResult err;
1859     if (do_late_bind) {
1860         LateBindPipelineInfo();
1861     }
1862     if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
1863         vkDestroyPipeline(layer_test_.device(), pipeline_, nullptr);
1864         pipeline_ = VK_NULL_HANDLE;
1865     }
1866 
1867     PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV =
1868         (PFN_vkCreateRayTracingPipelinesNV)vkGetInstanceProcAddr(layer_test_.instance(), "vkCreateRayTracingPipelinesNV");
1869     err = vkCreateRayTracingPipelinesNV(layer_test_.device(), pipeline_cache_, 1, &rp_ci_, nullptr, &pipeline_);
1870     return err;
1871 }
1872 
1873 namespace chain_util {
1874 const void *ExtensionChain::Head() const { return head_; }
1875 }  // namespace chain_util
1876 
1877 BarrierQueueFamilyTestHelper::QueueFamilyObjs::~QueueFamilyObjs() {
1878     delete command_buffer2;
1879     delete command_buffer;
1880     delete command_pool;
1881     delete queue;
1882 }
1883 
1884 void BarrierQueueFamilyTestHelper::QueueFamilyObjs::Init(VkDeviceObj *device, uint32_t qf_index, VkQueue qf_queue,
1885                                                          VkCommandPoolCreateFlags cp_flags) {
1886     index = qf_index;
1887     queue = new VkQueueObj(qf_queue, qf_index);
1888     command_pool = new VkCommandPoolObj(device, qf_index, cp_flags);
1889     command_buffer = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
1890     command_buffer2 = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
1891 };
1892 
1893 BarrierQueueFamilyTestHelper::Context::Context(VkLayerTest *test, const std::vector<uint32_t> &queue_family_indices)
1894     : layer_test(test) {
1895     if (0 == queue_family_indices.size()) {
1896         return;  // This is invalid
1897     }
1898     VkDeviceObj *device_obj = layer_test->DeviceObj();
1899     queue_families.reserve(queue_family_indices.size());
1900     default_index = queue_family_indices[0];
1901     for (auto qfi : queue_family_indices) {
1902         VkQueue queue = device_obj->queue_family_queues(qfi)[0]->handle();
1903         queue_families.emplace(std::make_pair(qfi, QueueFamilyObjs()));
1904         queue_families[qfi].Init(device_obj, qfi, queue, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
1905     }
1906     Reset();
1907 }
1908 
1909 void BarrierQueueFamilyTestHelper::Context::Reset() {
1910     layer_test->DeviceObj()->wait();
1911     for (auto &qf : queue_families) {
1912         vkResetCommandPool(layer_test->device(), qf.second.command_pool->handle(), 0);
1913     }
1914 }
1915 
1916 BarrierQueueFamilyTestHelper::BarrierQueueFamilyTestHelper(Context *context)
1917     : context_(context), image_(context->layer_test->DeviceObj()) {}
1918 
1919 void BarrierQueueFamilyTestHelper::Init(std::vector<uint32_t> *families, bool image_memory, bool buffer_memory) {
1920     VkDeviceObj *device_obj = context_->layer_test->DeviceObj();
1921 
1922     image_.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0, families,
1923                 image_memory);
1924 
1925     ASSERT_TRUE(image_.initialized());
1926 
1927     image_barrier_ = image_.image_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, image_.Layout(),
1928                                                  image_.Layout(), image_.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1));
1929 
1930     VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1931     buffer_.init_as_src_and_dst(*device_obj, 256, mem_prop, families, buffer_memory);
1932     ASSERT_TRUE(buffer_.initialized());
1933     buffer_barrier_ = buffer_.buffer_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, 0, VK_WHOLE_SIZE);
1934 }
1935 
1936 BarrierQueueFamilyTestHelper::QueueFamilyObjs *BarrierQueueFamilyTestHelper::GetQueueFamilyInfo(Context *context, uint32_t qfi) {
1937     QueueFamilyObjs *qf;
1938 
1939     auto qf_it = context->queue_families.find(qfi);
1940     if (qf_it != context->queue_families.end()) {
1941         qf = &(qf_it->second);
1942     } else {
1943         qf = &(context->queue_families[context->default_index]);
1944     }
1945     return qf;
1946 }
1947 
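// Records the image and buffer queue-family ownership barriers with the given src/dst indices into the
// chosen queue family's command buffer(s), optionally submits them, and then checks the ErrorMonitor:
// positive cases must produce no message, negative cases must produce the img_err/buf_err messages.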
1948 void BarrierQueueFamilyTestHelper::operator()(std::string img_err, std::string buf_err, uint32_t src, uint32_t dst, bool positive,
1949                                               uint32_t queue_family_index, Modifier mod) {
1950     auto monitor = context_->layer_test->Monitor();
1951     if (img_err.length()) monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, img_err);
1952     if (buf_err.length()) monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, buf_err);
1953 
1954     image_barrier_.srcQueueFamilyIndex = src;
1955     image_barrier_.dstQueueFamilyIndex = dst;
1956     buffer_barrier_.srcQueueFamilyIndex = src;
1957     buffer_barrier_.dstQueueFamilyIndex = dst;
1958 
1959     QueueFamilyObjs *qf = GetQueueFamilyInfo(context_, queue_family_index);
1960 
1961     VkCommandBufferObj *command_buffer = qf->command_buffer;
1962     for (int cb_repeat = 0; cb_repeat < (mod == Modifier::DOUBLE_COMMAND_BUFFER ? 2 : 1); cb_repeat++) {
1963         command_buffer->begin();
1964         for (int repeat = 0; repeat < (mod == Modifier::DOUBLE_RECORD ? 2 : 1); repeat++) {
1965             vkCmdPipelineBarrier(command_buffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1966                                  VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buffer_barrier_, 1, &image_barrier_);
1967         }
1968         command_buffer->end();
1969         command_buffer = qf->command_buffer2;  // A second pass (if any) records into the second command buffer.
1970     }
1971 
1972     if (queue_family_index != kInvalidQueueFamily) {
1973         if (mod == Modifier::DOUBLE_COMMAND_BUFFER) {
1974             // The temporary Fence() resolves to VK_NULL_HANDLE, i.e. no fence
1975             qf->queue->submit({{qf->command_buffer, qf->command_buffer2}}, vk_testing::Fence(), positive);
1976         } else {
1977             qf->command_buffer->QueueCommandBuffer(positive);  // Check for success on positive tests only
1978         }
1979     }
1980 
1981     if (positive) {
1982         monitor->VerifyNotFound();
1983     } else {
1984         monitor->VerifyFound();
1985     }
1986     context_->Reset();
1987 };
1988 
1989 void print_android(const char *c) {
1990 #ifdef VK_USE_PLATFORM_ANDROID_KHR
1991     __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "%s", c);
1992 #endif  // VK_USE_PLATFORM_ANDROID_KHR
1993 }
1994 
1995 #if defined(ANDROID) && defined(VALIDATION_APK)
1996 const char *appTag = "VulkanLayerValidationTests";
1997 static bool initialized = false;
1998 static bool active = false;
1999 
2000 // Convert Intents to argv
2001 // Ported from Hologram sample, only difference is flexible key
2002 std::vector<std::string> get_args(android_app &app, const char *intent_extra_data_key) {
2003     std::vector<std::string> args;
2004     JavaVM &vm = *app.activity->vm;
2005     JNIEnv *p_env;
2006     if (vm.AttachCurrentThread(&p_env, nullptr) != JNI_OK) return args;
2007 
2008     JNIEnv &env = *p_env;
2009     jobject activity = app.activity->clazz;
2010     jmethodID get_intent_method = env.GetMethodID(env.GetObjectClass(activity), "getIntent", "()Landroid/content/Intent;");
2011     jobject intent = env.CallObjectMethod(activity, get_intent_method);
2012     jmethodID get_string_extra_method =
2013         env.GetMethodID(env.GetObjectClass(intent), "getStringExtra", "(Ljava/lang/String;)Ljava/lang/String;");
2014     jvalue get_string_extra_args;
2015     get_string_extra_args.l = env.NewStringUTF(intent_extra_data_key);
2016     jstring extra_str = static_cast<jstring>(env.CallObjectMethodA(intent, get_string_extra_method, &get_string_extra_args));
2017 
2018     std::string args_str;
2019     if (extra_str) {
2020         const char *extra_utf = env.GetStringUTFChars(extra_str, nullptr);
2021         args_str = extra_utf;
2022         env.ReleaseStringUTFChars(extra_str, extra_utf);
2023         env.DeleteLocalRef(extra_str);
2024     }
2025 
2026     env.DeleteLocalRef(get_string_extra_args.l);
2027     env.DeleteLocalRef(intent);
2028     vm.DetachCurrentThread();
2029 
2030     // split args_str
2031     std::stringstream ss(args_str);
2032     std::string arg;
2033     while (std::getline(ss, arg, ' ')) {
2034         if (!arg.empty()) args.push_back(arg);
2035     }
2036 
2037     return args;
2038 }
2039 
2040 void addFullTestCommentIfPresent(const ::testing::TestInfo &test_info, std::string &error_message) {
2041     const char *const type_param = test_info.type_param();
2042     const char *const value_param = test_info.value_param();
2043 
2044     if (type_param != NULL || value_param != NULL) {
2045         error_message.append(", where ");
2046         if (type_param != NULL) {
2047             error_message.append("TypeParam = ").append(type_param);
2048             if (value_param != NULL) error_message.append(" and ");
2049         }
2050         if (value_param != NULL) {
2051             error_message.append("GetParam() = ").append(value_param);
2052         }
2053     }
2054 }
2055 
2056 // Inspired by https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
2057 class LogcatPrinter : public ::testing::EmptyTestEventListener {
2058     // Called before a test starts.
2059     virtual void OnTestStart(const ::testing::TestInfo &test_info) {
2060         __android_log_print(ANDROID_LOG_INFO, appTag, "[ RUN      ] %s.%s", test_info.test_case_name(), test_info.name());
2061     }
2062 
2063     // Called after a failed assertion or a SUCCEED() invocation.
2064     virtual void OnTestPartResult(const ::testing::TestPartResult &result) {
2065         // If the test part succeeded, we don't need to do anything.
2066         if (result.type() == ::testing::TestPartResult::kSuccess) return;
2067 
2068         __android_log_print(ANDROID_LOG_INFO, appTag, "%s in %s:%d %s", result.failed() ? "*** Failure" : "Success",
2069                             result.file_name(), result.line_number(), result.summary());
2070     }
2071 
2072     // Called after a test ends.
2073     virtual void OnTestEnd(const ::testing::TestInfo &info) {
2074         std::string result;
2075         if (info.result()->Passed()) {
2076             result.append("[       OK ]");
2077         } else {
2078             result.append("[  FAILED  ]");
2079         }
2080         result.append(info.test_case_name()).append(".").append(info.name());
2081         if (info.result()->Failed()) addFullTestCommentIfPresent(info, result);
2082 
2083         if (::testing::GTEST_FLAG(print_time)) {
2084             std::ostringstream os;
2085             os << info.result()->elapsed_time();
2086             result.append(" (").append(os.str()).append(" ms)");
2087         }
2088 
2089         __android_log_print(ANDROID_LOG_INFO, appTag, "%s", result.c_str());
2090     };
2091 };
2092 
2093 static int32_t processInput(struct android_app *app, AInputEvent *event) { return 0; }
2094 
2095 static void processCommand(struct android_app *app, int32_t cmd) {
2096     switch (cmd) {
2097         case APP_CMD_INIT_WINDOW: {
2098             if (app->window) {
2099                 initialized = true;
2100                 VkTestFramework::window = app->window;
2101             }
2102             break;
2103         }
2104         case APP_CMD_GAINED_FOCUS: {
2105             active = true;
2106             break;
2107         }
2108         case APP_CMD_LOST_FOCUS: {
2109             active = false;
2110             break;
2111         }
2112     }
2113 }
2114 
2115 void android_main(struct android_app *app) {
2116     int vulkanSupport = InitVulkan();
2117     if (vulkanSupport == 0) {
2118         __android_log_print(ANDROID_LOG_INFO, appTag, "==== FAILED ==== No Vulkan support found");
2119         return;
2120     }
2121 
2122     app->onAppCmd = processCommand;
2123     app->onInputEvent = processInput;
2124 
2125     while (1) {
2126         int events;
2127         struct android_poll_source *source;
2128         while (ALooper_pollAll(active ? 0 : -1, NULL, &events, (void **)&source) >= 0) {
2129             if (source) {
2130                 source->process(app, source);
2131             }
2132 
2133             if (app->destroyRequested != 0) {
2134                 VkTestFramework::Finish();
2135                 return;
2136             }
2137         }
2138 
2139         if (initialized && active) {
2140             // Use the following key to send arguments to gtest, i.e.
2141             // --es args "--gtest_filter=-VkLayerTest.foo"
2142             const char key[] = "args";
2143             std::vector<std::string> args = get_args(*app, key);
2144 
2145             std::string filter = "";
2146             if (args.size() > 0) {
2147                 __android_log_print(ANDROID_LOG_INFO, appTag, "Intent args = %s", args[0].c_str());
2148                 filter += args[0];
2149             } else {
2150                 __android_log_print(ANDROID_LOG_INFO, appTag, "No Intent args detected");
2151             }
2152 
2153             int argc = 2;
2154             char *argv[] = {(char *)"foo", (char *)filter.c_str()};
2155             __android_log_print(ANDROID_LOG_DEBUG, appTag, "filter = %s", argv[1]);
2156 
2157             // Route output to files until we can override the gtest output
2158             freopen("/sdcard/Android/data/com.example.VulkanLayerValidationTests/files/out.txt", "w", stdout);
2159             freopen("/sdcard/Android/data/com.example.VulkanLayerValidationTests/files/err.txt", "w", stderr);
2160 
2161             ::testing::InitGoogleTest(&argc, argv);
2162 
2163             ::testing::TestEventListeners &listeners = ::testing::UnitTest::GetInstance()->listeners();
2164             listeners.Append(new LogcatPrinter);
2165 
2166             VkTestFramework::InitArgs(&argc, argv);
2167             ::testing::AddGlobalTestEnvironment(new TestEnvironment);
2168 
2169             int result = RUN_ALL_TESTS();
2170 
2171             if (result != 0) {
2172                 __android_log_print(ANDROID_LOG_INFO, appTag, "==== Tests FAILED ====");
2173             } else {
2174                 __android_log_print(ANDROID_LOG_INFO, appTag, "==== Tests PASSED ====");
2175             }
2176 
2177             VkTestFramework::Finish();
2178 
2179             fclose(stdout);
2180             fclose(stderr);
2181 
2182             ANativeActivity_finish(app->activity);
2183             return;
2184         }
2185     }
2186 }
2187 #endif
2188 
2189 #if defined(_WIN32) && !defined(NDEBUG)
2190 #include <crtdbg.h>
2191 #endif
2192 
2193 int main(int argc, char **argv) {
2194     int result;
2195 
2196 #ifdef ANDROID
2197     int vulkanSupport = InitVulkan();
2198     if (vulkanSupport == 0) return 1;
2199 #endif
2200 
2201 #if defined(_WIN32) && !defined(NDEBUG)
2202     _CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE);
2203     _CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR);
2204 #endif
2205 
2206     ::testing::InitGoogleTest(&argc, argv);
2207     VkTestFramework::InitArgs(&argc, argv);
2208 
2209     ::testing::AddGlobalTestEnvironment(new TestEnvironment);
2210 
2211     result = RUN_ALL_TESTS();
2212 
2213     VkTestFramework::Finish();
2214     return result;
2215 }
2216