• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright (c) 2015-2019 The Khronos Group Inc.
2  * Copyright (c) 2015-2019 Valve Corporation
3  * Copyright (c) 2015-2019 LunarG, Inc.
4  * Copyright (C) 2015-2019 Google Inc.
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License");
7  * you may not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  *     http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS,
14  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  *
18  * Author: Cody Northrop <cnorthrop@google.com>
19  * Author: Michael Lentine <mlentine@google.com>
20  * Author: Tobin Ehlis <tobine@google.com>
21  * Author: Chia-I Wu <olv@google.com>
22  * Author: Chris Forbes <chrisf@ijw.co.nz>
23  * Author: Mark Lobodzinski <mark@lunarg.com>
24  * Author: Ian Elliott <ianelliott@google.com>
25  * Author: Dave Houlton <daveh@lunarg.com>
26  * Author: Dustin Graves <dustin@lunarg.com>
27  * Author: Jeremy Hayes <jeremy@lunarg.com>
28  * Author: Jon Ashburn <jon@lunarg.com>
29  * Author: Karl Schultz <karl@lunarg.com>
30  * Author: Mark Young <marky@lunarg.com>
31  * Author: Mike Schuchardt <mikes@lunarg.com>
32  * Author: Mike Weiblen <mikew@lunarg.com>
33  * Author: Tony Barbour <tony@LunarG.com>
34  * Author: John Zulauf <jzulauf@lunarg.com>
35  * Author: Shannon McPherson <shannon@lunarg.com>
36  */
37 
38 // Allow use of STL min and max functions in Windows
39 #define NOMINMAX
40 
41 #include <algorithm>
42 #include <array>
43 #include <assert.h>
44 #include <cmath>
45 #include <iostream>
46 #include <list>
47 #include <math.h>
48 #include <map>
49 #include <memory>
50 #include <mutex>
51 #include <set>
52 #include <sstream>
53 #include <stdio.h>
54 #include <stdlib.h>
55 #include <string.h>
56 #include <string>
57 #include <valarray>
58 
59 #include "vk_loader_platform.h"
60 #include "vk_dispatch_table_helper.h"
61 #include "vk_enum_string_helper.h"
62 #include "chassis.h"
63 #include "convert_to_renderpass2.h"
64 #include "core_validation.h"
65 #include "buffer_validation.h"
66 #include "shader_validation.h"
67 #include "vk_layer_utils.h"
68 
69 // These functions are defined *outside* the core_validation namespace as their type
70 // is also defined outside that namespace
hash() const71 size_t PipelineLayoutCompatDef::hash() const {
72     hash_util::HashCombiner hc;
73     // The set number is integral to the CompatDef's distinctiveness
74     hc << set << push_constant_ranges.get();
75     const auto &descriptor_set_layouts = *set_layouts_id.get();
76     for (uint32_t i = 0; i <= set; i++) {
77         hc << descriptor_set_layouts[i].get();
78     }
79     return hc.Value();
80 }
81 
operator ==(const PipelineLayoutCompatDef & other) const82 bool PipelineLayoutCompatDef::operator==(const PipelineLayoutCompatDef &other) const {
83     if ((set != other.set) || (push_constant_ranges != other.push_constant_ranges)) {
84         return false;
85     }
86 
87     if (set_layouts_id == other.set_layouts_id) {
88         // if it's the same set_layouts_id, then *any* subset will match
89         return true;
90     }
91 
92     // They aren't exactly the same PipelineLayoutSetLayouts, so we need to check if the required subsets match
93     const auto &descriptor_set_layouts = *set_layouts_id.get();
94     assert(set < descriptor_set_layouts.size());
95     const auto &other_ds_layouts = *other.set_layouts_id.get();
96     assert(set < other_ds_layouts.size());
97     for (uint32_t i = 0; i <= set; i++) {
98         if (descriptor_set_layouts[i] != other_ds_layouts[i]) {
99             return false;
100         }
101     }
102     return true;
103 }
104 
105 using std::max;
106 using std::string;
107 using std::stringstream;
108 using std::unique_ptr;
109 using std::unordered_map;
110 using std::unordered_set;
111 using std::vector;
112 
// WSI Image Objects bypass usual Image Object creation methods.  A special Memory
// Object value will be used to identify them internally.
static const VkDeviceMemory MEMTRACKER_SWAP_CHAIN_IMAGE_KEY = (VkDeviceMemory)(-1);
// 2nd special memory handle used to flag object as unbound from memory
// (distinct from both VK_NULL_HANDLE / never-bound and the swapchain key above).
static const VkDeviceMemory MEMORY_UNBOUND = VkDeviceMemory(~((uint64_t)(0)) - 1);
118 
119 // Get the global maps of pending releases
// Const/non-const accessor pairs for the global maps of pending queue-family-ownership
// release barriers. The unused |type_tag| parameter exists solely to select the image-
// vs buffer-barrier overload at compile time.
const GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
    const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) const {
    return qfo_release_image_barrier_map;
}
const GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
    const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) const {
    return qfo_release_buffer_barrier_map;
}
GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
    const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
    return qfo_release_image_barrier_map;
}
GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
    const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
    return qfo_release_buffer_barrier_map;
}
136 
137 // Get the image viewstate for a given framebuffer attachment
// Returns the tracked view state for attachment |index| of |framebuffer|.
// |index| must be a valid attachment index (asserted); result may be nullptr
// if the view handle is not tracked.
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}
143 
GetEventState(VkEvent event)144 EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
145     auto it = eventMap.find(event);
146     if (it == eventMap.end()) {
147         return nullptr;
148     }
149     return &it->second;
150 }
151 
GetQueueState(VkQueue queue) const152 const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
153     auto it = queueMap.find(queue);
154     if (it == queueMap.cend()) {
155         return nullptr;
156     }
157     return &it->second;
158 }
GetQueueState(VkQueue queue)159 QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
160     auto it = queueMap.find(queue);
161     if (it == queueMap.end()) {
162         return nullptr;
163     }
164     return &it->second;
165 }
166 
GetPhysicalDeviceState(VkPhysicalDevice phys) const167 const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
168     auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
169     auto it = phys_dev_map->find(phys);
170     if (it == phys_dev_map->end()) {
171         return nullptr;
172     }
173     return &it->second;
174 }
175 
GetPhysicalDeviceState(VkPhysicalDevice phys)176 PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
177     auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
178     auto it = phys_dev_map->find(phys);
179     if (it == phys_dev_map->end()) {
180         return nullptr;
181     }
182     return &it->second;
183 }
184 
// Accessors for the physical device state cached on this tracker.
PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
187 
188 // Return ptr to memory binding for given handle of specified type
// Shared implementation for the const/non-const GetObjectMemBinding accessors below.
// State/Result are instantiated as (const) ValidationStateTracker* / (const) BINDABLE*.
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    // Only images, buffers, and NV acceleration structures bind to memory here.
    return nullptr;
}
// Resolve |typed_handle| (image/buffer/NV acceleration structure) to its BINDABLE
// state, or nullptr for unsupported or untracked handles.
const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}
// Non-const variant of the resolution above.
BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
209 
// Capture the context in which an initial layout was recorded: the command buffer's
// current debug label, plus the view handle/aspect mask when a view was involved.
ImageSubresourceLayoutMap::InitialLayoutState::InitialLayoutState(const CMD_BUFFER_STATE &cb_state,
                                                                  const IMAGE_VIEW_STATE *view_state)
    : image_view(VK_NULL_HANDLE), aspect_mask(0), label(cb_state.debug_label) {
    // view_state may be null (layout set without a view); then the defaults above stand.
    if (view_state) {
        image_view = view_state->image_view;
        aspect_mask = view_state->create_info.subresourceRange.aspectMask;
    }
}
218 
FormatDebugLabel(const char * prefix,const LoggingLabel & label)219 std::string FormatDebugLabel(const char *prefix, const LoggingLabel &label) {
220     if (label.Empty()) return std::string();
221     std::string out;
222     string_sprintf(&out, "%sVkDebugUtilsLabel(name='%s' color=[%g, %g %g, %g])", prefix, label.name.c_str(), label.color[0],
223                    label.color[1], label.color[2], label.color[3]);
224     return out;
225 }
226 
227 // the ImageLayoutMap implementation bakes in the number of valid aspects -- we have to choose the correct one at construction time
228 template <uint32_t kThreshold>
LayoutMapFactoryByAspect(const IMAGE_STATE & image_state)229 static std::unique_ptr<ImageSubresourceLayoutMap> LayoutMapFactoryByAspect(const IMAGE_STATE &image_state) {
230     ImageSubresourceLayoutMap *map = nullptr;
231     switch (image_state.full_range.aspectMask) {
232         case VK_IMAGE_ASPECT_COLOR_BIT:
233             map = new ImageSubresourceLayoutMapImpl<ColorAspectTraits, kThreshold>(image_state);
234             break;
235         case VK_IMAGE_ASPECT_DEPTH_BIT:
236             map = new ImageSubresourceLayoutMapImpl<DepthAspectTraits, kThreshold>(image_state);
237             break;
238         case VK_IMAGE_ASPECT_STENCIL_BIT:
239             map = new ImageSubresourceLayoutMapImpl<StencilAspectTraits, kThreshold>(image_state);
240             break;
241         case VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT:
242             map = new ImageSubresourceLayoutMapImpl<DepthStencilAspectTraits, kThreshold>(image_state);
243             break;
244         case VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT:
245             map = new ImageSubresourceLayoutMapImpl<Multiplane2AspectTraits, kThreshold>(image_state);
246             break;
247         case VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT:
248             map = new ImageSubresourceLayoutMapImpl<Multiplane3AspectTraits, kThreshold>(image_state);
249             break;
250     }
251 
252     assert(map);  // We shouldn't be able to get here null unless the traits cases are incomplete
253     return std::unique_ptr<ImageSubresourceLayoutMap>(map);
254 }
255 
LayoutMapFactory(const IMAGE_STATE & image_state)256 static std::unique_ptr<ImageSubresourceLayoutMap> LayoutMapFactory(const IMAGE_STATE &image_state) {
257     std::unique_ptr<ImageSubresourceLayoutMap> map;
258     const uint32_t kAlwaysDenseLimit = 16;  // About a cacheline on deskop architectures
259     if (image_state.full_range.layerCount <= kAlwaysDenseLimit) {
260         // Create a dense row map
261         map = LayoutMapFactoryByAspect<0>(image_state);
262     } else {
263         // Create an initially sparse row map
264         map = LayoutMapFactoryByAspect<kAlwaysDenseLimit>(image_state);
265     }
266     return map;
267 }
268 
269 // The const variant only need the image as it is the key for the map
GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE * cb_state,VkImage image)270 const ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE *cb_state, VkImage image) {
271     auto it = cb_state->image_layout_map.find(image);
272     if (it == cb_state->image_layout_map.cend()) {
273         return nullptr;
274     }
275     return it->second.get();
276 }
277 
278 // The non-const variant only needs the image state, as the factory requires it to construct a new entry
GetImageSubresourceLayoutMap(CMD_BUFFER_STATE * cb_state,const IMAGE_STATE & image_state)279 ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(CMD_BUFFER_STATE *cb_state, const IMAGE_STATE &image_state) {
280     auto it = cb_state->image_layout_map.find(image_state.image);
281     if (it == cb_state->image_layout_map.end()) {
282         // Empty slot... fill it in.
283         auto insert_pair = cb_state->image_layout_map.insert(std::make_pair(image_state.image, LayoutMapFactory(image_state)));
284         assert(insert_pair.second);
285         ImageSubresourceLayoutMap *new_map = insert_pair.first->second.get();
286         assert(new_map);
287         return new_map;
288     }
289     return it->second.get();
290 }
291 
AddMemObjInfo(void * object,const VkDeviceMemory mem,const VkMemoryAllocateInfo * pAllocateInfo)292 void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
293     assert(object != NULL);
294 
295     auto *mem_info = new DEVICE_MEMORY_STATE(object, mem, pAllocateInfo);
296     memObjMap[mem] = unique_ptr<DEVICE_MEMORY_STATE>(mem_info);
297 
298     auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
299     if (dedicated) {
300         mem_info->is_dedicated = true;
301         mem_info->dedicated_buffer = dedicated->buffer;
302         mem_info->dedicated_image = dedicated->image;
303     }
304     auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
305     if (export_info) {
306         mem_info->is_export = true;
307         mem_info->export_handle_type_flags = export_info->handleTypes;
308     }
309 }
310 
311 // Create binding link between given sampler and command buffer node
AddCommandBufferBindingSampler(CMD_BUFFER_STATE * cb_node,SAMPLER_STATE * sampler_state)312 void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
313     if (disabled.command_buffer_state) {
314         return;
315     }
316     auto inserted = cb_node->object_bindings.emplace(sampler_state->sampler, kVulkanObjectTypeSampler);
317     if (inserted.second) {
318         // Only need to complete the cross-reference if this is a new item
319         sampler_state->cb_bindings.insert(cb_node);
320     }
321 }
322 
323 // Create binding link between given image node and command buffer node
// Link |image_state| (and each memory object it is bound to) into |cb_node|'s
// tracking sets, and add the reverse CB references on each of those objects.
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    // Cross-reference tracking may be disabled entirely.
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->binding.mem != MEMTRACKER_SWAP_CHAIN_IMAGE_KEY) {
        // First update cb binding for image
        auto image_inserted = cb_node->object_bindings.emplace(image_state->image, kVulkanObjectTypeImage);
        if (image_inserted.second) {
            // Only need to continue if this is a new item (a repeat means all of the work below was done previously)
            image_state->cb_bindings.insert(cb_node);
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    auto mem_inserted = cb_node->memObjs.insert(mem_binding);
                    if (mem_inserted.second) {
                        // Only need to complete the cross-reference if this is a new item
                        pMemInfo->cb_bindings.insert(cb_node);
                    }
                }
            }
        }
    }
}
350 
351 // Create binding link between given image view node and its image with command buffer node
AddCommandBufferBindingImageView(CMD_BUFFER_STATE * cb_node,IMAGE_VIEW_STATE * view_state)352 void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
353     if (disabled.command_buffer_state) {
354         return;
355     }
356     // First add bindings for imageView
357     auto inserted = cb_node->object_bindings.emplace(view_state->image_view, kVulkanObjectTypeImageView);
358     if (inserted.second) {
359         // Only need to continue if this is a new item
360         view_state->cb_bindings.insert(cb_node);
361         auto image_state = GetImageState(view_state->create_info.image);
362         // Add bindings for image within imageView
363         if (image_state) {
364             AddCommandBufferBindingImage(cb_node, image_state);
365         }
366     }
367 }
368 
369 // Create binding link between given buffer node and command buffer node
// Link |buffer_state| (and each memory object it is bound to) into |cb_node|'s
// tracking sets, and add the reverse CB references on each of those objects.
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    // Cross-reference tracking may be disabled entirely.
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    auto buffer_inserted = cb_node->object_bindings.emplace(buffer_state->buffer, kVulkanObjectTypeBuffer);
    if (buffer_inserted.second) {
        // Only need to continue if this is a new item (a repeat means all of the work below was done previously)
        buffer_state->cb_bindings.insert(cb_node);
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                auto inserted = cb_node->memObjs.insert(mem_binding);
                if (inserted.second) {
                    // Only need to complete the cross-reference if this is a new item
                    pMemInfo->cb_bindings.insert(cb_node);
                }
            }
        }
    }
}
393 
394 // Create binding link between given buffer view node and its buffer with command buffer node
AddCommandBufferBindingBufferView(CMD_BUFFER_STATE * cb_node,BUFFER_VIEW_STATE * view_state)395 void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
396     if (disabled.command_buffer_state) {
397         return;
398     }
399     // First add bindings for bufferView
400     auto inserted = cb_node->object_bindings.emplace(view_state->buffer_view, kVulkanObjectTypeBufferView);
401     if (inserted.second) {
402         // Only need to complete the cross-reference if this is a new item
403         view_state->cb_bindings.insert(cb_node);
404         auto buffer_state = GetBufferState(view_state->create_info.buffer);
405         // Add bindings for buffer within bufferView
406         if (buffer_state) {
407             AddCommandBufferBindingBuffer(cb_node, buffer_state);
408         }
409     }
410 }
411 
412 // Create binding link between given acceleration structure and command buffer node
// Link |as_state| (and each memory object it is bound to) into |cb_node|'s
// tracking sets, and add the reverse CB references on each of those objects.
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    // Cross-reference tracking may be disabled entirely.
    if (disabled.command_buffer_state) {
        return;
    }
    auto as_inserted = cb_node->object_bindings.emplace(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV);
    if (as_inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        as_state->cb_bindings.insert(cb_node);
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                auto mem_inserted = cb_node->memObjs.insert(mem_binding);
                if (mem_inserted.second) {
                    // Only need to complete the cross-reference if this is a new item
                    pMemInfo->cb_bindings.insert(cb_node);
                }
            }
        }
    }
}
436 
437 // For every mem obj bound to particular CB, free bindings related to that CB
ClearCmdBufAndMemReferences(CMD_BUFFER_STATE * cb_node)438 void ValidationStateTracker::ClearCmdBufAndMemReferences(CMD_BUFFER_STATE *cb_node) {
439     if (cb_node) {
440         if (cb_node->memObjs.size() > 0) {
441             for (auto mem : cb_node->memObjs) {
442                 DEVICE_MEMORY_STATE *pInfo = GetDevMemState(mem);
443                 if (pInfo) {
444                     pInfo->cb_bindings.erase(cb_node);
445                 }
446             }
447             cb_node->memObjs.clear();
448         }
449     }
450 }
451 
452 // Clear a single object binding from given memory object
// Remove |typed_handle| from |mem|'s set of bound objects.
// No-op when |mem| is not tracked (e.g. VK_NULL_HANDLE or already destroyed).
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}
460 
461 // ClearMemoryObjectBindings clears the binding of objects to memory
462 //  For the given object it pulls the memory bindings and makes sure that the bindings
463 //  no longer refer to the object being cleared. This occurs when objects are destroyed.
ClearMemoryObjectBindings(const VulkanTypedHandle & typed_handle)464 void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
465     BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
466     if (mem_binding) {
467         if (!mem_binding->sparse) {
468             ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
469         } else {  // Sparse, clear all bindings
470             for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
471                 ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
472             }
473         }
474     }
475 }
476 
477 // For given mem object, verify that it is not null or UNBOUND, if it is, report error. Return skip value.
// Report an error if |mem| is VK_NULL_HANDLE (never bound) or MEMORY_UNBOUND
// (previously bound memory was freed). Returns true when an error was logged (skip).
bool CoreChecks::VerifyBoundMemoryIsValid(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *api_name,
                                          const char *error_code) const {
    bool result = false;
    auto type_name = object_string[typed_handle.type];
    if (VK_NULL_HANDLE == mem) {
        // type_name + 2 presumably skips a leading "Vk" so the message reads
        // "vkBindImageMemory()" etc. — TODO confirm against object_string contents.
        result = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, typed_handle.handle,
                         error_code, "%s: %s used with no memory bound. Memory should be bound by calling vkBind%sMemory().",
                         api_name, report_data->FormatHandle(typed_handle).c_str(), type_name + 2);
    } else if (MEMORY_UNBOUND == mem) {
        result = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, typed_handle.handle,
                         error_code,
                         "%s: %s used with no memory bound and previously bound memory was freed. Memory must not be freed "
                         "prior to this operation.",
                         api_name, report_data->FormatHandle(typed_handle).c_str());
    }
    return result;
}
495 
496 // Check to see if memory was ever bound to this image
ValidateMemoryIsBoundToImage(const IMAGE_STATE * image_state,const char * api_name,const char * error_code) const497 bool CoreChecks::ValidateMemoryIsBoundToImage(const IMAGE_STATE *image_state, const char *api_name, const char *error_code) const {
498     bool result = false;
499     if (image_state->create_from_swapchain != VK_NULL_HANDLE) {
500         if (image_state->bind_swapchain == VK_NULL_HANDLE) {
501             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
502                     HandleToUint64(image_state->image), error_code,
503                     "%s: %s is created by %s, and the image should be bound by calling vkBindImageMemory2(), and the pNext chain "
504                     "includes VkBindImageMemorySwapchainInfoKHR.",
505                     api_name, report_data->FormatHandle(image_state->image).c_str(),
506                     report_data->FormatHandle(image_state->create_from_swapchain).c_str());
507         }
508     } else if (0 == (static_cast<uint32_t>(image_state->createInfo.flags) & VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) {
509         result = VerifyBoundMemoryIsValid(image_state->binding.mem, VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage),
510                                           api_name, error_code);
511     }
512     return result;
513 }
514 
515 // Check to see if memory was bound to this buffer
ValidateMemoryIsBoundToBuffer(const BUFFER_STATE * buffer_state,const char * api_name,const char * error_code) const516 bool CoreChecks::ValidateMemoryIsBoundToBuffer(const BUFFER_STATE *buffer_state, const char *api_name,
517                                                const char *error_code) const {
518     bool result = false;
519     if (0 == (static_cast<uint32_t>(buffer_state->createInfo.flags) & VK_BUFFER_CREATE_SPARSE_BINDING_BIT)) {
520         result = VerifyBoundMemoryIsValid(buffer_state->binding.mem,
521                                           VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer), api_name, error_code);
522     }
523     return result;
524 }
525 
526 // Check to see if memory was bound to this acceleration structure
// Check that memory was bound to this acceleration structure; returns true on error (skip).
bool CoreChecks::ValidateMemoryIsBoundToAccelerationStructure(const ACCELERATION_STRUCTURE_STATE *as_state, const char *api_name,
                                                              const char *error_code) const {
    // No sparse-binding carve-out here, unlike the image/buffer checks above.
    return VerifyBoundMemoryIsValid(as_state->binding.mem,
                                    VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV),
                                    api_name, error_code);
}
533 
534 // SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
535 // Corresponding valid usage checks are in ValidateSetMemBinding().
// Establish the immutable, non-sparse binding of |mem_binding| (identified by
// |typed_handle|) to |mem| at |memory_offset|, and record the reverse link on the
// memory object's binding list. Valid-usage checks live in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    // The bound size comes from the object's memory requirements, not the allocation size.
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            // Reverse link: memory object -> bound resource.
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO::  More memory state transition stuff.
                    }
                }
            }
        }
    }
}
562 
563 // Valid usage checks for a call to SetMemBinding().
564 // For NULL mem case, output warning
565 // Make sure given object is in global object map
566 //  IF a previous binding existed, output validation error
567 //  Otherwise, add reference from objectInfo to memoryInfo
568 //  Add reference off of objInfo
569 // TODO: We may need to refactor or pass in multiple valid usage statements to handle multiple valid usage conditions.
ValidateSetMemBinding(VkDeviceMemory mem,const VulkanTypedHandle & typed_handle,const char * apiName) const570 bool CoreChecks::ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *apiName) const {
571     bool skip = false;
572     // It's an error to bind an object to NULL memory
573     if (mem != VK_NULL_HANDLE) {
574         const BINDABLE *mem_binding = ValidationStateTracker::GetObjectMemBinding(typed_handle);
575         assert(mem_binding);
576         if (mem_binding->sparse) {
577             const char *error_code = "VUID-vkBindImageMemory-image-01045";
578             const char *handle_type = "IMAGE";
579             if (typed_handle.type == kVulkanObjectTypeBuffer) {
580                 error_code = "VUID-vkBindBufferMemory-buffer-01030";
581                 handle_type = "BUFFER";
582             } else {
583                 assert(typed_handle.type == kVulkanObjectTypeImage);
584             }
585             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
586                             HandleToUint64(mem), error_code,
587                             "In %s, attempting to bind %s to %s which was created with sparse memory flags "
588                             "(VK_%s_CREATE_SPARSE_*_BIT).",
589                             apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
590                             handle_type);
591         }
592         const DEVICE_MEMORY_STATE *mem_info = ValidationStateTracker::GetDevMemState(mem);
593         if (mem_info) {
594             const DEVICE_MEMORY_STATE *prev_binding = ValidationStateTracker::GetDevMemState(mem_binding->binding.mem);
595             if (prev_binding) {
596                 const char *error_code = "VUID-vkBindImageMemory-image-01044";
597                 if (typed_handle.type == kVulkanObjectTypeBuffer) {
598                     error_code = "VUID-vkBindBufferMemory-buffer-01029";
599                 } else {
600                     assert(typed_handle.type == kVulkanObjectTypeImage);
601                 }
602                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
603                                 HandleToUint64(mem), error_code,
604                                 "In %s, attempting to bind %s to %s which has already been bound to %s.", apiName,
605                                 report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
606                                 report_data->FormatHandle(prev_binding->mem).c_str());
607             } else if (mem_binding->binding.mem == MEMORY_UNBOUND) {
608                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
609                                 HandleToUint64(mem), kVUID_Core_MemTrack_RebindObject,
610                                 "In %s, attempting to bind %s to %s which was previous bound to memory that has "
611                                 "since been freed. Memory bindings are immutable in "
612                                 "Vulkan so this attempt to bind to new memory is not allowed.",
613                                 apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str());
614             }
615         }
616     }
617     return skip;
618 }
619 
620 // For NULL mem case, clear any previous binding Else...
621 // Make sure given object is in its object map
622 //  IF a previous binding existed, update binding
623 //  Add reference from objectInfo to memoryInfo
624 //  Add reference off of object's binding info
625 // Return VK_TRUE if addition is successful, VK_FALSE otherwise
SetSparseMemBinding(MEM_BINDING binding,const VulkanTypedHandle & typed_handle)626 bool CoreChecks::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
627     bool skip = VK_FALSE;
628     // Handle NULL case separately, just clear previous binding & decrement reference
629     if (binding.mem == VK_NULL_HANDLE) {
630         // TODO : This should cause the range of the resource to be unbound according to spec
631     } else {
632         BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
633         assert(mem_binding);
634         if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
635             assert(mem_binding->sparse);
636             DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
637             if (mem_info) {
638                 mem_info->obj_bindings.insert(typed_handle);
639                 // Need to set mem binding for this object
640                 mem_binding->sparse_bindings.insert(binding);
641                 mem_binding->UpdateBoundMemorySet();
642             }
643         }
644     }
645     return skip;
646 }
647 
ValidateDeviceQueueFamily(uint32_t queue_family,const char * cmd_name,const char * parameter_name,const char * error_code,bool optional=false) const648 bool CoreChecks::ValidateDeviceQueueFamily(uint32_t queue_family, const char *cmd_name, const char *parameter_name,
649                                            const char *error_code, bool optional = false) const {
650     bool skip = false;
651     if (!optional && queue_family == VK_QUEUE_FAMILY_IGNORED) {
652         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
653                         error_code,
654                         "%s: %s is VK_QUEUE_FAMILY_IGNORED, but it is required to provide a valid queue family index value.",
655                         cmd_name, parameter_name);
656     } else if (queue_family_index_map.find(queue_family) == queue_family_index_map.end()) {
657         skip |= log_msg(
658             report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), error_code,
659             "%s: %s (= %" PRIu32
660             ") is not one of the queue families given via VkDeviceQueueCreateInfo structures when the device was created.",
661             cmd_name, parameter_name, queue_family);
662     }
663 
664     return skip;
665 }
666 
ValidateQueueFamilies(uint32_t queue_family_count,const uint32_t * queue_families,const char * cmd_name,const char * array_parameter_name,const char * unique_error_code,const char * valid_error_code,bool optional=false) const667 bool CoreChecks::ValidateQueueFamilies(uint32_t queue_family_count, const uint32_t *queue_families, const char *cmd_name,
668                                        const char *array_parameter_name, const char *unique_error_code,
669                                        const char *valid_error_code, bool optional = false) const {
670     bool skip = false;
671     if (queue_families) {
672         std::unordered_set<uint32_t> set;
673         for (uint32_t i = 0; i < queue_family_count; ++i) {
674             std::string parameter_name = std::string(array_parameter_name) + "[" + std::to_string(i) + "]";
675 
676             if (set.count(queue_families[i])) {
677                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
678                                 HandleToUint64(device), unique_error_code, "%s: %s (=%" PRIu32 ") is not unique within %s array.",
679                                 cmd_name, parameter_name.c_str(), queue_families[i], array_parameter_name);
680             } else {
681                 set.insert(queue_families[i]);
682                 skip |= ValidateDeviceQueueFamily(queue_families[i], cmd_name, parameter_name.c_str(), valid_error_code, optional);
683             }
684         }
685     }
686     return skip;
687 }
688 
689 // Check object status for selected flag state
ValidateStatus(const CMD_BUFFER_STATE * pNode,CBStatusFlags status_mask,VkFlags msg_flags,const char * fail_msg,const char * msg_code) const690 bool CoreChecks::ValidateStatus(const CMD_BUFFER_STATE *pNode, CBStatusFlags status_mask, VkFlags msg_flags, const char *fail_msg,
691                                 const char *msg_code) const {
692     if (!(pNode->status & status_mask)) {
693         return log_msg(report_data, msg_flags, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pNode->commandBuffer),
694                        msg_code, "%s: %s..", report_data->FormatHandle(pNode->commandBuffer).c_str(), fail_msg);
695     }
696     return false;
697 }
698 
GetRenderPassState(VkRenderPass renderpass) const699 const RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) const {
700     auto it = renderPassMap.find(renderpass);
701     if (it == renderPassMap.end()) {
702         return nullptr;
703     }
704     return it->second.get();
705 }
706 
GetRenderPassState(VkRenderPass renderpass)707 RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) {
708     auto it = renderPassMap.find(renderpass);
709     if (it == renderPassMap.end()) {
710         return nullptr;
711     }
712     return it->second.get();
713 }
714 
GetRenderPassStateSharedPtr(VkRenderPass renderpass)715 std::shared_ptr<RENDER_PASS_STATE> ValidationStateTracker::GetRenderPassStateSharedPtr(VkRenderPass renderpass) {
716     auto it = renderPassMap.find(renderpass);
717     if (it == renderPassMap.end()) {
718         return nullptr;
719     }
720     return it->second;
721 }
722 
GetDescriptorSetLayout(const ValidationStateTracker * state_data,VkDescriptorSetLayout dsLayout)723 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> const GetDescriptorSetLayout(const ValidationStateTracker *state_data,
724                                                                                          VkDescriptorSetLayout dsLayout) {
725     auto it = state_data->descriptorSetLayoutMap.find(dsLayout);
726     if (it == state_data->descriptorSetLayoutMap.end()) {
727         return nullptr;
728     }
729     return it->second;
730 }
731 
732 // Return true if for a given PSO, the given state enum is dynamic, else return false
IsDynamic(const PIPELINE_STATE * pPipeline,const VkDynamicState state)733 static bool IsDynamic(const PIPELINE_STATE *pPipeline, const VkDynamicState state) {
734     if (pPipeline && pPipeline->graphicsPipelineCI.pDynamicState) {
735         for (uint32_t i = 0; i < pPipeline->graphicsPipelineCI.pDynamicState->dynamicStateCount; i++) {
736             if (state == pPipeline->graphicsPipelineCI.pDynamicState->pDynamicStates[i]) return true;
737         }
738     }
739     return false;
740 }
741 
742 // Validate state stored as flags at time of draw call
ValidateDrawStateFlags(const CMD_BUFFER_STATE * pCB,const PIPELINE_STATE * pPipe,bool indexed,const char * msg_code) const743 bool CoreChecks::ValidateDrawStateFlags(const CMD_BUFFER_STATE *pCB, const PIPELINE_STATE *pPipe, bool indexed,
744                                         const char *msg_code) const {
745     bool result = false;
746     if (pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
747         pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP) {
748         result |= ValidateStatus(pCB, CBSTATUS_LINE_WIDTH_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
749                                  "Dynamic line width state not set for this command buffer", msg_code);
750     }
751     if (pPipe->graphicsPipelineCI.pRasterizationState &&
752         (pPipe->graphicsPipelineCI.pRasterizationState->depthBiasEnable == VK_TRUE)) {
753         result |= ValidateStatus(pCB, CBSTATUS_DEPTH_BIAS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
754                                  "Dynamic depth bias state not set for this command buffer", msg_code);
755     }
756     if (pPipe->blendConstantsEnabled) {
757         result |= ValidateStatus(pCB, CBSTATUS_BLEND_CONSTANTS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
758                                  "Dynamic blend constants state not set for this command buffer", msg_code);
759     }
760     if (pPipe->graphicsPipelineCI.pDepthStencilState &&
761         (pPipe->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE)) {
762         result |= ValidateStatus(pCB, CBSTATUS_DEPTH_BOUNDS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
763                                  "Dynamic depth bounds state not set for this command buffer", msg_code);
764     }
765     if (pPipe->graphicsPipelineCI.pDepthStencilState &&
766         (pPipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE)) {
767         result |= ValidateStatus(pCB, CBSTATUS_STENCIL_READ_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
768                                  "Dynamic stencil read mask state not set for this command buffer", msg_code);
769         result |= ValidateStatus(pCB, CBSTATUS_STENCIL_WRITE_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
770                                  "Dynamic stencil write mask state not set for this command buffer", msg_code);
771         result |= ValidateStatus(pCB, CBSTATUS_STENCIL_REFERENCE_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
772                                  "Dynamic stencil reference state not set for this command buffer", msg_code);
773     }
774     if (indexed) {
775         result |= ValidateStatus(pCB, CBSTATUS_INDEX_BUFFER_BOUND, VK_DEBUG_REPORT_ERROR_BIT_EXT,
776                                  "Index buffer object not bound to this command buffer when Indexed Draw attempted", msg_code);
777     }
778     if (pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
779         pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP) {
780         const auto *line_state =
781             lvl_find_in_chain<VkPipelineRasterizationLineStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pRasterizationState->pNext);
782         if (line_state && line_state->stippledLineEnable) {
783             result |= ValidateStatus(pCB, CBSTATUS_LINE_STIPPLE_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
784                                      "Dynamic line stipple state not set for this command buffer", msg_code);
785         }
786     }
787 
788     return result;
789 }
790 
LogInvalidAttachmentMessage(const char * type1_string,const RENDER_PASS_STATE * rp1_state,const char * type2_string,const RENDER_PASS_STATE * rp2_state,uint32_t primary_attach,uint32_t secondary_attach,const char * msg,const char * caller,const char * error_code) const791 bool CoreChecks::LogInvalidAttachmentMessage(const char *type1_string, const RENDER_PASS_STATE *rp1_state, const char *type2_string,
792                                              const RENDER_PASS_STATE *rp2_state, uint32_t primary_attach, uint32_t secondary_attach,
793                                              const char *msg, const char *caller, const char *error_code) const {
794     return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
795                    HandleToUint64(rp1_state->renderPass), error_code,
796                    "%s: RenderPasses incompatible between %s w/ %s and %s w/ %s Attachment %u is not "
797                    "compatible with %u: %s.",
798                    caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(), type2_string,
799                    report_data->FormatHandle(rp2_state->renderPass).c_str(), primary_attach, secondary_attach, msg);
800 }
801 
ValidateAttachmentCompatibility(const char * type1_string,const RENDER_PASS_STATE * rp1_state,const char * type2_string,const RENDER_PASS_STATE * rp2_state,uint32_t primary_attach,uint32_t secondary_attach,const char * caller,const char * error_code) const802 bool CoreChecks::ValidateAttachmentCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
803                                                  const char *type2_string, const RENDER_PASS_STATE *rp2_state,
804                                                  uint32_t primary_attach, uint32_t secondary_attach, const char *caller,
805                                                  const char *error_code) const {
806     bool skip = false;
807     const auto &primaryPassCI = rp1_state->createInfo;
808     const auto &secondaryPassCI = rp2_state->createInfo;
809     if (primaryPassCI.attachmentCount <= primary_attach) {
810         primary_attach = VK_ATTACHMENT_UNUSED;
811     }
812     if (secondaryPassCI.attachmentCount <= secondary_attach) {
813         secondary_attach = VK_ATTACHMENT_UNUSED;
814     }
815     if (primary_attach == VK_ATTACHMENT_UNUSED && secondary_attach == VK_ATTACHMENT_UNUSED) {
816         return skip;
817     }
818     if (primary_attach == VK_ATTACHMENT_UNUSED) {
819         skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
820                                             "The first is unused while the second is not.", caller, error_code);
821         return skip;
822     }
823     if (secondary_attach == VK_ATTACHMENT_UNUSED) {
824         skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
825                                             "The second is unused while the first is not.", caller, error_code);
826         return skip;
827     }
828     if (primaryPassCI.pAttachments[primary_attach].format != secondaryPassCI.pAttachments[secondary_attach].format) {
829         skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
830                                             "They have different formats.", caller, error_code);
831     }
832     if (primaryPassCI.pAttachments[primary_attach].samples != secondaryPassCI.pAttachments[secondary_attach].samples) {
833         skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
834                                             "They have different samples.", caller, error_code);
835     }
836     if (primaryPassCI.pAttachments[primary_attach].flags != secondaryPassCI.pAttachments[secondary_attach].flags) {
837         skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
838                                             "They have different flags.", caller, error_code);
839     }
840 
841     return skip;
842 }
843 
ValidateSubpassCompatibility(const char * type1_string,const RENDER_PASS_STATE * rp1_state,const char * type2_string,const RENDER_PASS_STATE * rp2_state,const int subpass,const char * caller,const char * error_code) const844 bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
845                                               const char *type2_string, const RENDER_PASS_STATE *rp2_state, const int subpass,
846                                               const char *caller, const char *error_code) const {
847     bool skip = false;
848     const auto &primary_desc = rp1_state->createInfo.pSubpasses[subpass];
849     const auto &secondary_desc = rp2_state->createInfo.pSubpasses[subpass];
850     uint32_t maxInputAttachmentCount = std::max(primary_desc.inputAttachmentCount, secondary_desc.inputAttachmentCount);
851     for (uint32_t i = 0; i < maxInputAttachmentCount; ++i) {
852         uint32_t primary_input_attach = VK_ATTACHMENT_UNUSED, secondary_input_attach = VK_ATTACHMENT_UNUSED;
853         if (i < primary_desc.inputAttachmentCount) {
854             primary_input_attach = primary_desc.pInputAttachments[i].attachment;
855         }
856         if (i < secondary_desc.inputAttachmentCount) {
857             secondary_input_attach = secondary_desc.pInputAttachments[i].attachment;
858         }
859         skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_input_attach,
860                                                 secondary_input_attach, caller, error_code);
861     }
862     uint32_t maxColorAttachmentCount = std::max(primary_desc.colorAttachmentCount, secondary_desc.colorAttachmentCount);
863     for (uint32_t i = 0; i < maxColorAttachmentCount; ++i) {
864         uint32_t primary_color_attach = VK_ATTACHMENT_UNUSED, secondary_color_attach = VK_ATTACHMENT_UNUSED;
865         if (i < primary_desc.colorAttachmentCount) {
866             primary_color_attach = primary_desc.pColorAttachments[i].attachment;
867         }
868         if (i < secondary_desc.colorAttachmentCount) {
869             secondary_color_attach = secondary_desc.pColorAttachments[i].attachment;
870         }
871         skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_color_attach,
872                                                 secondary_color_attach, caller, error_code);
873         if (rp1_state->createInfo.subpassCount > 1) {
874             uint32_t primary_resolve_attach = VK_ATTACHMENT_UNUSED, secondary_resolve_attach = VK_ATTACHMENT_UNUSED;
875             if (i < primary_desc.colorAttachmentCount && primary_desc.pResolveAttachments) {
876                 primary_resolve_attach = primary_desc.pResolveAttachments[i].attachment;
877             }
878             if (i < secondary_desc.colorAttachmentCount && secondary_desc.pResolveAttachments) {
879                 secondary_resolve_attach = secondary_desc.pResolveAttachments[i].attachment;
880             }
881             skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_resolve_attach,
882                                                     secondary_resolve_attach, caller, error_code);
883         }
884     }
885     uint32_t primary_depthstencil_attach = VK_ATTACHMENT_UNUSED, secondary_depthstencil_attach = VK_ATTACHMENT_UNUSED;
886     if (primary_desc.pDepthStencilAttachment) {
887         primary_depthstencil_attach = primary_desc.pDepthStencilAttachment[0].attachment;
888     }
889     if (secondary_desc.pDepthStencilAttachment) {
890         secondary_depthstencil_attach = secondary_desc.pDepthStencilAttachment[0].attachment;
891     }
892     skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_depthstencil_attach,
893                                             secondary_depthstencil_attach, caller, error_code);
894     return skip;
895 }
896 
897 // Verify that given renderPass CreateInfo for primary and secondary command buffers are compatible.
898 //  This function deals directly with the CreateInfo, there are overloaded versions below that can take the renderPass handle and
899 //  will then feed into this function
ValidateRenderPassCompatibility(const char * type1_string,const RENDER_PASS_STATE * rp1_state,const char * type2_string,const RENDER_PASS_STATE * rp2_state,const char * caller,const char * error_code) const900 bool CoreChecks::ValidateRenderPassCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
901                                                  const char *type2_string, const RENDER_PASS_STATE *rp2_state, const char *caller,
902                                                  const char *error_code) const {
903     bool skip = false;
904 
905     if (rp1_state->createInfo.subpassCount != rp2_state->createInfo.subpassCount) {
906         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
907                         HandleToUint64(rp1_state->renderPass), error_code,
908                         "%s: RenderPasses incompatible between %s w/ %s with a subpassCount of %u and %s w/ "
909                         "%s with a subpassCount of %u.",
910                         caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(),
911                         rp1_state->createInfo.subpassCount, type2_string, report_data->FormatHandle(rp2_state->renderPass).c_str(),
912                         rp2_state->createInfo.subpassCount);
913     } else {
914         for (uint32_t i = 0; i < rp1_state->createInfo.subpassCount; ++i) {
915             skip |= ValidateSubpassCompatibility(type1_string, rp1_state, type2_string, rp2_state, i, caller, error_code);
916         }
917     }
918     return skip;
919 }
920 
921 // For given pipeline, return number of MSAA samples, or one if MSAA disabled
GetNumSamples(PIPELINE_STATE const * pipe)922 static VkSampleCountFlagBits GetNumSamples(PIPELINE_STATE const *pipe) {
923     if (pipe->graphicsPipelineCI.pMultisampleState != NULL &&
924         VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO == pipe->graphicsPipelineCI.pMultisampleState->sType) {
925         return pipe->graphicsPipelineCI.pMultisampleState->rasterizationSamples;
926     }
927     return VK_SAMPLE_COUNT_1_BIT;
928 }
929 
// Write the indices of the set bits in |bits| to |s| as a comma-separated list,
// lowest index first (e.g. 0b1010 -> "1,3"). Writes nothing for bits == 0.
static void ListBits(std::ostream &s, uint32_t bits) {
    for (uint32_t i = 0; i < 32 && bits; i++) {
        const uint32_t mask = 1u << i;  // unsigned literal: "1 << 31" on a signed int is UB
        if (bits & mask) {
            s << i;
            bits &= ~mask;  // clear the bit so the loop can stop early once none remain
            if (bits) {
                s << ",";
            }
        }
    }
}
941 
942 // Validate draw-time state related to the PSO
ValidatePipelineDrawtimeState(const LAST_BOUND_STATE & state,const CMD_BUFFER_STATE * pCB,CMD_TYPE cmd_type,const PIPELINE_STATE * pPipeline,const char * caller) const943 bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, const CMD_BUFFER_STATE *pCB, CMD_TYPE cmd_type,
944                                                const PIPELINE_STATE *pPipeline, const char *caller) const {
945     bool skip = false;
946     const auto &current_vtx_bfr_binding_info = pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings;
947 
948     // Verify vertex binding
949     if (pPipeline->vertex_binding_descriptions_.size() > 0) {
950         for (size_t i = 0; i < pPipeline->vertex_binding_descriptions_.size(); i++) {
951             const auto vertex_binding = pPipeline->vertex_binding_descriptions_[i].binding;
952             if ((current_vtx_bfr_binding_info.size() < (vertex_binding + 1)) ||
953                 (current_vtx_bfr_binding_info[vertex_binding].buffer == VK_NULL_HANDLE)) {
954                 skip |=
955                     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
956                             HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_VtxIndexOutOfBounds,
957                             "%s expects that this Command Buffer's vertex binding Index %u should be set via "
958                             "vkCmdBindVertexBuffers. This is because VkVertexInputBindingDescription struct at "
959                             "index " PRINTF_SIZE_T_SPECIFIER " of pVertexBindingDescriptions has a binding value of %u.",
960                             report_data->FormatHandle(state.pipeline_state->pipeline).c_str(), vertex_binding, i, vertex_binding);
961             }
962         }
963 
964         // Verify vertex attribute address alignment
965         for (size_t i = 0; i < pPipeline->vertex_attribute_descriptions_.size(); i++) {
966             const auto &attribute_description = pPipeline->vertex_attribute_descriptions_[i];
967             const auto vertex_binding = attribute_description.binding;
968             const auto attribute_offset = attribute_description.offset;
969             const auto attribute_format = attribute_description.format;
970 
971             const auto &vertex_binding_map_it = pPipeline->vertex_binding_to_index_map_.find(vertex_binding);
972             if ((vertex_binding_map_it != pPipeline->vertex_binding_to_index_map_.cend()) &&
973                 (vertex_binding < current_vtx_bfr_binding_info.size()) &&
974                 (current_vtx_bfr_binding_info[vertex_binding].buffer != VK_NULL_HANDLE)) {
975                 const auto vertex_buffer_stride = pPipeline->vertex_binding_descriptions_[vertex_binding_map_it->second].stride;
976                 const auto vertex_buffer_offset = current_vtx_bfr_binding_info[vertex_binding].offset;
977                 const auto buffer_state = GetBufferState(current_vtx_bfr_binding_info[vertex_binding].buffer);
978 
979                 // Use only memory binding offset as base memory should be properly aligned by the driver
980                 const auto buffer_binding_address = buffer_state->binding.offset + vertex_buffer_offset;
981                 // Use 1 as vertex/instance index to use buffer stride as well
982                 const auto attrib_address = buffer_binding_address + vertex_buffer_stride + attribute_offset;
983 
984                 VkDeviceSize vtx_attrib_req_alignment = FormatElementSize(attribute_format);
985                 if (FormatElementIsTexel(attribute_format)) {
986                     vtx_attrib_req_alignment = SafeDivision(vtx_attrib_req_alignment, FormatChannelCount(attribute_format));
987                 }
988 
989                 if (SafeModulo(attrib_address, vtx_attrib_req_alignment) != 0) {
990                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
991                                     HandleToUint64(current_vtx_bfr_binding_info[vertex_binding].buffer),
992                                     kVUID_Core_DrawState_InvalidVtxAttributeAlignment,
993                                     "Invalid attribAddress alignment for vertex attribute " PRINTF_SIZE_T_SPECIFIER
994                                     " from %s and vertex %s.",
995                                     i, report_data->FormatHandle(state.pipeline_state->pipeline).c_str(),
996                                     report_data->FormatHandle(current_vtx_bfr_binding_info[vertex_binding].buffer).c_str());
997                 }
998             }
999         }
1000     } else {
1001         if ((!current_vtx_bfr_binding_info.empty()) && (!pCB->vertex_buffer_used)) {
1002             skip |=
1003                 log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1004                         HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_VtxIndexOutOfBounds,
1005                         "Vertex buffers are bound to %s but no vertex buffers are attached to %s.",
1006                         report_data->FormatHandle(pCB->commandBuffer).c_str(),
1007                         report_data->FormatHandle(state.pipeline_state->pipeline).c_str());
1008         }
1009     }
1010 
1011     // If Viewport or scissors are dynamic, verify that dynamic count matches PSO count.
1012     // Skip check if rasterization is disabled or there is no viewport.
1013     if ((!pPipeline->graphicsPipelineCI.pRasterizationState ||
1014          (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) &&
1015         pPipeline->graphicsPipelineCI.pViewportState) {
1016         bool dynViewport = IsDynamic(pPipeline, VK_DYNAMIC_STATE_VIEWPORT);
1017         bool dynScissor = IsDynamic(pPipeline, VK_DYNAMIC_STATE_SCISSOR);
1018 
1019         if (dynViewport) {
1020             const auto requiredViewportsMask = (1 << pPipeline->graphicsPipelineCI.pViewportState->viewportCount) - 1;
1021             const auto missingViewportMask = ~pCB->viewportMask & requiredViewportsMask;
1022             if (missingViewportMask) {
1023                 std::stringstream ss;
1024                 ss << "Dynamic viewport(s) ";
1025                 ListBits(ss, missingViewportMask);
1026                 ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetViewport().";
1027                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1028                                 kVUID_Core_DrawState_ViewportScissorMismatch, "%s", ss.str().c_str());
1029             }
1030         }
1031 
1032         if (dynScissor) {
1033             const auto requiredScissorMask = (1 << pPipeline->graphicsPipelineCI.pViewportState->scissorCount) - 1;
1034             const auto missingScissorMask = ~pCB->scissorMask & requiredScissorMask;
1035             if (missingScissorMask) {
1036                 std::stringstream ss;
1037                 ss << "Dynamic scissor(s) ";
1038                 ListBits(ss, missingScissorMask);
1039                 ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetScissor().";
1040                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1041                                 kVUID_Core_DrawState_ViewportScissorMismatch, "%s", ss.str().c_str());
1042             }
1043         }
1044     }
1045 
1046     // Verify that any MSAA request in PSO matches sample# in bound FB
1047     // Skip the check if rasterization is disabled.
1048     if (!pPipeline->graphicsPipelineCI.pRasterizationState ||
1049         (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) {
1050         VkSampleCountFlagBits pso_num_samples = GetNumSamples(pPipeline);
1051         if (pCB->activeRenderPass) {
1052             const auto render_pass_info = pCB->activeRenderPass->createInfo.ptr();
1053             const VkSubpassDescription2KHR *subpass_desc = &render_pass_info->pSubpasses[pCB->activeSubpass];
1054             uint32_t i;
1055             unsigned subpass_num_samples = 0;
1056 
1057             for (i = 0; i < subpass_desc->colorAttachmentCount; i++) {
1058                 const auto attachment = subpass_desc->pColorAttachments[i].attachment;
1059                 if (attachment != VK_ATTACHMENT_UNUSED)
1060                     subpass_num_samples |= (unsigned)render_pass_info->pAttachments[attachment].samples;
1061             }
1062 
1063             if (subpass_desc->pDepthStencilAttachment &&
1064                 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1065                 const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
1066                 subpass_num_samples |= (unsigned)render_pass_info->pAttachments[attachment].samples;
1067             }
1068 
1069             if (!(device_extensions.vk_amd_mixed_attachment_samples || device_extensions.vk_nv_framebuffer_mixed_samples) &&
1070                 ((subpass_num_samples & static_cast<unsigned>(pso_num_samples)) != subpass_num_samples)) {
1071                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1072                                 HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_NumSamplesMismatch,
1073                                 "Num samples mismatch! At draw-time in %s with %u samples while current %s w/ "
1074                                 "%u samples!",
1075                                 report_data->FormatHandle(pPipeline->pipeline).c_str(), pso_num_samples,
1076                                 report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(), subpass_num_samples);
1077             }
1078         } else {
1079             skip |=
1080                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1081                         HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_NoActiveRenderpass,
1082                         "No active render pass found at draw-time in %s!", report_data->FormatHandle(pPipeline->pipeline).c_str());
1083         }
1084     }
1085     // Verify that PSO creation renderPass is compatible with active renderPass
1086     if (pCB->activeRenderPass) {
1087         // TODO: Move all of the error codes common across different Draws into a LUT accessed by cmd_type
1088         // TODO: AMD extension codes are included here, but actual function entrypoints are not yet intercepted
1089         // Error codes for renderpass and subpass mismatches
1090         auto rp_error = "VUID-vkCmdDraw-renderPass-02684", sp_error = "VUID-vkCmdDraw-subpass-02685";
1091         switch (cmd_type) {
1092             case CMD_DRAWINDEXED:
1093                 rp_error = "VUID-vkCmdDrawIndexed-renderPass-02684";
1094                 sp_error = "VUID-vkCmdDrawIndexed-subpass-02685";
1095                 break;
1096             case CMD_DRAWINDIRECT:
1097                 rp_error = "VUID-vkCmdDrawIndirect-renderPass-02684";
1098                 sp_error = "VUID-vkCmdDrawIndirect-subpass-02685";
1099                 break;
1100             case CMD_DRAWINDIRECTCOUNTKHR:
1101                 rp_error = "VUID-vkCmdDrawIndirectCountKHR-renderPass-02684";
1102                 sp_error = "VUID-vkCmdDrawIndirectCountKHR-subpass-02685";
1103                 break;
1104             case CMD_DRAWINDEXEDINDIRECT:
1105                 rp_error = "VUID-vkCmdDrawIndexedIndirect-renderPass-02684";
1106                 sp_error = "VUID-vkCmdDrawIndexedIndirect-subpass-02685";
1107                 break;
1108             case CMD_DRAWINDEXEDINDIRECTCOUNTKHR:
1109                 rp_error = "VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684";
1110                 sp_error = "VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685";
1111                 break;
1112             case CMD_DRAWMESHTASKSNV:
1113                 rp_error = "VUID-vkCmdDrawMeshTasksNV-renderPass-02684";
1114                 sp_error = "VUID-vkCmdDrawMeshTasksNV-subpass-02685";
1115                 break;
1116             case CMD_DRAWMESHTASKSINDIRECTNV:
1117                 rp_error = "VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684";
1118                 sp_error = "VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02685";
1119                 break;
1120             case CMD_DRAWMESHTASKSINDIRECTCOUNTNV:
1121                 rp_error = "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684";
1122                 sp_error = "VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02685";
1123                 break;
1124             default:
1125                 assert(CMD_DRAW == cmd_type);
1126                 break;
1127         }
1128         if (pCB->activeRenderPass->renderPass != pPipeline->rp_state->renderPass) {
1129             // renderPass that PSO was created with must be compatible with active renderPass that PSO is being used with
1130             skip |= ValidateRenderPassCompatibility("active render pass", pCB->activeRenderPass, "pipeline state object",
1131                                                     pPipeline->rp_state.get(), caller, rp_error);
1132         }
1133         if (pPipeline->graphicsPipelineCI.subpass != pCB->activeSubpass) {
1134             skip |=
1135                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1136                         HandleToUint64(pPipeline->pipeline), sp_error, "Pipeline was built for subpass %u but used in subpass %u.",
1137                         pPipeline->graphicsPipelineCI.subpass, pCB->activeSubpass);
1138         }
1139     }
1140 
1141     return skip;
1142 }
1143 
1144 // For given cvdescriptorset::DescriptorSet, verify that its Set is compatible w/ the setLayout corresponding to
1145 // pipelineLayout[layoutIndex]
VerifySetLayoutCompatibility(const cvdescriptorset::DescriptorSet * descriptor_set,PIPELINE_LAYOUT_STATE const * pipeline_layout,const uint32_t layoutIndex,string & errorMsg)1146 static bool VerifySetLayoutCompatibility(const cvdescriptorset::DescriptorSet *descriptor_set,
1147                                          PIPELINE_LAYOUT_STATE const *pipeline_layout, const uint32_t layoutIndex,
1148                                          string &errorMsg) {
1149     auto num_sets = pipeline_layout->set_layouts.size();
1150     if (layoutIndex >= num_sets) {
1151         stringstream errorStr;
1152         errorStr << "VkPipelineLayout (" << pipeline_layout->layout << ") only contains " << num_sets
1153                  << " setLayouts corresponding to sets 0-" << num_sets - 1 << ", but you're attempting to bind set to index "
1154                  << layoutIndex;
1155         errorMsg = errorStr.str();
1156         return false;
1157     }
1158     if (descriptor_set->IsPushDescriptor()) return true;
1159     auto layout_node = pipeline_layout->set_layouts[layoutIndex];
1160     return cvdescriptorset::VerifySetLayoutCompatibility(layout_node.get(), descriptor_set->GetLayout().get(), &errorMsg);
1161 }
1162 
1163 // Validate overall state at the time of a draw call
ValidateCmdBufDrawState(const CMD_BUFFER_STATE * cb_node,CMD_TYPE cmd_type,const bool indexed,const VkPipelineBindPoint bind_point,const char * function,const char * pipe_err_code,const char * state_err_code) const1164 bool CoreChecks::ValidateCmdBufDrawState(const CMD_BUFFER_STATE *cb_node, CMD_TYPE cmd_type, const bool indexed,
1165                                          const VkPipelineBindPoint bind_point, const char *function, const char *pipe_err_code,
1166                                          const char *state_err_code) const {
1167     const auto last_bound_it = cb_node->lastBound.find(bind_point);
1168     const PIPELINE_STATE *pPipe = nullptr;
1169     if (last_bound_it != cb_node->lastBound.cend()) {
1170         pPipe = last_bound_it->second.pipeline_state;
1171     }
1172 
1173     if (nullptr == pPipe) {
1174         return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1175                        HandleToUint64(cb_node->commandBuffer), pipe_err_code,
1176                        "Must not call %s on this command buffer while there is no %s pipeline bound.", function,
1177                        bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS ? "Graphics" : "Compute");
1178     }
1179 
1180     bool result = false;
1181     auto const &state = last_bound_it->second;
1182 
1183     // First check flag states
1184     if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point) result = ValidateDrawStateFlags(cb_node, pPipe, indexed, state_err_code);
1185 
1186     // Now complete other state checks
1187     string errorString;
1188     auto const &pipeline_layout = pPipe->pipeline_layout;
1189 
1190     for (const auto &set_binding_pair : pPipe->active_slots) {
1191         uint32_t setIndex = set_binding_pair.first;
1192         // If valid set is not bound throw an error
1193         if ((state.per_set.size() <= setIndex) || (!state.per_set[setIndex].bound_descriptor_set)) {
1194             result |=
1195                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1196                         HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_DescriptorSetNotBound,
1197                         "%s uses set #%u but that set is not bound.", report_data->FormatHandle(pPipe->pipeline).c_str(), setIndex);
1198         } else if (!VerifySetLayoutCompatibility(state.per_set[setIndex].bound_descriptor_set, &pipeline_layout, setIndex,
1199                                                  errorString)) {
1200             // Set is bound but not compatible w/ overlapping pipeline_layout from PSO
1201             VkDescriptorSet setHandle = state.per_set[setIndex].bound_descriptor_set->GetSet();
1202             result |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1203                               HandleToUint64(setHandle), kVUID_Core_DrawState_PipelineLayoutsIncompatible,
1204                               "%s bound as set #%u is not compatible with overlapping %s due to: %s",
1205                               report_data->FormatHandle(setHandle).c_str(), setIndex,
1206                               report_data->FormatHandle(pipeline_layout.layout).c_str(), errorString.c_str());
1207         } else {  // Valid set is bound and layout compatible, validate that it's updated
1208             // Pull the set node
1209             const cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
1210             // Validate the draw-time state for this descriptor set
1211             std::string err_str;
1212             if (!descriptor_set->IsPushDescriptor()) {
1213                 // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor
1214                 // binding validation. Take the requested binding set and prefilter it to eliminate redundant validation checks.
1215                 // Here, the currently bound pipeline determines whether an image validation check is redundant...
1216                 // for images are the "req" portion of the binding_req is indirectly (but tightly) coupled to the pipeline.
1217                 cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
1218                 const auto &binding_req_map = reduced_map.FilteredMap(*cb_node, *pPipe);
1219 
1220                 // We can skip validating the descriptor set if "nothing" has changed since the last validation.
1221                 // Same set, no image layout changes, and same "pipeline state" (binding_req_map). If there are
1222                 // any dynamic descriptors, always revalidate rather than caching the values. We currently only
1223                 // apply this optimization if IsManyDescriptors is true, to avoid the overhead of copying the
1224                 // binding_req_map which could potentially be expensive.
1225                 bool need_validate =
1226                     !reduced_map.IsManyDescriptors() ||
1227                     // Revalidate each time if the set has dynamic offsets
1228                     state.per_set[setIndex].dynamicOffsets.size() > 0 ||
1229                     // Revalidate if descriptor set (or contents) has changed
1230                     state.per_set[setIndex].validated_set != descriptor_set ||
1231                     state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
1232                     (!disabled.image_layout_validation &&
1233                      state.per_set[setIndex].validated_set_image_layout_change_count != cb_node->image_layout_change_count) ||
1234                     // Revalidate if previous bindingReqMap doesn't include new bindingRepMap
1235                     !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
1236                                    state.per_set[setIndex].validated_set_binding_req_map.end(), set_binding_pair.second.begin(),
1237                                    set_binding_pair.second.end());
1238 
1239                 if (need_validate) {
1240                     if (!ValidateDrawState(descriptor_set, binding_req_map, state.per_set[setIndex].dynamicOffsets, cb_node,
1241                                            function, &err_str)) {
1242                         auto set = descriptor_set->GetSet();
1243                         result |=
1244                             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1245                                     HandleToUint64(set), kVUID_Core_DrawState_DescriptorSetNotUpdated,
1246                                     "%s bound as set #%u encountered the following validation error at %s time: %s",
1247                                     report_data->FormatHandle(set).c_str(), setIndex, function, err_str.c_str());
1248                     }
1249                 }
1250             }
1251         }
1252     }
1253 
1254     // Check general pipeline state that needs to be validated at drawtime
1255     if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point)
1256         result |= ValidatePipelineDrawtimeState(state, cb_node, cmd_type, pPipe, function);
1257 
1258     return result;
1259 }
1260 
UpdateDrawState(CMD_BUFFER_STATE * cb_state,const VkPipelineBindPoint bind_point)1261 void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
1262     auto &state = cb_state->lastBound[bind_point];
1263     PIPELINE_STATE *pPipe = state.pipeline_state;
1264     if (VK_NULL_HANDLE != state.pipeline_layout) {
1265         for (const auto &set_binding_pair : pPipe->active_slots) {
1266             uint32_t setIndex = set_binding_pair.first;
1267             // Pull the set node
1268             cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
1269             if (!descriptor_set->IsPushDescriptor()) {
1270                 // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding
1271 
1272                 // TODO: If recreating the reduced_map here shows up in profilinging, need to find a way of sharing with the
1273                 // Validate pass.  Though in the case of "many" descriptors, typically the descriptor count >> binding count
1274                 cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
1275                 const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);
1276 
1277                 if (reduced_map.IsManyDescriptors()) {
1278                     // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
1279                     descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
1280                 }
1281 
1282                 // We can skip updating the state if "nothing" has changed since the last validation.
1283                 // See CoreChecks::ValidateCmdBufDrawState for more details.
1284                 bool need_update =
1285                     !reduced_map.IsManyDescriptors() ||
1286                     // Update if descriptor set (or contents) has changed
1287                     state.per_set[setIndex].validated_set != descriptor_set ||
1288                     state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
1289                     (!disabled.image_layout_validation &&
1290                      state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count) ||
1291                     // Update if previous bindingReqMap doesn't include new bindingRepMap
1292                     !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
1293                                    state.per_set[setIndex].validated_set_binding_req_map.end(), set_binding_pair.second.begin(),
1294                                    set_binding_pair.second.end());
1295 
1296                 if (need_update) {
1297                     // Bind this set and its active descriptor resources to the command buffer
1298                     descriptor_set->UpdateDrawState(this, cb_state, binding_req_map);
1299 
1300                     state.per_set[setIndex].validated_set = descriptor_set;
1301                     state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
1302                     state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
1303                     if (reduced_map.IsManyDescriptors()) {
1304                         // Check whether old == new before assigning, the equality check is much cheaper than
1305                         // freeing and reallocating the map.
1306                         if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
1307                             state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
1308                         }
1309                     } else {
1310                         state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
1311                     }
1312                 }
1313             }
1314         }
1315     }
1316     if (!pPipe->vertex_binding_descriptions_.empty()) {
1317         cb_state->vertex_buffer_used = true;
1318     }
1319 }
1320 
ValidatePipelineLocked(std::vector<std::unique_ptr<PIPELINE_STATE>> const & pPipelines,int pipelineIndex) const1321 bool CoreChecks::ValidatePipelineLocked(std::vector<std::unique_ptr<PIPELINE_STATE>> const &pPipelines, int pipelineIndex) const {
1322     bool skip = false;
1323 
1324     const PIPELINE_STATE *pPipeline = pPipelines[pipelineIndex].get();
1325 
1326     // If create derivative bit is set, check that we've specified a base
1327     // pipeline correctly, and that the base pipeline was created to allow
1328     // derivatives.
1329     if (pPipeline->graphicsPipelineCI.flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) {
1330         const PIPELINE_STATE *pBasePipeline = nullptr;
1331         if (!((pPipeline->graphicsPipelineCI.basePipelineHandle != VK_NULL_HANDLE) ^
1332               (pPipeline->graphicsPipelineCI.basePipelineIndex != -1))) {
1333             // This check is a superset of "VUID-VkGraphicsPipelineCreateInfo-flags-00724" and
1334             // "VUID-VkGraphicsPipelineCreateInfo-flags-00725"
1335             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1336                             HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
1337                             "Invalid Pipeline CreateInfo: exactly one of base pipeline index and handle must be specified");
1338         } else if (pPipeline->graphicsPipelineCI.basePipelineIndex != -1) {
1339             if (pPipeline->graphicsPipelineCI.basePipelineIndex >= pipelineIndex) {
1340                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1341                                 HandleToUint64(device), "VUID-vkCreateGraphicsPipelines-flags-00720",
1342                                 "Invalid Pipeline CreateInfo: base pipeline must occur earlier in array than derivative pipeline.");
1343             } else {
1344                 pBasePipeline = pPipelines[pPipeline->graphicsPipelineCI.basePipelineIndex].get();
1345             }
1346         } else if (pPipeline->graphicsPipelineCI.basePipelineHandle != VK_NULL_HANDLE) {
1347             pBasePipeline = GetPipelineState(pPipeline->graphicsPipelineCI.basePipelineHandle);
1348         }
1349 
1350         if (pBasePipeline && !(pBasePipeline->graphicsPipelineCI.flags & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)) {
1351             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1352                             HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
1353                             "Invalid Pipeline CreateInfo: base pipeline does not allow derivatives.");
1354         }
1355     }
1356 
1357     return skip;
1358 }
1359 
1360 // UNLOCKED pipeline validation. DO NOT lookup objects in the CoreChecks->* maps in this function.
ValidatePipelineUnlocked(const PIPELINE_STATE * pPipeline,uint32_t pipelineIndex) const1361 bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint32_t pipelineIndex) const {
1362     bool skip = false;
1363 
1364     // Ensure the subpass index is valid. If not, then ValidateGraphicsPipelineShaderState
1365     // produces nonsense errors that confuse users. Other layers should already
1366     // emit errors for renderpass being invalid.
1367     auto subpass_desc = &pPipeline->rp_state->createInfo.pSubpasses[pPipeline->graphicsPipelineCI.subpass];
1368     if (pPipeline->graphicsPipelineCI.subpass >= pPipeline->rp_state->createInfo.subpassCount) {
1369         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1370                         "VUID-VkGraphicsPipelineCreateInfo-subpass-00759",
1371                         "Invalid Pipeline CreateInfo State: Subpass index %u is out of range for this renderpass (0..%u).",
1372                         pPipeline->graphicsPipelineCI.subpass, pPipeline->rp_state->createInfo.subpassCount - 1);
1373         subpass_desc = nullptr;
1374     }
1375 
1376     if (pPipeline->graphicsPipelineCI.pColorBlendState != NULL) {
1377         const safe_VkPipelineColorBlendStateCreateInfo *color_blend_state = pPipeline->graphicsPipelineCI.pColorBlendState;
1378         if (color_blend_state->attachmentCount != subpass_desc->colorAttachmentCount) {
1379             skip |=
1380                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1381                         "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746",
1382                         "vkCreateGraphicsPipelines(): %s subpass %u has colorAttachmentCount of %u which doesn't "
1383                         "match the pColorBlendState->attachmentCount of %u.",
1384                         report_data->FormatHandle(pPipeline->rp_state->renderPass).c_str(), pPipeline->graphicsPipelineCI.subpass,
1385                         subpass_desc->colorAttachmentCount, color_blend_state->attachmentCount);
1386         }
1387         if (!enabled_features.core.independentBlend) {
1388             if (pPipeline->attachments.size() > 1) {
1389                 const VkPipelineColorBlendAttachmentState *const pAttachments = &pPipeline->attachments[0];
1390                 for (size_t i = 1; i < pPipeline->attachments.size(); i++) {
1391                     // Quoting the spec: "If [the independent blend] feature is not enabled, the VkPipelineColorBlendAttachmentState
1392                     // settings for all color attachments must be identical." VkPipelineColorBlendAttachmentState contains
1393                     // only attachment state, so memcmp is best suited for the comparison
1394                     if (memcmp(static_cast<const void *>(pAttachments), static_cast<const void *>(&pAttachments[i]),
1395                                sizeof(pAttachments[0]))) {
1396                         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1397                                         HandleToUint64(device), "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605",
1398                                         "Invalid Pipeline CreateInfo: If independent blend feature not enabled, all elements of "
1399                                         "pAttachments must be identical.");
1400                         break;
1401                     }
1402                 }
1403             }
1404         }
1405         if (!enabled_features.core.logicOp && (pPipeline->graphicsPipelineCI.pColorBlendState->logicOpEnable != VK_FALSE)) {
1406             skip |=
1407                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1408                         "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606",
1409                         "Invalid Pipeline CreateInfo: If logic operations feature not enabled, logicOpEnable must be VK_FALSE.");
1410         }
1411         for (size_t i = 0; i < pPipeline->attachments.size(); i++) {
1412             if ((pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
1413                 (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
1414                 (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
1415                 (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
1416                 if (!enabled_features.core.dualSrcBlend) {
1417                     skip |=
1418                         log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1419                                 HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608",
1420                                 "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
1421                                 "].srcColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
1422                                 "enabled.",
1423                                 pipelineIndex, i, pPipeline->attachments[i].srcColorBlendFactor);
1424                 }
1425             }
1426             if ((pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
1427                 (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
1428                 (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
1429                 (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
1430                 if (!enabled_features.core.dualSrcBlend) {
1431                     skip |=
1432                         log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1433                                 HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609",
1434                                 "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
1435                                 "].dstColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
1436                                 "enabled.",
1437                                 pipelineIndex, i, pPipeline->attachments[i].dstColorBlendFactor);
1438                 }
1439             }
1440             if ((pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
1441                 (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
1442                 (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
1443                 (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
1444                 if (!enabled_features.core.dualSrcBlend) {
1445                     skip |=
1446                         log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1447                                 HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610",
1448                                 "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
1449                                 "].srcAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
1450                                 "enabled.",
1451                                 pipelineIndex, i, pPipeline->attachments[i].srcAlphaBlendFactor);
1452                 }
1453             }
1454             if ((pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
1455                 (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
1456                 (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
1457                 (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
1458                 if (!enabled_features.core.dualSrcBlend) {
1459                     skip |=
1460                         log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1461                                 HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611",
1462                                 "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
1463                                 "].dstAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
1464                                 "enabled.",
1465                                 pipelineIndex, i, pPipeline->attachments[i].dstAlphaBlendFactor);
1466                 }
1467             }
1468         }
1469     }
1470 
1471     if (ValidateGraphicsPipelineShaderState(pPipeline)) {
1472         skip = true;
1473     }
1474     // Each shader's stage must be unique
1475     if (pPipeline->duplicate_shaders) {
1476         for (uint32_t stage = VK_SHADER_STAGE_VERTEX_BIT; stage & VK_SHADER_STAGE_ALL_GRAPHICS; stage <<= 1) {
1477             if (pPipeline->duplicate_shaders & stage) {
1478                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1479                                 HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
1480                                 "Invalid Pipeline CreateInfo State: Multiple shaders provided for stage %s",
1481                                 string_VkShaderStageFlagBits(VkShaderStageFlagBits(stage)));
1482             }
1483         }
1484     }
1485     if (device_extensions.vk_nv_mesh_shader) {
1486         // VS or mesh is required
1487         if (!(pPipeline->active_shaders & (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_MESH_BIT_NV))) {
1488             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1489                             HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-stage-02096",
1490                             "Invalid Pipeline CreateInfo State: Vertex Shader or Mesh Shader required.");
1491         }
1492         // Can't mix mesh and VTG
1493         if ((pPipeline->active_shaders & (VK_SHADER_STAGE_MESH_BIT_NV | VK_SHADER_STAGE_TASK_BIT_NV)) &&
1494             (pPipeline->active_shaders &
1495              (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
1496               VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))) {
1497             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1498                             HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-pStages-02095",
1499                             "Invalid Pipeline CreateInfo State: Geometric shader stages must either be all mesh (mesh | task) "
1500                             "or all VTG (vertex, tess control, tess eval, geom).");
1501         }
1502     } else {
1503         // VS is required
1504         if (!(pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT)) {
1505             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1506                             HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-stage-00727",
1507                             "Invalid Pipeline CreateInfo State: Vertex Shader required.");
1508         }
1509     }
1510 
1511     if (!enabled_features.mesh_shader.meshShader && (pPipeline->active_shaders & VK_SHADER_STAGE_MESH_BIT_NV)) {
1512         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1513                         "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
1514                         "Invalid Pipeline CreateInfo State: Mesh Shader not supported.");
1515     }
1516 
1517     if (!enabled_features.mesh_shader.taskShader && (pPipeline->active_shaders & VK_SHADER_STAGE_TASK_BIT_NV)) {
1518         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1519                         "VUID-VkPipelineShaderStageCreateInfo-stage-02092",
1520                         "Invalid Pipeline CreateInfo State: Task Shader not supported.");
1521     }
1522 
1523     // Either both or neither TC/TE shaders should be defined
1524     bool has_control = (pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0;
1525     bool has_eval = (pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0;
1526     if (has_control && !has_eval) {
1527         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1528                         "VUID-VkGraphicsPipelineCreateInfo-pStages-00729",
1529                         "Invalid Pipeline CreateInfo State: TE and TC shaders must be included or excluded as a pair.");
1530     }
1531     if (!has_control && has_eval) {
1532         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1533                         "VUID-VkGraphicsPipelineCreateInfo-pStages-00730",
1534                         "Invalid Pipeline CreateInfo State: TE and TC shaders must be included or excluded as a pair.");
1535     }
1536     // Compute shaders should be specified independent of Gfx shaders
1537     if (pPipeline->active_shaders & VK_SHADER_STAGE_COMPUTE_BIT) {
1538         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1539                         "VUID-VkGraphicsPipelineCreateInfo-stage-00728",
1540                         "Invalid Pipeline CreateInfo State: Do not specify Compute Shader for Gfx Pipeline.");
1541     }
1542 
1543     if ((pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT) && !pPipeline->graphicsPipelineCI.pInputAssemblyState) {
1544         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1545                         "VUID-VkGraphicsPipelineCreateInfo-pStages-02098",
1546                         "Invalid Pipeline CreateInfo State: Missing pInputAssemblyState.");
1547     }
1548 
1549     // VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid for tessellation pipelines.
1550     // Mismatching primitive topology and tessellation fails graphics pipeline creation.
1551     if (has_control && has_eval &&
1552         (!pPipeline->graphicsPipelineCI.pInputAssemblyState ||
1553          pPipeline->graphicsPipelineCI.pInputAssemblyState->topology != VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
1554         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1555                         "VUID-VkGraphicsPipelineCreateInfo-pStages-00736",
1556                         "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST must be set as IA topology for "
1557                         "tessellation pipelines.");
1558     }
1559     if (pPipeline->graphicsPipelineCI.pInputAssemblyState) {
1560         if (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
1561             if (!has_control || !has_eval) {
1562                 skip |=
1563                     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1564                             HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-topology-00737",
1565                             "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid "
1566                             "for tessellation pipelines.");
1567             }
1568         }
1569 
1570         if ((pPipeline->graphicsPipelineCI.pInputAssemblyState->primitiveRestartEnable == VK_TRUE) &&
1571             (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_POINT_LIST ||
1572              pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
1573              pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST ||
1574              pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY ||
1575              pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY ||
1576              pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
1577             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1578                             HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
1579                             "topology is %s and primitiveRestartEnable is VK_TRUE. It is invalid.",
1580                             string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
1581         }
1582         if ((enabled_features.core.geometryShader == VK_FALSE) &&
1583             (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY ||
1584              pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY ||
1585              pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY ||
1586              pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY)) {
1587             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1588                             HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429",
1589                             "topology is %s and geometry shaders feature is not enabled. It is invalid.",
1590                             string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
1591         }
1592         if ((enabled_features.core.tessellationShader == VK_FALSE) &&
1593             (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
1594             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1595                             HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430",
1596                             "topology is %s and tessellation shaders feature is not enabled. It is invalid.",
1597                             string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
1598         }
1599     }
1600 
1601     // If a rasterization state is provided...
1602     if (pPipeline->graphicsPipelineCI.pRasterizationState) {
1603         if ((pPipeline->graphicsPipelineCI.pRasterizationState->depthClampEnable == VK_TRUE) &&
1604             (!enabled_features.core.depthClamp)) {
1605             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1606                             HandleToUint64(device), "VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782",
1607                             "vkCreateGraphicsPipelines(): the depthClamp device feature is disabled: the depthClampEnable member "
1608                             "of the VkPipelineRasterizationStateCreateInfo structure must be set to VK_FALSE.");
1609         }
1610 
1611         if (!IsDynamic(pPipeline, VK_DYNAMIC_STATE_DEPTH_BIAS) &&
1612             (pPipeline->graphicsPipelineCI.pRasterizationState->depthBiasClamp != 0.0) && (!enabled_features.core.depthBiasClamp)) {
1613             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1614                             HandleToUint64(device), kVUID_Core_DrawState_InvalidFeature,
1615                             "vkCreateGraphicsPipelines(): the depthBiasClamp device feature is disabled: the depthBiasClamp member "
1616                             "of the VkPipelineRasterizationStateCreateInfo structure must be set to 0.0 unless the "
1617                             "VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state is enabled");
1618         }
1619 
1620         // If rasterization is enabled...
1621         if (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) {
1622             if ((pPipeline->graphicsPipelineCI.pMultisampleState->alphaToOneEnable == VK_TRUE) &&
1623                 (!enabled_features.core.alphaToOne)) {
1624                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1625                                 HandleToUint64(device), "VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785",
1626                                 "vkCreateGraphicsPipelines(): the alphaToOne device feature is disabled: the alphaToOneEnable "
1627                                 "member of the VkPipelineMultisampleStateCreateInfo structure must be set to VK_FALSE.");
1628             }
1629 
1630             // If subpass uses a depth/stencil attachment, pDepthStencilState must be a pointer to a valid structure
1631             if (subpass_desc && subpass_desc->pDepthStencilAttachment &&
1632                 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1633                 if (!pPipeline->graphicsPipelineCI.pDepthStencilState) {
1634                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1635                                     HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752",
1636                                     "Invalid Pipeline CreateInfo State: pDepthStencilState is NULL when rasterization is enabled "
1637                                     "and subpass uses a depth/stencil attachment.");
1638 
1639                 } else if ((pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) &&
1640                            (!enabled_features.core.depthBounds)) {
1641                     skip |=
1642                         log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1643                                 HandleToUint64(device), "VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598",
1644                                 "vkCreateGraphicsPipelines(): the depthBounds device feature is disabled: the "
1645                                 "depthBoundsTestEnable member of the VkPipelineDepthStencilStateCreateInfo structure must be "
1646                                 "set to VK_FALSE.");
1647                 }
1648             }
1649 
1650             // If subpass uses color attachments, pColorBlendState must be valid pointer
1651             if (subpass_desc) {
1652                 uint32_t color_attachment_count = 0;
1653                 for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; ++i) {
1654                     if (subpass_desc->pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
1655                         ++color_attachment_count;
1656                     }
1657                 }
1658                 if (color_attachment_count > 0 && pPipeline->graphicsPipelineCI.pColorBlendState == nullptr) {
1659                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1660                                     HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753",
1661                                     "Invalid Pipeline CreateInfo State: pColorBlendState is NULL when rasterization is enabled and "
1662                                     "subpass uses color attachments.");
1663                 }
1664             }
1665         }
1666     }
1667 
1668     if ((pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT) && !pPipeline->graphicsPipelineCI.pVertexInputState) {
1669         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
1670                         "VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
1671                         "Invalid Pipeline CreateInfo State: Missing pVertexInputState.");
1672     }
1673 
1674     auto vi = pPipeline->graphicsPipelineCI.pVertexInputState;
1675     if (vi != NULL) {
1676         for (uint32_t j = 0; j < vi->vertexAttributeDescriptionCount; j++) {
1677             VkFormat format = vi->pVertexAttributeDescriptions[j].format;
1678             // Internal call to get format info.  Still goes through layers, could potentially go directly to ICD.
1679             VkFormatProperties properties;
1680             DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &properties);
1681             if ((properties.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) == 0) {
1682                 skip |=
1683                     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1684                             HandleToUint64(device), "VUID-VkVertexInputAttributeDescription-format-00623",
1685                             "vkCreateGraphicsPipelines: pCreateInfo[%d].pVertexInputState->vertexAttributeDescriptions[%d].format "
1686                             "(%s) is not a supported vertex buffer format.",
1687                             pipelineIndex, j, string_VkFormat(format));
1688             }
1689         }
1690     }
1691 
1692     if (pPipeline->graphicsPipelineCI.pMultisampleState) {
1693         auto accumColorSamples = [subpass_desc, pPipeline](uint32_t &samples) {
1694             for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; i++) {
1695                 const auto attachment = subpass_desc->pColorAttachments[i].attachment;
1696                 if (attachment != VK_ATTACHMENT_UNUSED) {
1697                     samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
1698                 }
1699             }
1700         };
1701 
1702         if (!(device_extensions.vk_amd_mixed_attachment_samples || device_extensions.vk_nv_framebuffer_mixed_samples)) {
1703             uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
1704             uint32_t subpass_num_samples = 0;
1705 
1706             accumColorSamples(subpass_num_samples);
1707 
1708             if (subpass_desc->pDepthStencilAttachment &&
1709                 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1710                 const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
1711                 subpass_num_samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
1712             }
1713 
1714             // subpass_num_samples is 0 when the subpass has no attachments or if all attachments are VK_ATTACHMENT_UNUSED.
1715             // Only validate the value of subpass_num_samples if the subpass has attachments that are not VK_ATTACHMENT_UNUSED.
1716             if (subpass_num_samples && (!IsPowerOfTwo(subpass_num_samples) || (subpass_num_samples != raster_samples))) {
1717                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1718                                 HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-00757",
1719                                 "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
1720                                 "does not match the number of samples of the RenderPass color and/or depth attachment.",
1721                                 pipelineIndex, raster_samples);
1722             }
1723         }
1724 
1725         if (device_extensions.vk_amd_mixed_attachment_samples) {
1726             VkSampleCountFlagBits max_sample_count = static_cast<VkSampleCountFlagBits>(0);
1727             for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; ++i) {
1728                 if (subpass_desc->pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
1729                     max_sample_count = std::max(
1730                         max_sample_count,
1731                         pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pColorAttachments[i].attachment].samples);
1732                 }
1733             }
1734             if (subpass_desc->pDepthStencilAttachment &&
1735                 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1736                 max_sample_count = std::max(
1737                     max_sample_count,
1738                     pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pDepthStencilAttachment->attachment].samples);
1739             }
1740             if ((pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) &&
1741                 (pPipeline->graphicsPipelineCI.pMultisampleState->rasterizationSamples != max_sample_count)) {
1742                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1743                                 HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01505",
1744                                 "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%s) != max "
1745                                 "attachment samples (%s) used in subpass %u.",
1746                                 pipelineIndex,
1747                                 string_VkSampleCountFlagBits(pPipeline->graphicsPipelineCI.pMultisampleState->rasterizationSamples),
1748                                 string_VkSampleCountFlagBits(max_sample_count), pPipeline->graphicsPipelineCI.subpass);
1749             }
1750         }
1751 
1752         if (device_extensions.vk_nv_framebuffer_mixed_samples) {
1753             uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
1754             uint32_t subpass_color_samples = 0;
1755 
1756             accumColorSamples(subpass_color_samples);
1757 
1758             if (subpass_desc->pDepthStencilAttachment &&
1759                 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1760                 const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
1761                 const uint32_t subpass_depth_samples =
1762                     static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
1763 
1764                 if (pPipeline->graphicsPipelineCI.pDepthStencilState) {
1765                     const bool ds_test_enabled =
1766                         (pPipeline->graphicsPipelineCI.pDepthStencilState->depthTestEnable == VK_TRUE) ||
1767                         (pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) ||
1768                         (pPipeline->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE);
1769 
1770                     if (ds_test_enabled && (!IsPowerOfTwo(subpass_depth_samples) || (raster_samples != subpass_depth_samples))) {
1771                         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1772                                         HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01411",
1773                                         "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
1774                                         "does not match the number of samples of the RenderPass depth attachment (%u).",
1775                                         pipelineIndex, raster_samples, subpass_depth_samples);
1776                     }
1777                 }
1778             }
1779 
1780             if (IsPowerOfTwo(subpass_color_samples)) {
1781                 if (raster_samples < subpass_color_samples) {
1782                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1783                                     HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01412",
1784                                     "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
1785                                     "is not greater or equal to the number of samples of the RenderPass color attachment (%u).",
1786                                     pipelineIndex, raster_samples, subpass_color_samples);
1787                 }
1788 
1789                 if (pPipeline->graphicsPipelineCI.pMultisampleState) {
1790                     if ((raster_samples > subpass_color_samples) &&
1791                         (pPipeline->graphicsPipelineCI.pMultisampleState->sampleShadingEnable == VK_TRUE)) {
1792                         skip |=
1793                             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1794                                     HandleToUint64(device), "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415",
1795                                     "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->sampleShadingEnable must be "
1796                                     "VK_FALSE when "
1797                                     "pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) is greater than the number of "
1798                                     "samples of the "
1799                                     "subpass color attachment (%u).",
1800                                     pipelineIndex, pipelineIndex, raster_samples, subpass_color_samples);
1801                     }
1802 
1803                     const auto *coverage_modulation_state = lvl_find_in_chain<VkPipelineCoverageModulationStateCreateInfoNV>(
1804                         pPipeline->graphicsPipelineCI.pMultisampleState->pNext);
1805 
1806                     if (coverage_modulation_state && (coverage_modulation_state->coverageModulationTableEnable == VK_TRUE)) {
1807                         if (coverage_modulation_state->coverageModulationTableCount != (raster_samples / subpass_color_samples)) {
1808                             skip |=
1809                                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1810                                         HandleToUint64(device),
1811                                         "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405",
1812                                         "vkCreateGraphicsPipelines: pCreateInfos[%d] VkPipelineCoverageModulationStateCreateInfoNV "
1813                                         "coverageModulationTableCount of %u is invalid.",
1814                                         pipelineIndex, coverage_modulation_state->coverageModulationTableCount);
1815                         }
1816                     }
1817                 }
1818             }
1819         }
1820 
1821         if (device_extensions.vk_nv_fragment_coverage_to_color) {
1822             const auto coverage_to_color_state =
1823                 lvl_find_in_chain<VkPipelineCoverageToColorStateCreateInfoNV>(pPipeline->graphicsPipelineCI.pMultisampleState);
1824 
1825             if (coverage_to_color_state && coverage_to_color_state->coverageToColorEnable == VK_TRUE) {
1826                 bool attachment_is_valid = false;
1827                 std::string error_detail;
1828 
1829                 if (coverage_to_color_state->coverageToColorLocation < subpass_desc->colorAttachmentCount) {
1830                     const auto color_attachment_ref =
1831                         subpass_desc->pColorAttachments[coverage_to_color_state->coverageToColorLocation];
1832                     if (color_attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
1833                         const auto color_attachment = pPipeline->rp_state->createInfo.pAttachments[color_attachment_ref.attachment];
1834 
1835                         switch (color_attachment.format) {
1836                             case VK_FORMAT_R8_UINT:
1837                             case VK_FORMAT_R8_SINT:
1838                             case VK_FORMAT_R16_UINT:
1839                             case VK_FORMAT_R16_SINT:
1840                             case VK_FORMAT_R32_UINT:
1841                             case VK_FORMAT_R32_SINT:
1842                                 attachment_is_valid = true;
1843                                 break;
1844                             default:
1845                                 string_sprintf(&error_detail, "references an attachment with an invalid format (%s).",
1846                                                string_VkFormat(color_attachment.format));
1847                                 break;
1848                         }
1849                     } else {
1850                         string_sprintf(&error_detail,
1851                                        "references an invalid attachment. The subpass pColorAttachments[%" PRIu32
1852                                        "].attachment has the value "
1853                                        "VK_ATTACHMENT_UNUSED.",
1854                                        coverage_to_color_state->coverageToColorLocation);
1855                     }
1856                 } else {
1857                     string_sprintf(&error_detail,
1858                                    "references an non-existing attachment since the subpass colorAttachmentCount is %" PRIu32 ".",
1859                                    subpass_desc->colorAttachmentCount);
1860                 }
1861 
1862                 if (!attachment_is_valid) {
1863                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
1864                                     HandleToUint64(device),
1865                                     "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
1866                                     "vkCreateGraphicsPipelines: pCreateInfos[%" PRId32
1867                                     "].pMultisampleState VkPipelineCoverageToColorStateCreateInfoNV "
1868                                     "coverageToColorLocation = %" PRIu32 " %s",
1869                                     pipelineIndex, coverage_to_color_state->coverageToColorLocation, error_detail.c_str());
1870                 }
1871             }
1872         }
1873     }
1874 
1875     return skip;
1876 }
1877 
1878 // Block of code at start here specifically for managing/tracking DSs
1879 
1880 // Validate that given set is valid and that it's not being used by an in-flight CmdBuffer
1881 // func_str is the name of the calling function
1882 // Return false if no errors occur
1883 // Return true if validation error occurs and callback returns true (to skip upcoming API call down the chain)
ValidateIdleDescriptorSet(VkDescriptorSet set,const char * func_str)1884 bool CoreChecks::ValidateIdleDescriptorSet(VkDescriptorSet set, const char *func_str) {
1885     if (disabled.idle_descriptor_set) return false;
1886     bool skip = false;
1887     auto set_node = setMap.find(set);
1888     if (set_node == setMap.end()) {
1889         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1890                         HandleToUint64(set), kVUID_Core_DrawState_DoubleDestroy,
1891                         "Cannot call %s() on %s that has not been allocated.", func_str, report_data->FormatHandle(set).c_str());
1892     } else {
1893         // TODO : This covers various error cases so should pass error enum into this function and use passed in enum here
1894         if (set_node->second->in_use.load()) {
1895             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1896                             HandleToUint64(set), "VUID-vkFreeDescriptorSets-pDescriptorSets-00309",
1897                             "Cannot call %s() on %s that is in use by a command buffer.", func_str,
1898                             report_data->FormatHandle(set).c_str());
1899         }
1900     }
1901     return skip;
1902 }
1903 
1904 // Remove set from setMap and delete the set
FreeDescriptorSet(cvdescriptorset::DescriptorSet * descriptor_set)1905 void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
1906     setMap.erase(descriptor_set->GetSet());
1907 }
1908 
1909 // Free all DS Pools including their Sets & related sub-structs
1910 // NOTE : Calls to this function should be wrapped in mutex
DeleteDescriptorSetPools()1911 void ValidationStateTracker::DeleteDescriptorSetPools() {
1912     for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
1913         // Remove this pools' sets from setMap and delete them
1914         for (auto ds : ii->second->sets) {
1915             FreeDescriptorSet(ds);
1916         }
1917         ii->second->sets.clear();
1918         ii = descriptorPoolMap.erase(ii);
1919     }
1920 }
1921 
1922 // If a renderpass is active, verify that the given command type is appropriate for current subpass state
ValidateCmdSubpassState(const CMD_BUFFER_STATE * pCB,const CMD_TYPE cmd_type) const1923 bool CoreChecks::ValidateCmdSubpassState(const CMD_BUFFER_STATE *pCB, const CMD_TYPE cmd_type) const {
1924     if (!pCB->activeRenderPass) return false;
1925     bool skip = false;
1926     if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS &&
1927         (cmd_type != CMD_EXECUTECOMMANDS && cmd_type != CMD_NEXTSUBPASS && cmd_type != CMD_ENDRENDERPASS &&
1928          cmd_type != CMD_NEXTSUBPASS2KHR && cmd_type != CMD_ENDRENDERPASS2KHR)) {
1929         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1930                         HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
1931                         "Commands cannot be called in a subpass using secondary command buffers.");
1932     } else if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_INLINE && cmd_type == CMD_EXECUTECOMMANDS) {
1933         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1934                         HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
1935                         "vkCmdExecuteCommands() cannot be called in a subpass using inline commands.");
1936     }
1937     return skip;
1938 }
1939 
ValidateCmdQueueFlags(const CMD_BUFFER_STATE * cb_node,const char * caller_name,VkQueueFlags required_flags,const char * error_code) const1940 bool CoreChecks::ValidateCmdQueueFlags(const CMD_BUFFER_STATE *cb_node, const char *caller_name, VkQueueFlags required_flags,
1941                                        const char *error_code) const {
1942     auto pool = GetCommandPoolState(cb_node->createInfo.commandPool);
1943     if (pool) {
1944         VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pool->queueFamilyIndex].queueFlags;
1945         if (!(required_flags & queue_flags)) {
1946             string required_flags_string;
1947             for (auto flag : {VK_QUEUE_TRANSFER_BIT, VK_QUEUE_GRAPHICS_BIT, VK_QUEUE_COMPUTE_BIT}) {
1948                 if (flag & required_flags) {
1949                     if (required_flags_string.size()) {
1950                         required_flags_string += " or ";
1951                     }
1952                     required_flags_string += string_VkQueueFlagBits(flag);
1953                 }
1954             }
1955             return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1956                            HandleToUint64(cb_node->commandBuffer), error_code,
1957                            "Cannot call %s on a command buffer allocated from a pool without %s capabilities..", caller_name,
1958                            required_flags_string.c_str());
1959         }
1960     }
1961     return false;
1962 }
1963 
GetCauseStr(VulkanTypedHandle obj)1964 static char const *GetCauseStr(VulkanTypedHandle obj) {
1965     if (obj.type == kVulkanObjectTypeDescriptorSet) return "destroyed or updated";
1966     if (obj.type == kVulkanObjectTypeCommandBuffer) return "destroyed or rerecorded";
1967     return "destroyed";
1968 }
1969 
ReportInvalidCommandBuffer(const CMD_BUFFER_STATE * cb_state,const char * call_source) const1970 bool CoreChecks::ReportInvalidCommandBuffer(const CMD_BUFFER_STATE *cb_state, const char *call_source) const {
1971     bool skip = false;
1972     for (auto obj : cb_state->broken_bindings) {
1973         const char *cause_str = GetCauseStr(obj);
1974         string VUID;
1975         string_sprintf(&VUID, "%s-%s", kVUID_Core_DrawState_InvalidCommandBuffer, object_string[obj.type]);
1976         skip |=
1977             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1978                     HandleToUint64(cb_state->commandBuffer), VUID.c_str(),
1979                     "You are adding %s to %s that is invalid because bound %s was %s.", call_source,
1980                     report_data->FormatHandle(cb_state->commandBuffer).c_str(), report_data->FormatHandle(obj).c_str(), cause_str);
1981     }
1982     return skip;
1983 }
1984 
// 'commandBuffer must be in the recording state' valid usage error code for each command
// Autogenerated as part of the vk_validation_error_message.h codegen
// Indexed by CMD_TYPE; consumed by ValidateCmd when a command is recorded outside Begin/End.
static const std::array<const char *, CMD_RANGE_SIZE> must_be_recording_list = {{VUID_MUST_BE_RECORDING_LIST}};
1988 
1989 // Validate the given command being added to the specified cmd buffer, flagging errors if CB is not in the recording state or if
1990 // there's an issue with the Cmd ordering
ValidateCmd(const CMD_BUFFER_STATE * cb_state,const CMD_TYPE cmd,const char * caller_name) const1991 bool CoreChecks::ValidateCmd(const CMD_BUFFER_STATE *cb_state, const CMD_TYPE cmd, const char *caller_name) const {
1992     switch (cb_state->state) {
1993         case CB_RECORDING:
1994             return ValidateCmdSubpassState(cb_state, cmd);
1995 
1996         case CB_INVALID_COMPLETE:
1997         case CB_INVALID_INCOMPLETE:
1998             return ReportInvalidCommandBuffer(cb_state, caller_name);
1999 
2000         default:
2001             assert(cmd != CMD_NONE);
2002             const auto error = must_be_recording_list[cmd];
2003             return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2004                            HandleToUint64(cb_state->commandBuffer), error,
2005                            "You must call vkBeginCommandBuffer() before this call to %s.", caller_name);
2006     }
2007 }
2008 
ValidateDeviceMaskToPhysicalDeviceCount(uint32_t deviceMask,VkDebugReportObjectTypeEXT VUID_handle_type,uint64_t VUID_handle,const char * VUID) const2009 bool CoreChecks::ValidateDeviceMaskToPhysicalDeviceCount(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type,
2010                                                          uint64_t VUID_handle, const char *VUID) const {
2011     bool skip = false;
2012     uint32_t count = 1 << physical_device_count;
2013     if (count <= deviceMask) {
2014         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
2015                         "deviceMask(0x%" PRIx32 ") is invaild. Physical device count is %" PRIu32 ".", deviceMask,
2016                         physical_device_count);
2017     }
2018     return skip;
2019 }
2020 
ValidateDeviceMaskToZero(uint32_t deviceMask,VkDebugReportObjectTypeEXT VUID_handle_type,uint64_t VUID_handle,const char * VUID) const2021 bool CoreChecks::ValidateDeviceMaskToZero(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
2022                                           const char *VUID) const {
2023     bool skip = false;
2024     if (deviceMask == 0) {
2025         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
2026                         "deviceMask(0x%" PRIx32 ") must be non-zero.", deviceMask);
2027     }
2028     return skip;
2029 }
2030 
ValidateDeviceMaskToCommandBuffer(const CMD_BUFFER_STATE * pCB,uint32_t deviceMask,VkDebugReportObjectTypeEXT VUID_handle_type,uint64_t VUID_handle,const char * VUID) const2031 bool CoreChecks::ValidateDeviceMaskToCommandBuffer(const CMD_BUFFER_STATE *pCB, uint32_t deviceMask,
2032                                                    VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
2033                                                    const char *VUID) const {
2034     bool skip = false;
2035     if ((deviceMask & pCB->initial_device_mask) != deviceMask) {
2036         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
2037                         "deviceMask(0x%" PRIx32 ") is not a subset of %s initial device mask(0x%" PRIx32 ").", deviceMask,
2038                         report_data->FormatHandle(pCB->commandBuffer).c_str(), pCB->initial_device_mask);
2039     }
2040     return skip;
2041 }
2042 
ValidateDeviceMaskToRenderPass(const CMD_BUFFER_STATE * pCB,uint32_t deviceMask,VkDebugReportObjectTypeEXT VUID_handle_type,uint64_t VUID_handle,const char * VUID)2043 bool CoreChecks::ValidateDeviceMaskToRenderPass(const CMD_BUFFER_STATE *pCB, uint32_t deviceMask,
2044                                                 VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
2045                                                 const char *VUID) {
2046     bool skip = false;
2047     if ((deviceMask & pCB->active_render_pass_device_mask) != deviceMask) {
2048         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
2049                         "deviceMask(0x%" PRIx32 ") is not a subset of %s device mask(0x%" PRIx32 ").", deviceMask,
2050                         report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(), pCB->active_render_pass_device_mask);
2051     }
2052     return skip;
2053 }
2054 
2055 // For given object struct return a ptr of BASE_NODE type for its wrapping struct
GetStateStructPtrFromObject(const VulkanTypedHandle & object_struct)2056 BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
2057     BASE_NODE *base_ptr = nullptr;
2058     switch (object_struct.type) {
2059         case kVulkanObjectTypeDescriptorSet: {
2060             base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
2061             break;
2062         }
2063         case kVulkanObjectTypeSampler: {
2064             base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
2065             break;
2066         }
2067         case kVulkanObjectTypeQueryPool: {
2068             base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
2069             break;
2070         }
2071         case kVulkanObjectTypePipeline: {
2072             base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
2073             break;
2074         }
2075         case kVulkanObjectTypeBuffer: {
2076             base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
2077             break;
2078         }
2079         case kVulkanObjectTypeBufferView: {
2080             base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
2081             break;
2082         }
2083         case kVulkanObjectTypeImage: {
2084             base_ptr = GetImageState(object_struct.Cast<VkImage>());
2085             break;
2086         }
2087         case kVulkanObjectTypeImageView: {
2088             base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
2089             break;
2090         }
2091         case kVulkanObjectTypeEvent: {
2092             base_ptr = GetEventState(object_struct.Cast<VkEvent>());
2093             break;
2094         }
2095         case kVulkanObjectTypeDescriptorPool: {
2096             base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
2097             break;
2098         }
2099         case kVulkanObjectTypeCommandPool: {
2100             base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
2101             break;
2102         }
2103         case kVulkanObjectTypeFramebuffer: {
2104             base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
2105             break;
2106         }
2107         case kVulkanObjectTypeRenderPass: {
2108             base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
2109             break;
2110         }
2111         case kVulkanObjectTypeDeviceMemory: {
2112             base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
2113             break;
2114         }
2115         case kVulkanObjectTypeAccelerationStructureNV: {
2116             base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
2117             break;
2118         }
2119         default:
2120             // TODO : Any other objects to be handled here?
2121             assert(0);
2122             break;
2123     }
2124     return base_ptr;
2125 }
2126 
2127 // Tie the VulkanTypedHandle to the cmd buffer which includes:
2128 //  Add object_binding to cmd buffer
2129 //  Add cb_binding to object
AddCommandBufferBinding(std::unordered_set<CMD_BUFFER_STATE * > * cb_bindings,const VulkanTypedHandle & obj,CMD_BUFFER_STATE * cb_node)2130 void ValidationStateTracker::AddCommandBufferBinding(std::unordered_set<CMD_BUFFER_STATE *> *cb_bindings,
2131                                                      const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
2132     if (disabled.command_buffer_state) {
2133         return;
2134     }
2135     cb_bindings->insert(cb_node);
2136     cb_node->object_bindings.insert(obj);
2137 }
2138 // For a given object, if cb_node is in that objects cb_bindings, remove cb_node
RemoveCommandBufferBinding(VulkanTypedHandle const & object,CMD_BUFFER_STATE * cb_node)2139 void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
2140     BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
2141     if (base_obj) base_obj->cb_bindings.erase(cb_node);
2142 }
// Reset the command buffer state
//  Maintain the createInfo and set state to CB_NEW, but clear all other state
// Also detaches the CB from everything that references it (linked secondaries, bound
// objects, framebuffers) and finally fires the registered reset callback, if any.
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        // Drop all per-bind-point pipeline/descriptor bindings
        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->queryToStateMap.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->eventToStageMap.clear();
        pCB->cb_vertex_buffer_binding_info.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        // Note: must happen before linkedCommandBuffers is cleared below.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        ClearCmdBufAndMemReferences(pCB);
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        // Each bound object also tracks this CB; unhook both directions before clearing.
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
    }
    // Fire the reset hook (e.g. GPU-assisted validation) even if the CB was unknown
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
2226 
MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const * ds)2227 CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2228     // initially assume everything is static state
2229     CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2230 
2231     if (ds) {
2232         for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
2233             switch (ds->pDynamicStates[i]) {
2234                 case VK_DYNAMIC_STATE_LINE_WIDTH:
2235                     flags &= ~CBSTATUS_LINE_WIDTH_SET;
2236                     break;
2237                 case VK_DYNAMIC_STATE_DEPTH_BIAS:
2238                     flags &= ~CBSTATUS_DEPTH_BIAS_SET;
2239                     break;
2240                 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
2241                     flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
2242                     break;
2243                 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
2244                     flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
2245                     break;
2246                 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
2247                     flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
2248                     break;
2249                 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
2250                     flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
2251                     break;
2252                 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
2253                     flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
2254                     break;
2255                 case VK_DYNAMIC_STATE_SCISSOR:
2256                     flags &= ~CBSTATUS_SCISSOR_SET;
2257                     break;
2258                 case VK_DYNAMIC_STATE_VIEWPORT:
2259                     flags &= ~CBSTATUS_VIEWPORT_SET;
2260                     break;
2261                 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
2262                     flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2263                     break;
2264                 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
2265                     flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
2266                     break;
2267                 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
2268                     flags &= ~CBSTATUS_LINE_STIPPLE_SET;
2269                     break;
2270                 default:
2271                     break;
2272             }
2273         }
2274     }
2275 
2276     return flags;
2277 }
2278 
2279 // Flags validation error if the associated call is made inside a render pass. The apiName routine should ONLY be called outside a
2280 // render pass.
InsideRenderPass(const CMD_BUFFER_STATE * pCB,const char * apiName,const char * msgCode) const2281 bool CoreChecks::InsideRenderPass(const CMD_BUFFER_STATE *pCB, const char *apiName, const char *msgCode) const {
2282     bool inside = false;
2283     if (pCB->activeRenderPass) {
2284         inside = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2285                          HandleToUint64(pCB->commandBuffer), msgCode, "%s: It is invalid to issue this call inside an active %s.",
2286                          apiName, report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str());
2287     }
2288     return inside;
2289 }
2290 
2291 // Flags validation error if the associated call is made outside a render pass. The apiName
2292 // routine should ONLY be called inside a render pass.
OutsideRenderPass(const CMD_BUFFER_STATE * pCB,const char * apiName,const char * msgCode) const2293 bool CoreChecks::OutsideRenderPass(const CMD_BUFFER_STATE *pCB, const char *apiName, const char *msgCode) const {
2294     bool outside = false;
2295     if (((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) && (!pCB->activeRenderPass)) ||
2296         ((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) && (!pCB->activeRenderPass) &&
2297          !(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT))) {
2298         outside = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2299                           HandleToUint64(pCB->commandBuffer), msgCode, "%s: This call must be issued inside an active render pass.",
2300                           apiName);
2301     }
2302     return outside;
2303 }
2304 
InitGpuValidation()2305 void CoreChecks::InitGpuValidation() {
2306     // Process the layer settings file.
2307     enum CoreValidationGpuFlagBits {
2308         CORE_VALIDATION_GPU_VALIDATION_ALL_BIT = 0x00000001,
2309         CORE_VALIDATION_GPU_VALIDATION_RESERVE_BINDING_SLOT_BIT = 0x00000002,
2310     };
2311     typedef VkFlags CoreGPUFlags;
2312     static const std::unordered_map<std::string, VkFlags> gpu_flags_option_definitions = {
2313         {std::string("all"), CORE_VALIDATION_GPU_VALIDATION_ALL_BIT},
2314         {std::string("reserve_binding_slot"), CORE_VALIDATION_GPU_VALIDATION_RESERVE_BINDING_SLOT_BIT},
2315     };
2316     std::string gpu_flags_key = "lunarg_core_validation.gpu_validation";
2317     CoreGPUFlags gpu_flags = GetLayerOptionFlags(gpu_flags_key, gpu_flags_option_definitions, 0);
2318     gpu_flags_key = "khronos_validation.gpu_validation";
2319     gpu_flags |= GetLayerOptionFlags(gpu_flags_key, gpu_flags_option_definitions, 0);
2320     if (gpu_flags & CORE_VALIDATION_GPU_VALIDATION_ALL_BIT) {
2321         instance_state->enabled.gpu_validation = true;
2322     }
2323     if (gpu_flags & CORE_VALIDATION_GPU_VALIDATION_RESERVE_BINDING_SLOT_BIT) {
2324         instance_state->enabled.gpu_validation_reserve_binding_slot = true;
2325     }
2326 }
2327 
PostCallRecordCreateInstance(const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance,VkResult result)2328 void CoreChecks::PostCallRecordCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
2329                                               VkInstance *pInstance, VkResult result) {
2330     if (VK_SUCCESS != result) return;
2331     InitGpuValidation();
2332 }
2333 
ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE * pd_state,uint32_t requested_queue_family,const char * err_code,const char * cmd_name,const char * queue_family_var_name)2334 bool CoreChecks::ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE *pd_state, uint32_t requested_queue_family,
2335                                           const char *err_code, const char *cmd_name, const char *queue_family_var_name) {
2336     bool skip = false;
2337 
2338     if (requested_queue_family >= pd_state->queue_family_known_count) {
2339         const char *conditional_ext_cmd =
2340             instance_extensions.vk_khr_get_physical_device_properties_2 ? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]" : "";
2341 
2342         const std::string count_note = (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
2343                                            ? "the pQueueFamilyPropertyCount was never obtained"
2344                                            : "i.e. is not less than " + std::to_string(pd_state->queue_family_known_count);
2345 
2346         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
2347                         HandleToUint64(pd_state->phys_device), err_code,
2348                         "%s: %s (= %" PRIu32
2349                         ") is not less than any previously obtained pQueueFamilyPropertyCount from "
2350                         "vkGetPhysicalDeviceQueueFamilyProperties%s (%s).",
2351                         cmd_name, queue_family_var_name, requested_queue_family, conditional_ext_cmd, count_note.c_str());
2352     }
2353     return skip;
2354 }
2355 
2356 // Verify VkDeviceQueueCreateInfos
ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE * pd_state,uint32_t info_count,const VkDeviceQueueCreateInfo * infos)2357 bool CoreChecks::ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE *pd_state, uint32_t info_count,
2358                                                 const VkDeviceQueueCreateInfo *infos) {
2359     bool skip = false;
2360 
2361     std::unordered_set<uint32_t> queue_family_set;
2362 
2363     for (uint32_t i = 0; i < info_count; ++i) {
2364         const auto requested_queue_family = infos[i].queueFamilyIndex;
2365 
2366         std::string queue_family_var_name = "pCreateInfo->pQueueCreateInfos[" + std::to_string(i) + "].queueFamilyIndex";
2367         skip |= ValidateQueueFamilyIndex(pd_state, requested_queue_family, "VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381",
2368                                          "vkCreateDevice", queue_family_var_name.c_str());
2369 
2370         if (queue_family_set.insert(requested_queue_family).second == false) {
2371             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
2372                             HandleToUint64(pd_state->phys_device), "VUID-VkDeviceCreateInfo-queueFamilyIndex-00372",
2373                             "CreateDevice(): %s (=%" PRIu32 ") is not unique within pQueueCreateInfos.",
2374                             queue_family_var_name.c_str(), requested_queue_family);
2375         }
2376 
2377         // Verify that requested queue count of queue family is known to be valid at this point in time
2378         if (requested_queue_family < pd_state->queue_family_known_count) {
2379             const auto requested_queue_count = infos[i].queueCount;
2380             const bool queue_family_has_props = requested_queue_family < pd_state->queue_family_properties.size();
2381             // spec guarantees at least one queue for each queue family
2382             const uint32_t available_queue_count =
2383                 queue_family_has_props ? pd_state->queue_family_properties[requested_queue_family].queueCount : 1;
2384             const char *conditional_ext_cmd = instance_extensions.vk_khr_get_physical_device_properties_2
2385                                                   ? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]"
2386                                                   : "";
2387 
2388             if (requested_queue_count > available_queue_count) {
2389                 const std::string count_note =
2390                     queue_family_has_props
2391                         ? "i.e. is not less than or equal to " +
2392                               std::to_string(pd_state->queue_family_properties[requested_queue_family].queueCount)
2393                         : "the pQueueFamilyProperties[" + std::to_string(requested_queue_family) + "] was never obtained";
2394 
2395                 skip |= log_msg(
2396                     report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
2397                     HandleToUint64(pd_state->phys_device), "VUID-VkDeviceQueueCreateInfo-queueCount-00382",
2398                     "vkCreateDevice: pCreateInfo->pQueueCreateInfos[%" PRIu32 "].queueCount (=%" PRIu32
2399                     ") is not less than or equal to available queue count for this pCreateInfo->pQueueCreateInfos[%" PRIu32
2400                     "].queueFamilyIndex} (=%" PRIu32 ") obtained previously from vkGetPhysicalDeviceQueueFamilyProperties%s (%s).",
2401                     i, requested_queue_count, i, requested_queue_family, conditional_ext_cmd, count_note.c_str());
2402             }
2403         }
2404     }
2405 
2406     return skip;
2407 }
2408 
PreCallValidateCreateDevice(VkPhysicalDevice gpu,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)2409 bool CoreChecks::PreCallValidateCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
2410                                              const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
2411     bool skip = false;
2412     auto pd_state = GetPhysicalDeviceState(gpu);
2413 
2414     // TODO: object_tracker should perhaps do this instead
2415     //       and it does not seem to currently work anyway -- the loader just crashes before this point
2416     if (!pd_state) {
2417         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
2418                         kVUID_Core_DevLimit_MustQueryCount,
2419                         "Invalid call to vkCreateDevice() w/o first calling vkEnumeratePhysicalDevices().");
2420     } else {
2421         skip |= ValidateDeviceQueueCreateInfos(pd_state, pCreateInfo->queueCreateInfoCount, pCreateInfo->pQueueCreateInfos);
2422     }
2423     return skip;
2424 }
2425 
PreCallRecordCreateDevice(VkPhysicalDevice gpu,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice,safe_VkDeviceCreateInfo * modified_create_info)2426 void CoreChecks::PreCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
2427                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
2428                                            safe_VkDeviceCreateInfo *modified_create_info) {
2429     // GPU Validation can possibly turn on device features, so give it a chance to change the create info.
2430     if (enabled.gpu_validation) {
2431         VkPhysicalDeviceFeatures supported_features;
2432         DispatchGetPhysicalDeviceFeatures(gpu, &supported_features);
2433         GpuPreCallRecordCreateDevice(gpu, modified_create_info, &supported_features);
2434     }
2435 }
2436 
// CoreChecks-specific post-record for vkCreateDevice: defers device-state setup to the
// state tracker, then installs the CoreChecks callbacks on the device's validation object.
void CoreChecks::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
    // The state tracker sets up the device state
    StateTracker::PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);

    // Add the callback hooks for the functions that are either broadly or deeply used and that the ValidationStateTracker refactor
    // would be messier without.
    // TODO: Find a good way to do this hooklessly.
    // Look up the device-level CoreChecks object via the dispatch key; `this` here may be the
    // instance-level object, so the hooks must be installed on the device's own instance.
    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeCoreValidation);
    CoreChecks *core_checks = static_cast<CoreChecks *>(validation_data);

    if (enabled.gpu_validation) {
        // The only CoreCheck specific init is for gpu_validation
        core_checks->GpuPostCallRecordCreateDevice(&enabled, pCreateInfo);
        // Hook command-buffer resets so GPU-AV can recycle its per-CB resources
        core_checks->SetCommandBufferResetCallback(
            [core_checks](VkCommandBuffer command_buffer) -> void { core_checks->GpuResetCommandBuffer(command_buffer); });
    }
    // Hook image-view initial-layout tracking into the state tracker
    core_checks->SetSetImageViewInitialLayoutCallback(
        [core_checks](CMD_BUFFER_STATE *cb_node, const IMAGE_VIEW_STATE &iv_state, VkImageLayout layout) -> void {
            core_checks->SetImageViewInitialLayout(cb_node, iv_state, layout);
        });
}
2460 
// State-tracker post-record hook for vkCreateDevice: captures the enabled
// feature set, physical-device properties/limits, and queue family data for
// the newly created device. No-op if creation failed.
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    // The core feature set may be supplied either through pEnabledFeatures or
    // through a VkPhysicalDeviceFeatures2 chained into pNext.
    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    // Record into the device-level validation object for *pDevice, not into
    // `this` (the object this instance-level entry point was invoked on).
    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeCoreValidation);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    // NOTE(review): pd_state is dereferenced without a null check here -- assumes the
    // physical device was seen via vkEnumeratePhysicalDevices; confirm against callers.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    // Device-group support: default to a single physical device when no group info is chained.
    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    // Cache each extension feature struct the application chained into pNext, so
    // later validation can consult individual feature bits without re-walking the chain.
    const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
    if (descriptor_indexing_features) {
        state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
    }

    const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
    if (eight_bit_storage_features) {
        state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
    }

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
    if (float16_int8_features) {
        state_tracker->enabled_features.float16_int8 = *float16_int8_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *uniform_buffer_standard_layout_features =
        lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
    if (uniform_buffer_standard_layout_features) {
        state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
    }

    const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
    if (scalar_block_layout_features) {
        state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
    }

    const auto *buffer_address = lvl_find_in_chain<VkPhysicalDeviceBufferAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_address) {
        state_tracker->enabled_features.buffer_address = *buffer_address;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    // NOTE(review): this chains a *properties* struct (FloatControlsPropertiesKHR)
    // into the enabled-features cache; looks intentional for this API era but verify.
    const auto *float_controls_features = lvl_find_in_chain<VkPhysicalDeviceFloatControlsPropertiesKHR>(pCreateInfo->pNext);
    if (float_controls_features) {
        state_tracker->enabled_features.float_controls = *float_controls_features;
    }

    const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
    if (host_query_reset_features) {
        state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *imageless_framebuffer_features =
        lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
    if (imageless_framebuffer_features) {
        state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    // Per-extension properties; each helper is a no-op when the extension is not enabled.
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        // Standard two-call pattern: query the count, then fill the array.
        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
    }

    // Store queue family data
    if ((pCreateInfo != nullptr) && (pCreateInfo->pQueueCreateInfos != nullptr)) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}
2679 
PreCallRecordDestroyDevice(VkDevice device,const VkAllocationCallbacks * pAllocator)2680 void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
2681     if (!device) return;
2682 
2683     pipelineMap.clear();
2684     renderPassMap.clear();
2685 
2686     // Reset all command buffers before destroying them, to unlink object_bindings.
2687     for (auto &commandBuffer : commandBufferMap) {
2688         ResetCommandBufferState(commandBuffer.first);
2689     }
2690     commandBufferMap.clear();
2691 
2692     // This will also delete all sets in the pool & remove them from setMap
2693     DeleteDescriptorSetPools();
2694     // All sets should be removed
2695     assert(setMap.empty());
2696     descriptorSetLayoutMap.clear();
2697     imageViewMap.clear();
2698     imageMap.clear();
2699     bufferViewMap.clear();
2700     bufferMap.clear();
2701     // Queues persist until device is destroyed
2702     queueMap.clear();
2703     layer_debug_utils_destroy_device(device);
2704 }
PreCallRecordDestroyDevice(VkDevice device,const VkAllocationCallbacks * pAllocator)2705 void CoreChecks::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
2706     if (!device) return;
2707     if (enabled.gpu_validation) {
2708         GpuPreCallRecordDestroyDevice();
2709     }
2710     imageSubresourceMap.clear();
2711     imageLayoutMap.clear();
2712 
2713     StateTracker::PreCallRecordDestroyDevice(device, pAllocator);
2714 }
2715 
2716 // For given stage mask, if Geometry shader stage is on w/o GS being enabled, report geo_error_id
2717 //   and if Tessellation Control or Evaluation shader stages are on w/o TS being enabled, report tess_error_id.
2718 // Similarly for mesh and task shaders.
ValidateStageMaskGsTsEnables(VkPipelineStageFlags stageMask,const char * caller,const char * geo_error_id,const char * tess_error_id,const char * mesh_error_id,const char * task_error_id) const2719 bool CoreChecks::ValidateStageMaskGsTsEnables(VkPipelineStageFlags stageMask, const char *caller, const char *geo_error_id,
2720                                               const char *tess_error_id, const char *mesh_error_id,
2721                                               const char *task_error_id) const {
2722     bool skip = false;
2723     if (!enabled_features.core.geometryShader && (stageMask & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT)) {
2724         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, geo_error_id,
2725                         "%s call includes a stageMask with VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT bit set when device does not have "
2726                         "geometryShader feature enabled.",
2727                         caller);
2728     }
2729     if (!enabled_features.core.tessellationShader &&
2730         (stageMask & (VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT))) {
2731         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, tess_error_id,
2732                         "%s call includes a stageMask with VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT and/or "
2733                         "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT bit(s) set when device does not have "
2734                         "tessellationShader feature enabled.",
2735                         caller);
2736     }
2737     if (!enabled_features.mesh_shader.meshShader && (stageMask & VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV)) {
2738         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, mesh_error_id,
2739                         "%s call includes a stageMask with VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV bit set when device does not have "
2740                         "VkPhysicalDeviceMeshShaderFeaturesNV::meshShader feature enabled.",
2741                         caller);
2742     }
2743     if (!enabled_features.mesh_shader.taskShader && (stageMask & VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV)) {
2744         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, task_error_id,
2745                         "%s call includes a stageMask with VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV bit set when device does not have "
2746                         "VkPhysicalDeviceMeshShaderFeaturesNV::taskShader feature enabled.",
2747                         caller);
2748     }
2749     return skip;
2750 }
2751 
2752 // Loop through bound objects and increment their in_use counts.
IncrementBoundObjects(CMD_BUFFER_STATE const * cb_node)2753 void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
2754     for (auto obj : cb_node->object_bindings) {
2755         auto base_obj = GetStateStructPtrFromObject(obj);
2756         if (base_obj) {
2757             base_obj->in_use.fetch_add(1);
2758         }
2759     }
2760 }
2761 // Track which resources are in-flight by atomically incrementing their "in_use" count
IncrementResources(CMD_BUFFER_STATE * cb_node)2762 void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
2763     cb_node->submitCount++;
2764     cb_node->in_use.fetch_add(1);
2765 
2766     // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
2767     IncrementBoundObjects(cb_node);
2768     // TODO : We should be able to remove the NULL look-up checks from the code below as long as
2769     //  all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
2770     //  should then be flagged prior to calling this function
2771     for (auto event : cb_node->writeEventsBeforeWait) {
2772         auto event_state = GetEventState(event);
2773         if (event_state) event_state->write_in_use++;
2774     }
2775 }
2776 
// Note: This function assumes that the global lock is held by the calling thread.
// For the given queue, verify the queue state up to the given seq number.
// Currently the only check is to make sure that if there are events to be waited on prior to
//  a QueryReset, make sure that all such events have been signalled.
// NOTE(review): no check below ever sets `skip`, so this currently always returns
// false; the traversal only computes per-queue validation frontiers.
bool CoreChecks::VerifyQueueStateToSeq(QUEUE_STATE *initial_queue, uint64_t initial_seq) {
    bool skip = false;

    // sequence number we want to validate up to, per queue
    std::unordered_map<QUEUE_STATE *, uint64_t> target_seqs{{initial_queue, initial_seq}};
    // sequence number we've completed validation for, per queue
    std::unordered_map<QUEUE_STATE *, uint64_t> done_seqs;
    std::vector<QUEUE_STATE *> worklist{initial_queue};

    // Worklist walk over the queue graph induced by wait-semaphore edges.
    while (worklist.size()) {
        auto queue = worklist.back();
        worklist.pop_back();

        auto target_seq = target_seqs[queue];
        // Resume past whatever was already validated (done_seqs) or already retired (queue->seq).
        auto seq = std::max(done_seqs[queue], queue->seq);
        auto sub_it = queue->submissions.begin() + int(seq - queue->seq);  // seq >= queue->seq

        // Scan each still-pending submission up to the target frontier.
        for (; seq < target_seq; ++sub_it, ++seq) {
            for (auto &wait : sub_it->waitSemaphores) {
                auto other_queue = GetQueueState(wait.queue);

                if (other_queue == queue) continue;  // semaphores /always/ point backwards, so no point here.

                auto other_target_seq = std::max(target_seqs[other_queue], wait.seq);
                auto other_done_seq = std::max(done_seqs[other_queue], other_queue->seq);

                // if this wait is for another queue, and covers new sequence
                // numbers beyond what we've already validated, mark the new
                // target seq and (possibly-re)add the queue to the worklist.
                if (other_done_seq < other_target_seq) {
                    target_seqs[other_queue] = other_target_seq;
                    worklist.push_back(other_queue);
                }
            }
        }

        // finally mark the point we've now validated this queue to.
        done_seqs[queue] = seq;
    }

    return skip;
}
2823 
2824 // When the given fence is retired, verify outstanding queue operations through the point of the fence
VerifyQueueStateToFence(VkFence fence)2825 bool CoreChecks::VerifyQueueStateToFence(VkFence fence) {
2826     auto fence_state = GetFenceState(fence);
2827     if (fence_state && fence_state->scope == kSyncScopeInternal && VK_NULL_HANDLE != fence_state->signaler.first) {
2828         return VerifyQueueStateToSeq(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
2829     }
2830     return false;
2831 }
2832 
2833 // Decrement in-use count for objects bound to command buffer
DecrementBoundResources(CMD_BUFFER_STATE const * cb_node)2834 void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
2835     BASE_NODE *base_obj = nullptr;
2836     for (auto obj : cb_node->object_bindings) {
2837         base_obj = GetStateStructPtrFromObject(obj);
2838         if (base_obj) {
2839             base_obj->in_use.fetch_sub(1);
2840         }
2841     }
2842 }
2843 
// Retire (mark complete) work on pQueue up through sequence number `seq`:
// release semaphore/command-buffer in-use counts, propagate event and query
// state, retire the fence of each submission, then recursively retire the
// work on other queues that this work waited on.
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq, bool switch_finished_queries) {
    // Highest sequence number we saw waited on, per other queue; rolled forward at the end.
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        // Waited-on semaphores: release the in-use ref and record how far the
        // signaling queue must have progressed for this wait to complete.
        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        // Signaled semaphores: release the in-use ref taken at submit time.
        for (auto &semaphore : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        // Externally-scoped semaphores referenced by this submission.
        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            // Queries that ended in this command buffer become available once the
            // work retires (only when the caller asked for the transition).
            for (auto queryStatePair : cb_node->queryToStateMap) {
                const QueryState newState =
                    ((queryStatePair.second == QUERYSTATE_ENDED && switch_finished_queries) ? QUERYSTATE_AVAILABLE
                                                                                            : queryStatePair.second);
                pQueue->queryToStateMap[queryStatePair.first] = newState;
                queryToStateMap[queryStatePair.first] = newState;
            }
            // Publish the stage masks recorded by vkCmdSetEvent/vkCmdResetEvent.
            for (auto eventStagePair : cb_node->eventToStageMap) {
                eventMap[eventStagePair.first].stageMask = eventStagePair.second;
            }

            cb_node->in_use.fetch_sub(1);
        }

        // Retire this submission's fence (external-scope fences are left alone).
        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second, switch_finished_queries);
    }
}
2915 
2916 // Submit a fence to a queue, delimiting previous fences and previous untracked
2917 // work by it.
SubmitFence(QUEUE_STATE * pQueue,FENCE_STATE * pFence,uint64_t submitCount)2918 static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
2919     pFence->state = FENCE_INFLIGHT;
2920     pFence->signaler.first = pQueue->queue;
2921     pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
2922 }
2923 
ValidateCommandBufferSimultaneousUse(const CMD_BUFFER_STATE * pCB,int current_submit_count) const2924 bool CoreChecks::ValidateCommandBufferSimultaneousUse(const CMD_BUFFER_STATE *pCB, int current_submit_count) const {
2925     bool skip = false;
2926     if ((pCB->in_use.load() || current_submit_count > 1) &&
2927         !(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
2928         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
2929                         "VUID-vkQueueSubmit-pCommandBuffers-00071", "%s is already in use and is not marked for simultaneous use.",
2930                         report_data->FormatHandle(pCB->commandBuffer).c_str());
2931     }
2932     return skip;
2933 }
2934 
ValidateCommandBufferState(const CMD_BUFFER_STATE * cb_state,const char * call_source,int current_submit_count,const char * vu_id) const2935 bool CoreChecks::ValidateCommandBufferState(const CMD_BUFFER_STATE *cb_state, const char *call_source, int current_submit_count,
2936                                             const char *vu_id) const {
2937     bool skip = false;
2938     if (disabled.command_buffer_state) return skip;
2939     // Validate ONE_TIME_SUBMIT_BIT CB is not being submitted more than once
2940     if ((cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) &&
2941         (cb_state->submitCount + current_submit_count > 1)) {
2942         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
2943                         kVUID_Core_DrawState_CommandBufferSingleSubmitViolation,
2944                         "%s was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted 0x%" PRIxLEAST64
2945                         "times.",
2946                         report_data->FormatHandle(cb_state->commandBuffer).c_str(), cb_state->submitCount + current_submit_count);
2947     }
2948 
2949     // Validate that cmd buffers have been updated
2950     switch (cb_state->state) {
2951         case CB_INVALID_INCOMPLETE:
2952         case CB_INVALID_COMPLETE:
2953             skip |= ReportInvalidCommandBuffer(cb_state, call_source);
2954             break;
2955 
2956         case CB_NEW:
2957             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2958                             (uint64_t)(cb_state->commandBuffer), vu_id,
2959                             "%s used in the call to %s is unrecorded and contains no commands.",
2960                             report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
2961             break;
2962 
2963         case CB_RECORDING:
2964             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2965                             HandleToUint64(cb_state->commandBuffer), kVUID_Core_DrawState_NoEndCommandBuffer,
2966                             "You must call vkEndCommandBuffer() on %s before this call to %s!",
2967                             report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
2968             break;
2969 
2970         default: /* recorded */
2971             break;
2972     }
2973     return skip;
2974 }
2975 
2976 // Check that the queue family index of 'queue' matches one of the entries in pQueueFamilyIndices
ValidImageBufferQueue(const CMD_BUFFER_STATE * cb_node,const VulkanTypedHandle & object,VkQueue queue,uint32_t count,const uint32_t * indices) const2977 bool CoreChecks::ValidImageBufferQueue(const CMD_BUFFER_STATE *cb_node, const VulkanTypedHandle &object, VkQueue queue,
2978                                        uint32_t count, const uint32_t *indices) const {
2979     bool found = false;
2980     bool skip = false;
2981     auto queue_state = GetQueueState(queue);
2982     if (queue_state) {
2983         for (uint32_t i = 0; i < count; i++) {
2984             if (indices[i] == queue_state->queueFamilyIndex) {
2985                 found = true;
2986                 break;
2987             }
2988         }
2989 
2990         if (!found) {
2991             skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object.type], object.handle,
2992                            kVUID_Core_DrawState_InvalidQueueFamily,
2993                            "vkQueueSubmit: %s contains %s which was not created allowing concurrent access to "
2994                            "this queue family %d.",
2995                            report_data->FormatHandle(cb_node->commandBuffer).c_str(), report_data->FormatHandle(object).c_str(),
2996                            queue_state->queueFamilyIndex);
2997         }
2998     }
2999     return skip;
3000 }
3001 
3002 // Validate that queueFamilyIndices of primary command buffers match this queue
3003 // Secondary command buffers were previously validated in vkCmdExecuteCommands().
ValidateQueueFamilyIndices(const CMD_BUFFER_STATE * pCB,VkQueue queue) const3004 bool CoreChecks::ValidateQueueFamilyIndices(const CMD_BUFFER_STATE *pCB, VkQueue queue) const {
3005     bool skip = false;
3006     auto pPool = GetCommandPoolState(pCB->createInfo.commandPool);
3007     auto queue_state = GetQueueState(queue);
3008 
3009     if (pPool && queue_state) {
3010         if (pPool->queueFamilyIndex != queue_state->queueFamilyIndex) {
3011             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3012                             HandleToUint64(pCB->commandBuffer), "VUID-vkQueueSubmit-pCommandBuffers-00074",
3013                             "vkQueueSubmit: Primary %s created in queue family %d is being submitted on %s "
3014                             "from queue family %d.",
3015                             report_data->FormatHandle(pCB->commandBuffer).c_str(), pPool->queueFamilyIndex,
3016                             report_data->FormatHandle(queue).c_str(), queue_state->queueFamilyIndex);
3017         }
3018 
3019         // Ensure that any bound images or buffers created with SHARING_MODE_CONCURRENT have access to the current queue family
3020         for (const auto &object : pCB->object_bindings) {
3021             if (object.type == kVulkanObjectTypeImage) {
3022                 auto image_state = GetImageState(object.Cast<VkImage>());
3023                 if (image_state && image_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
3024                     skip |= ValidImageBufferQueue(pCB, object, queue, image_state->createInfo.queueFamilyIndexCount,
3025                                                   image_state->createInfo.pQueueFamilyIndices);
3026                 }
3027             } else if (object.type == kVulkanObjectTypeBuffer) {
3028                 auto buffer_state = GetBufferState(object.Cast<VkBuffer>());
3029                 if (buffer_state && buffer_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
3030                     skip |= ValidImageBufferQueue(pCB, object, queue, buffer_state->createInfo.queueFamilyIndexCount,
3031                                                   buffer_state->createInfo.pQueueFamilyIndices);
3032                 }
3033             }
3034         }
3035     }
3036 
3037     return skip;
3038 }
3039 
ValidatePrimaryCommandBufferState(const CMD_BUFFER_STATE * pCB,int current_submit_count,QFOTransferCBScoreboards<VkImageMemoryBarrier> * qfo_image_scoreboards,QFOTransferCBScoreboards<VkBufferMemoryBarrier> * qfo_buffer_scoreboards) const3040 bool CoreChecks::ValidatePrimaryCommandBufferState(const CMD_BUFFER_STATE *pCB, int current_submit_count,
3041                                                    QFOTransferCBScoreboards<VkImageMemoryBarrier> *qfo_image_scoreboards,
3042                                                    QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) const {
3043     // Track in-use for resources off of primary and any secondary CBs
3044     bool skip = false;
3045 
3046     // If USAGE_SIMULTANEOUS_USE_BIT not set then CB cannot already be executing on device
3047     skip |= ValidateCommandBufferSimultaneousUse(pCB, current_submit_count);
3048 
3049     skip |= ValidateQueuedQFOTransfers(pCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
3050 
3051     for (auto pSubCB : pCB->linkedCommandBuffers) {
3052         skip |= ValidateQueuedQFOTransfers(pSubCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
3053         // TODO: replace with InvalidateCommandBuffers() at recording.
3054         if ((pSubCB->primaryCommandBuffer != pCB->commandBuffer) &&
3055             !(pSubCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
3056             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
3057                     "VUID-vkQueueSubmit-pCommandBuffers-00073",
3058                     "%s was submitted with secondary %s but that buffer has subsequently been bound to "
3059                     "primary %s and it does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
3060                     report_data->FormatHandle(pCB->commandBuffer).c_str(), report_data->FormatHandle(pSubCB->commandBuffer).c_str(),
3061                     report_data->FormatHandle(pSubCB->primaryCommandBuffer).c_str());
3062         }
3063     }
3064 
3065     skip |= ValidateCommandBufferState(pCB, "vkQueueSubmit()", current_submit_count, "VUID-vkQueueSubmit-pCommandBuffers-00072");
3066 
3067     return skip;
3068 }
3069 
ValidateFenceForSubmit(const FENCE_STATE * pFence) const3070 bool CoreChecks::ValidateFenceForSubmit(const FENCE_STATE *pFence) const {
3071     bool skip = false;
3072 
3073     if (pFence && pFence->scope == kSyncScopeInternal) {
3074         if (pFence->state == FENCE_INFLIGHT) {
3075             // TODO: opportunities for "VUID-vkQueueSubmit-fence-00064", "VUID-vkQueueBindSparse-fence-01114",
3076             // "VUID-vkAcquireNextImageKHR-fence-01287"
3077             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
3078                             HandleToUint64(pFence->fence), kVUID_Core_DrawState_InvalidFence,
3079                             "%s is already in use by another submission.", report_data->FormatHandle(pFence->fence).c_str());
3080         }
3081 
3082         else if (pFence->state == FENCE_RETIRED) {
3083             // TODO: opportunities for "VUID-vkQueueSubmit-fence-00063", "VUID-vkQueueBindSparse-fence-01113",
3084             // "VUID-vkAcquireNextImageKHR-fence-01287"
3085             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
3086                             HandleToUint64(pFence->fence), kVUID_Core_MemTrack_FenceState,
3087                             "%s submitted in SIGNALED state.  Fences must be reset before being submitted",
3088                             report_data->FormatHandle(pFence->fence).c_str());
3089         }
3090     }
3091 
3092     return skip;
3093 }
3094 
PostCallRecordQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence,VkResult result)3095 void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
3096                                                        VkFence fence, VkResult result) {
3097     uint64_t early_retire_seq = 0;
3098     auto pQueue = GetQueueState(queue);
3099     auto pFence = GetFenceState(fence);
3100 
3101     if (pFence) {
3102         if (pFence->scope == kSyncScopeInternal) {
3103             // Mark fence in use
3104             SubmitFence(pQueue, pFence, std::max(1u, submitCount));
3105             if (!submitCount) {
3106                 // If no submissions, but just dropping a fence on the end of the queue,
3107                 // record an empty submission with just the fence, so we can determine
3108                 // its completion.
3109                 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
3110                                                  std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence);
3111             }
3112         } else {
3113             // Retire work up until this fence early, we will not see the wait that corresponds to this signal
3114             early_retire_seq = pQueue->seq + pQueue->submissions.size();
3115             if (!external_sync_warning) {
3116                 external_sync_warning = true;
3117                 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
3118                         kVUID_Core_DrawState_QueueForwardProgress,
3119                         "vkQueueSubmit(): Signaling external %s on %s will disable validation of preceding command "
3120                         "buffer lifecycle states and the in-use status of associated objects.",
3121                         report_data->FormatHandle(fence).c_str(), report_data->FormatHandle(queue).c_str());
3122             }
3123         }
3124     }
3125 
3126     // Now process each individual submit
3127     for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
3128         std::vector<VkCommandBuffer> cbs;
3129         const VkSubmitInfo *submit = &pSubmits[submit_idx];
3130         vector<SEMAPHORE_WAIT> semaphore_waits;
3131         vector<VkSemaphore> semaphore_signals;
3132         vector<VkSemaphore> semaphore_externals;
3133         for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
3134             VkSemaphore semaphore = submit->pWaitSemaphores[i];
3135             auto pSemaphore = GetSemaphoreState(semaphore);
3136             if (pSemaphore) {
3137                 if (pSemaphore->scope == kSyncScopeInternal) {
3138                     if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
3139                         semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
3140                         pSemaphore->in_use.fetch_add(1);
3141                     }
3142                     pSemaphore->signaler.first = VK_NULL_HANDLE;
3143                     pSemaphore->signaled = false;
3144                 } else {
3145                     semaphore_externals.push_back(semaphore);
3146                     pSemaphore->in_use.fetch_add(1);
3147                     if (pSemaphore->scope == kSyncScopeExternalTemporary) {
3148                         pSemaphore->scope = kSyncScopeInternal;
3149                     }
3150                 }
3151             }
3152         }
3153         for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
3154             VkSemaphore semaphore = submit->pSignalSemaphores[i];
3155             auto pSemaphore = GetSemaphoreState(semaphore);
3156             if (pSemaphore) {
3157                 if (pSemaphore->scope == kSyncScopeInternal) {
3158                     pSemaphore->signaler.first = queue;
3159                     pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
3160                     pSemaphore->signaled = true;
3161                     pSemaphore->in_use.fetch_add(1);
3162                     semaphore_signals.push_back(semaphore);
3163                 } else {
3164                     // Retire work up until this submit early, we will not see the wait that corresponds to this signal
3165                     early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
3166                     if (!external_sync_warning) {
3167                         external_sync_warning = true;
3168                         log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
3169                                 HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
3170                                 "vkQueueSubmit(): Signaling external %s on %s will disable validation of preceding "
3171                                 "command buffer lifecycle states and the in-use status of associated objects.",
3172                                 report_data->FormatHandle(semaphore).c_str(), report_data->FormatHandle(queue).c_str());
3173                     }
3174                 }
3175             }
3176         }
3177         for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
3178             auto cb_node = GetCBState(submit->pCommandBuffers[i]);
3179             if (cb_node) {
3180                 cbs.push_back(submit->pCommandBuffers[i]);
3181                 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
3182                     cbs.push_back(secondaryCmdBuffer->commandBuffer);
3183                     IncrementResources(secondaryCmdBuffer);
3184                 }
3185                 IncrementResources(cb_node);
3186             }
3187         }
3188         pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
3189                                          submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
3190     }
3191 
3192     if (early_retire_seq) {
3193         RetireWorkOnQueue(pQueue, early_retire_seq, true);
3194     }
3195 }
3196 
PostCallRecordQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence,VkResult result)3197 void CoreChecks::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
3198                                            VkResult result) {
3199     StateTracker::PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
3200 
3201     // The triply nested for duplicates that in the StateTracker, but avoids the need for two additional callbacks.
3202     for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
3203         const VkSubmitInfo *submit = &pSubmits[submit_idx];
3204         for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
3205             auto cb_node = GetCBState(submit->pCommandBuffers[i]);
3206             if (cb_node) {
3207                 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
3208                     UpdateCmdBufImageLayouts(secondaryCmdBuffer);
3209                     RecordQueuedQFOTransfers(secondaryCmdBuffer);
3210                 }
3211                 UpdateCmdBufImageLayouts(cb_node);
3212                 RecordQueuedQFOTransfers(cb_node);
3213             }
3214         }
3215     }
3216 
3217     if (enabled.gpu_validation) {
3218         GpuPostCallQueueSubmit(queue, submitCount, pSubmits, fence);
3219     }
3220 }
ValidateSemaphoresForSubmit(VkQueue queue,const VkSubmitInfo * submit,unordered_set<VkSemaphore> * unsignaled_sema_arg,unordered_set<VkSemaphore> * signaled_sema_arg,unordered_set<VkSemaphore> * internal_sema_arg) const3221 bool CoreChecks::ValidateSemaphoresForSubmit(VkQueue queue, const VkSubmitInfo *submit,
3222                                              unordered_set<VkSemaphore> *unsignaled_sema_arg,
3223                                              unordered_set<VkSemaphore> *signaled_sema_arg,
3224                                              unordered_set<VkSemaphore> *internal_sema_arg) const {
3225     bool skip = false;
3226     unordered_set<VkSemaphore> &signaled_semaphores = *signaled_sema_arg;
3227     unordered_set<VkSemaphore> &unsignaled_semaphores = *unsignaled_sema_arg;
3228     unordered_set<VkSemaphore> &internal_semaphores = *internal_sema_arg;
3229 
3230     for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
3231         skip |=
3232             ValidateStageMaskGsTsEnables(submit->pWaitDstStageMask[i], "vkQueueSubmit()",
3233                                          "VUID-VkSubmitInfo-pWaitDstStageMask-00076", "VUID-VkSubmitInfo-pWaitDstStageMask-00077",
3234                                          "VUID-VkSubmitInfo-pWaitDstStageMask-02089", "VUID-VkSubmitInfo-pWaitDstStageMask-02090");
3235         VkSemaphore semaphore = submit->pWaitSemaphores[i];
3236         const auto *pSemaphore = GetSemaphoreState(semaphore);
3237         if (pSemaphore && (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
3238             if (unsignaled_semaphores.count(semaphore) || (!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled))) {
3239                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
3240                                 HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
3241                                 "%s is waiting on %s that has no way to be signaled.", report_data->FormatHandle(queue).c_str(),
3242                                 report_data->FormatHandle(semaphore).c_str());
3243             } else {
3244                 signaled_semaphores.erase(semaphore);
3245                 unsignaled_semaphores.insert(semaphore);
3246             }
3247         }
3248         if (pSemaphore && pSemaphore->scope == kSyncScopeExternalTemporary) {
3249             internal_semaphores.insert(semaphore);
3250         }
3251     }
3252     for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
3253         VkSemaphore semaphore = submit->pSignalSemaphores[i];
3254         const auto *pSemaphore = GetSemaphoreState(semaphore);
3255         if (pSemaphore && (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
3256             if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
3257                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
3258                                 HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
3259                                 "%s is signaling %s that was previously signaled by %s but has not since "
3260                                 "been waited on by any queue.",
3261                                 report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str(),
3262                                 report_data->FormatHandle(pSemaphore->signaler.first).c_str());
3263             } else {
3264                 unsignaled_semaphores.erase(semaphore);
3265                 signaled_semaphores.insert(semaphore);
3266             }
3267         }
3268     }
3269 
3270     return skip;
3271 }
ValidateCommandBuffersForSubmit(VkQueue queue,const VkSubmitInfo * submit,ImageSubresPairLayoutMap * localImageLayoutMap_arg,vector<VkCommandBuffer> * current_cmds_arg) const3272 bool CoreChecks::ValidateCommandBuffersForSubmit(VkQueue queue, const VkSubmitInfo *submit,
3273                                                  ImageSubresPairLayoutMap *localImageLayoutMap_arg,
3274                                                  vector<VkCommandBuffer> *current_cmds_arg) const {
3275     bool skip = false;
3276 
3277     ImageSubresPairLayoutMap &localImageLayoutMap = *localImageLayoutMap_arg;
3278     vector<VkCommandBuffer> &current_cmds = *current_cmds_arg;
3279 
3280     QFOTransferCBScoreboards<VkImageMemoryBarrier> qfo_image_scoreboards;
3281     QFOTransferCBScoreboards<VkBufferMemoryBarrier> qfo_buffer_scoreboards;
3282 
3283     for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
3284         const auto *cb_node = GetCBState(submit->pCommandBuffers[i]);
3285         if (cb_node) {
3286             skip |= ValidateCmdBufImageLayouts(cb_node, imageLayoutMap, &localImageLayoutMap);
3287             current_cmds.push_back(submit->pCommandBuffers[i]);
3288             skip |= ValidatePrimaryCommandBufferState(
3289                 cb_node, (int)std::count(current_cmds.begin(), current_cmds.end(), submit->pCommandBuffers[i]),
3290                 &qfo_image_scoreboards, &qfo_buffer_scoreboards);
3291             skip |= ValidateQueueFamilyIndices(cb_node, queue);
3292 
3293             // Potential early exit here as bad object state may crash in delayed function calls
3294             if (skip) {
3295                 return true;
3296             }
3297 
3298             // Call submit-time functions to validate/update state
3299             for (auto &function : cb_node->queue_submit_functions) {
3300                 skip |= function();
3301             }
3302             for (auto &function : cb_node->eventUpdates) {
3303                 skip |= function(queue);
3304             }
3305             for (auto &function : cb_node->queryUpdates) {
3306                 skip |= function(queue);
3307             }
3308         }
3309     }
3310     return skip;
3311 }
3312 
PreCallValidateQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)3313 bool CoreChecks::PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
3314     const auto *pFence = GetFenceState(fence);
3315     bool skip = ValidateFenceForSubmit(pFence);
3316     if (skip) {
3317         return true;
3318     }
3319 
3320     unordered_set<VkSemaphore> signaled_semaphores;
3321     unordered_set<VkSemaphore> unsignaled_semaphores;
3322     unordered_set<VkSemaphore> internal_semaphores;
3323     vector<VkCommandBuffer> current_cmds;
3324     ImageSubresPairLayoutMap localImageLayoutMap;
3325     // Now verify each individual submit
3326     for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
3327         const VkSubmitInfo *submit = &pSubmits[submit_idx];
3328         skip |= ValidateSemaphoresForSubmit(queue, submit, &unsignaled_semaphores, &signaled_semaphores, &internal_semaphores);
3329         skip |= ValidateCommandBuffersForSubmit(queue, submit, &localImageLayoutMap, &current_cmds);
3330 
3331         auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupSubmitInfo>(submit->pNext);
3332         if (chained_device_group_struct && chained_device_group_struct->commandBufferCount > 0) {
3333             for (uint32_t i = 0; i < chained_device_group_struct->commandBufferCount; ++i) {
3334                 skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_device_group_struct->pCommandBufferDeviceMasks[i],
3335                                                                 VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
3336                                                                 "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086");
3337             }
3338         }
3339     }
3340     return skip;
3341 }
PreCallRecordQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)3342 void CoreChecks::PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
3343     if (enabled.gpu_validation && device_extensions.vk_ext_descriptor_indexing) {
3344         GpuPreCallRecordQueueSubmit(queue, submitCount, pSubmits, fence);
3345     }
3346 }
3347 
3348 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3349 // Android-specific validation that uses types defined only on Android and only for NDK versions
3350 // that support the VK_ANDROID_external_memory_android_hardware_buffer extension.
// This chunk could move into a separate core_validation_android.cpp file... ?
3352 
3353 // clang-format off
3354 
3355 // Map external format and usage flags to/from equivalent Vulkan flags
3356 // (Tables as of v1.1.92)
3357 
3358 // AHardwareBuffer Format                       Vulkan Format
3359 // ======================                       =============
3360 // AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM        VK_FORMAT_R8G8B8A8_UNORM
3361 // AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM        VK_FORMAT_R8G8B8A8_UNORM
3362 // AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM          VK_FORMAT_R8G8B8_UNORM
3363 // AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM          VK_FORMAT_R5G6B5_UNORM_PACK16
3364 // AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT    VK_FORMAT_R16G16B16A16_SFLOAT
3365 // AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM     VK_FORMAT_A2B10G10R10_UNORM_PACK32
3366 // AHARDWAREBUFFER_FORMAT_D16_UNORM             VK_FORMAT_D16_UNORM
3367 // AHARDWAREBUFFER_FORMAT_D24_UNORM             VK_FORMAT_X8_D24_UNORM_PACK32
3368 // AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT     VK_FORMAT_D24_UNORM_S8_UINT
3369 // AHARDWAREBUFFER_FORMAT_D32_FLOAT             VK_FORMAT_D32_SFLOAT
3370 // AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT     VK_FORMAT_D32_SFLOAT_S8_UINT
3371 // AHARDWAREBUFFER_FORMAT_S8_UINT               VK_FORMAT_S8_UINT
3372 
3373 // The AHARDWAREBUFFER_FORMAT_* are an enum in the NDK headers, but get passed in to Vulkan
3374 // as uint32_t. Casting the enums here avoids scattering casts around in the code.
// AHardwareBuffer format -> Vulkan format equivalents (see table above).
std::map<uint32_t, VkFormat> ahb_format_map_a2v = {
    { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM,        VK_FORMAT_R8G8B8A8_UNORM },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM,        VK_FORMAT_R8G8B8A8_UNORM },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM,          VK_FORMAT_R8G8B8_UNORM },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM,          VK_FORMAT_R5G6B5_UNORM_PACK16 },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT,    VK_FORMAT_R16G16B16A16_SFLOAT },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM,     VK_FORMAT_A2B10G10R10_UNORM_PACK32 },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_D16_UNORM,             VK_FORMAT_D16_UNORM },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_D24_UNORM,             VK_FORMAT_X8_D24_UNORM_PACK32 },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT,     VK_FORMAT_D24_UNORM_S8_UINT },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_D32_FLOAT,             VK_FORMAT_D32_SFLOAT },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT,     VK_FORMAT_D32_SFLOAT_S8_UINT },
    { (uint32_t)AHARDWAREBUFFER_FORMAT_S8_UINT,               VK_FORMAT_S8_UINT }
};

// AHardwareBuffer Usage                        Vulkan Usage or Creation Flag (Intermixed - Aargh!)
// =====================                        ===================================================
// None                                         VK_IMAGE_USAGE_TRANSFER_SRC_BIT
// None                                         VK_IMAGE_USAGE_TRANSFER_DST_BIT
// AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE      VK_IMAGE_USAGE_SAMPLED_BIT
// AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE      VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
// AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT       VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
// AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP           VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
// AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE    None
// AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT      VK_IMAGE_CREATE_PROTECTED_BIT
// None                                         VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
// None                                         VK_IMAGE_CREATE_EXTENDED_USAGE_BIT

// Same casting rationale. De-mixing the table to prevent type confusion and aliasing
// AHB usage -> Vulkan image USAGE flags.
std::map<uint64_t, VkImageUsageFlags> ahb_usage_map_a2v = {
    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE,    (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) },
    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT,     VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT },
    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE,  0 },   // No equivalent
};

// AHB usage -> Vulkan image CREATE flags.
std::map<uint64_t, VkImageCreateFlags> ahb_create_map_a2v = {
    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP,         VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT },
    { (uint64_t)AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT,    VK_IMAGE_CREATE_PROTECTED_BIT },
    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE,  0 },   // No equivalent
};

// Reverse direction: Vulkan image USAGE flags -> AHB usage.
std::map<VkImageUsageFlags, uint64_t> ahb_usage_map_v2a = {
    { VK_IMAGE_USAGE_SAMPLED_BIT,           (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE },
    { VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,  (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE },
    { VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,  (uint64_t)AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT  },
};

// Reverse direction: Vulkan image CREATE flags -> AHB usage.
std::map<VkImageCreateFlags, uint64_t> ahb_create_map_v2a = {
    { VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,  (uint64_t)AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP },
    { VK_IMAGE_CREATE_PROTECTED_BIT,        (uint64_t)AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT },
};
3426 
3427 // clang-format on
3428 
3429 //
3430 // AHB-extension new APIs
3431 //
PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevice device,const struct AHardwareBuffer * buffer,VkAndroidHardwareBufferPropertiesANDROID * pProperties)3432 bool CoreChecks::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer,
3433                                                                           VkAndroidHardwareBufferPropertiesANDROID *pProperties) {
3434     bool skip = false;
3435     //  buffer must be a valid Android hardware buffer object with at least one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags.
3436     AHardwareBuffer_Desc ahb_desc;
3437     AHardwareBuffer_describe(buffer, &ahb_desc);
3438     uint32_t required_flags = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
3439                               AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE |
3440                               AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
3441     if (0 == (ahb_desc.usage & required_flags)) {
3442         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
3443                         "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884",
3444                         "vkGetAndroidHardwareBufferPropertiesANDROID: The AHardwareBuffer's AHardwareBuffer_Desc.usage (0x%" PRIx64
3445                         ") does not have any AHARDWAREBUFFER_USAGE_GPU_* flags set.",
3446                         ahb_desc.usage);
3447     }
3448     return skip;
3449 }
3450 
PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device,const struct AHardwareBuffer * buffer,VkAndroidHardwareBufferPropertiesANDROID * pProperties,VkResult result)3451 void CoreChecks::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer,
3452                                                                          VkAndroidHardwareBufferPropertiesANDROID *pProperties,
3453                                                                          VkResult result) {
3454     if (VK_SUCCESS != result) return;
3455     auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
3456     if (ahb_format_props) {
3457         ahb_ext_formats_set.insert(ahb_format_props->externalFormat);
3458     }
3459 }
3460 
PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer)3461 bool CoreChecks::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device,
3462                                                                       const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
3463                                                                       struct AHardwareBuffer **pBuffer) {
3464     bool skip = false;
3465     const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(pInfo->memory);
3466 
3467     // VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must have been included in
3468     // VkExportMemoryAllocateInfoKHR::handleTypes when memory was created.
3469     if (!mem_info->is_export ||
3470         (0 == (mem_info->export_handle_type_flags & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID))) {
3471         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
3472                         "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882",
3473                         "vkGetMemoryAndroidHardwareBufferANDROID: %s was not allocated for export, or the "
3474                         "export handleTypes (0x%" PRIx32
3475                         ") did not contain VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID.",
3476                         report_data->FormatHandle(pInfo->memory).c_str(), mem_info->export_handle_type_flags);
3477     }
3478 
3479     // If the pNext chain of the VkMemoryAllocateInfo used to allocate memory included a VkMemoryDedicatedAllocateInfo
3480     // with non-NULL image member, then that image must already be bound to memory.
3481     if (mem_info->is_dedicated && (VK_NULL_HANDLE != mem_info->dedicated_image)) {
3482         const auto image_state = GetImageState(mem_info->dedicated_image);
3483         if ((nullptr == image_state) || (0 == (image_state->GetBoundMemory().count(pInfo->memory)))) {
3484             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
3485                             HandleToUint64(device), "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883",
3486                             "vkGetMemoryAndroidHardwareBufferANDROID: %s was allocated using a dedicated "
3487                             "%s, but that image is not bound to the VkDeviceMemory object.",
3488                             report_data->FormatHandle(pInfo->memory).c_str(),
3489                             report_data->FormatHandle(mem_info->dedicated_image).c_str());
3490         }
3491     }
3492 
3493     return skip;
3494 }
3495 
3496 //
3497 // AHB-specific validation within non-AHB APIs
3498 //
// Validate the AHardwareBuffer-specific rules of vkAllocateMemory.  Called only when the
// VK_ANDROID_external_memory_android_hardware_buffer device extension is enabled.
// Three cases are distinguished:
//   * import:  a chained VkImportAndroidHardwareBufferInfoANDROID with a non-NULL buffer
//   * export:  a chained VkExportMemoryAllocateInfo requesting the AHB handle type
//              together with a dedicated image (allocationSize must then be 0)
//   * neither: an ordinary allocation (allocationSize must be non-zero)
// Returns true if any validation error was logged.
bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc_info) const {
    bool skip = false;
    auto import_ahb_info = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(alloc_info->pNext);
    auto exp_mem_alloc_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(alloc_info->pNext);
    auto mem_ded_alloc_info = lvl_find_in_chain<VkMemoryDedicatedAllocateInfo>(alloc_info->pNext);

    if ((import_ahb_info) && (NULL != import_ahb_info->buffer)) {
        // This is an import with handleType of VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID
        AHardwareBuffer_Desc ahb_desc = {};
        AHardwareBuffer_describe(import_ahb_info->buffer, &ahb_desc);

        //  If buffer is not NULL, it must be a valid Android hardware buffer object with AHardwareBuffer_Desc::format and
        //  AHardwareBuffer_Desc::usage compatible with Vulkan as described in Android Hardware Buffers.
        //
        //  BLOB & GPU_DATA_BUFFER combo specifically allowed
        if ((AHARDWAREBUFFER_FORMAT_BLOB != ahb_desc.format) || (0 == (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
            // Otherwise, must be a combination from the AHardwareBuffer Format and Usage Equivalence tables
            // Usage must have at least one bit from the table. It may have additional bits not in the table
            uint64_t ahb_equiv_usage_bits = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
                                            AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE |
                                            AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
            if ((0 == (ahb_desc.usage & ahb_equiv_usage_bits)) || (0 == ahb_format_map_a2v.count(ahb_desc.format))) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                                HandleToUint64(device), "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881",
                                "vkAllocateMemory: The AHardwareBuffer_Desc's format ( %u ) and/or usage ( 0x%" PRIx64
                                " ) are not compatible with Vulkan.",
                                ahb_desc.format, ahb_desc.usage);
            }
        }

        // Collect external buffer info
        // Map the AHB usage bits onto the equivalent Vulkan buffer usage and ask the
        // physical device for the resulting external-buffer properties.
        VkPhysicalDeviceExternalBufferInfo pdebi = {};
        pdebi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
        pdebi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
        if (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE & ahb_desc.usage) {
            pdebi.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE];
        }
        if (AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT & ahb_desc.usage) {
            pdebi.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT];
        }
        VkExternalBufferProperties ext_buf_props = {};
        ext_buf_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;

        DispatchGetPhysicalDeviceExternalBufferProperties(physical_device, &pdebi, &ext_buf_props);

        // Collect external format info
        // Likewise build an image-format query mirroring the AHB's format, usage, and
        // create flags to see whether an equivalent image is importable.
        VkPhysicalDeviceExternalImageFormatInfo pdeifi = {};
        pdeifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
        pdeifi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
        VkPhysicalDeviceImageFormatInfo2 pdifi2 = {};
        pdifi2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
        pdifi2.pNext = &pdeifi;
        if (0 < ahb_format_map_a2v.count(ahb_desc.format)) pdifi2.format = ahb_format_map_a2v[ahb_desc.format];
        pdifi2.type = VK_IMAGE_TYPE_2D;           // Seems likely
        pdifi2.tiling = VK_IMAGE_TILING_OPTIMAL;  // Ditto
        if (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE & ahb_desc.usage) {
            pdifi2.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE];
        }
        if (AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT & ahb_desc.usage) {
            pdifi2.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT];
        }
        if (AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP & ahb_desc.usage) {
            pdifi2.flags |= ahb_create_map_a2v[AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP];
        }
        if (AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT & ahb_desc.usage) {
            pdifi2.flags |= ahb_create_map_a2v[AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT];
        }

        VkExternalImageFormatProperties ext_img_fmt_props = {};
        ext_img_fmt_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
        VkImageFormatProperties2 ifp2 = {};
        ifp2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
        ifp2.pNext = &ext_img_fmt_props;

        VkResult fmt_lookup_result = GetPDImageFormatProperties2(&pdifi2, &ifp2);

        //  If buffer is not NULL, Android hardware buffers must be supported for import, as reported by
        //  VkExternalImageFormatProperties or VkExternalBufferProperties.
        if (0 == (ext_buf_props.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT)) {
            if ((VK_SUCCESS != fmt_lookup_result) || (0 == (ext_img_fmt_props.externalMemoryProperties.externalMemoryFeatures &
                                                            VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT))) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                                HandleToUint64(device), "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880",
                                "vkAllocateMemory: Neither the VkExternalImageFormatProperties nor the VkExternalBufferProperties "
                                "structs for the AHardwareBuffer include the VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT flag.");
            }
        }

        // Retrieve buffer and format properties of the provided AHardwareBuffer
        VkAndroidHardwareBufferFormatPropertiesANDROID ahb_format_props = {};
        ahb_format_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
        VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
        ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
        ahb_props.pNext = &ahb_format_props;
        DispatchGetAndroidHardwareBufferPropertiesANDROID(device, import_ahb_info->buffer, &ahb_props);

        // allocationSize must be the size returned by vkGetAndroidHardwareBufferPropertiesANDROID for the Android hardware buffer
        if (alloc_info->allocationSize != ahb_props.allocationSize) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                            HandleToUint64(device), "VUID-VkMemoryAllocateInfo-allocationSize-02383",
                            "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
                            "struct, allocationSize (%" PRId64
                            ") does not match the AHardwareBuffer's reported allocationSize (%" PRId64 ").",
                            alloc_info->allocationSize, ahb_props.allocationSize);
        }

        // memoryTypeIndex must be one of those returned by vkGetAndroidHardwareBufferPropertiesANDROID for the AHardwareBuffer
        // Note: memoryTypeIndex is an index, memoryTypeBits is a bitmask
        uint32_t mem_type_bitmask = 1 << alloc_info->memoryTypeIndex;
        if (0 == (mem_type_bitmask & ahb_props.memoryTypeBits)) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                            HandleToUint64(device), "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385",
                            "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
                            "struct, memoryTypeIndex (%" PRId32
                            ") does not correspond to a bit set in AHardwareBuffer's reported "
                            "memoryTypeBits bitmask (0x%" PRIx32 ").",
                            alloc_info->memoryTypeIndex, ahb_props.memoryTypeBits);
        }

        // Checks for allocations without a dedicated allocation requirement
        if ((nullptr == mem_ded_alloc_info) || (VK_NULL_HANDLE == mem_ded_alloc_info->image)) {
            // the Android hardware buffer must have a format of AHARDWAREBUFFER_FORMAT_BLOB and a usage that includes
            // AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER
            if (((uint64_t)AHARDWAREBUFFER_FORMAT_BLOB != ahb_desc.format) ||
                (0 == (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
                    "VUID-VkMemoryAllocateInfo-pNext-02384",
                    "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
                    "struct without a dedicated allocation requirement, while the AHardwareBuffer_Desc's format ( %u ) is not "
                    "AHARDWAREBUFFER_FORMAT_BLOB or usage (0x%" PRIx64 ") does not include AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER.",
                    ahb_desc.format, ahb_desc.usage);
            }
        } else {  // Checks specific to import with a dedicated allocation requirement
            // NOTE(review): GetImageState is assumed to return non-null here, presumably
            // because the dedicated image handle was validated earlier -- TODO confirm.
            const VkImageCreateInfo *ici = &(GetImageState(mem_ded_alloc_info->image)->createInfo);

            // The Android hardware buffer's usage must include at least one of AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT or
            // AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
            if (0 == (ahb_desc.usage & (AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE))) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
                    "VUID-VkMemoryAllocateInfo-pNext-02386",
                    "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID and a "
                    "dedicated allocation requirement, while the AHardwareBuffer's usage (0x%" PRIx64
                    ") contains neither AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT nor AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE.",
                    ahb_desc.usage);
            }

            //  the format of image must be VK_FORMAT_UNDEFINED or the format returned by
            //  vkGetAndroidHardwareBufferPropertiesANDROID
            if ((ici->format != ahb_format_props.format) && (VK_FORMAT_UNDEFINED != ici->format)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                                HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02387",
                                "vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
                                "VkImportAndroidHardwareBufferInfoANDROID, the dedicated allocation image's "
                                "format (%s) is not VK_FORMAT_UNDEFINED and does not match the AHardwareBuffer's format (%s).",
                                string_VkFormat(ici->format), string_VkFormat(ahb_format_props.format));
            }

            // The width, height, and array layer dimensions of image and the Android hardwarebuffer must be identical
            if ((ici->extent.width != ahb_desc.width) || (ici->extent.height != ahb_desc.height) ||
                (ici->arrayLayers != ahb_desc.layers)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                                HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02388",
                                "vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
                                "VkImportAndroidHardwareBufferInfoANDROID, the dedicated allocation image's "
                                "width, height, and arrayLayers (%" PRId32 " %" PRId32 " %" PRId32
                                ") do not match those of the AHardwareBuffer (%" PRId32 " %" PRId32 " %" PRId32 ").",
                                ici->extent.width, ici->extent.height, ici->arrayLayers, ahb_desc.width, ahb_desc.height,
                                ahb_desc.layers);
            }

            // If the Android hardware buffer's usage includes AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE, the image must
            // have either a full mipmap chain or exactly 1 mip level.
            //
            // NOTE! The language of this VUID contradicts the language in the spec (1.1.93), which says "The
            // AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE flag does not correspond to a Vulkan image usage or creation flag. Instead,
            // its presence indicates that the Android hardware buffer contains a complete mipmap chain, and its absence indicates
            // that the Android hardware buffer contains only a single mip level."
            //
            // TODO: This code implements the VUID's meaning, but it seems likely that the spec text is actually correct.
            // Clarification requested.
            if ((ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE) && (ici->mipLevels != 1) &&
                (ici->mipLevels != FullMipChainLevels(ici->extent))) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                            HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02389",
                            "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
                            "usage includes AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE but mipLevels (%" PRId32
                            ") is neither 1 nor full mip "
                            "chain levels (%" PRId32 ").",
                            ici->mipLevels, FullMipChainLevels(ici->extent));
            }

            // each bit set in the usage of image must be listed in AHardwareBuffer Usage Equivalence, and if there is a
            // corresponding AHARDWAREBUFFER_USAGE bit listed that bit must be included in the Android hardware buffer's
            // AHardwareBuffer_Desc::usage
            if (ici->usage &
                ~(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                  VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                            HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02390",
                            "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
                            "dedicated image usage bits include one or more with no AHardwareBuffer equivalent.");
            }

            // For each image usage bit that does have an AHB equivalent, the equivalent
            // AHARDWAREBUFFER_USAGE bit must be present in the buffer's usage.
            bool illegal_usage = false;
            std::vector<VkImageUsageFlags> usages = {VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
                                                     VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT};
            for (VkImageUsageFlags ubit : usages) {
                if (ici->usage & ubit) {
                    uint64_t ahb_usage = ahb_usage_map_v2a[ubit];
                    if (0 == (ahb_usage & ahb_desc.usage)) illegal_usage = true;
                }
            }
            if (illegal_usage) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                                HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02390",
                                "vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
                                "VkImportAndroidHardwareBufferInfoANDROID, one or more AHardwareBuffer usage bits equivalent to "
                                "the provided image's usage bits are missing from AHardwareBuffer_Desc.usage.");
            }
        }
    } else {  // Not an import
        if ((exp_mem_alloc_info) && (mem_ded_alloc_info) &&
            (0 != (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID & exp_mem_alloc_info->handleTypes)) &&
            (VK_NULL_HANDLE != mem_ded_alloc_info->image)) {
            // This is an Android HW Buffer export
            // Dedicated AHB exports derive their size from the buffer, so allocationSize must be 0.
            if (0 != alloc_info->allocationSize) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
                                HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-01874",
                                "vkAllocateMemory: pNext chain indicates a dedicated Android Hardware Buffer export allocation, "
                                "but allocationSize is non-zero.");
            }
        } else {
            if (0 == alloc_info->allocationSize) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
                    "VUID-VkMemoryAllocateInfo-pNext-01874",
                    "vkAllocateMemory: pNext chain does not indicate a dedicated export allocation, but allocationSize is 0.");
            };
        }
    }
    return skip;
}
3745 
ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const3746 bool CoreChecks::ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const {
3747     bool skip = false;
3748 
3749     const IMAGE_STATE *image_state = GetImageState(image);
3750     if (image_state->imported_ahb && (0 == image_state->GetBoundMemory().size())) {
3751         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
3752                         "VUID-VkImageMemoryRequirementsInfo2-image-01897",
3753                         "vkGetImageMemoryRequirements2: Attempt to query layout from an image created with "
3754                         "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID handleType, which has not yet been "
3755                         "bound to memory.");
3756     }
3757     return skip;
3758 }
3759 
ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const debug_report_data * report_data,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,const VkImageFormatProperties2 * pImageFormatProperties)3760 static bool ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const debug_report_data *report_data,
3761                                                                    const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
3762                                                                    const VkImageFormatProperties2 *pImageFormatProperties) {
3763     bool skip = false;
3764     const VkAndroidHardwareBufferUsageANDROID *ahb_usage =
3765         lvl_find_in_chain<VkAndroidHardwareBufferUsageANDROID>(pImageFormatProperties->pNext);
3766     if (nullptr != ahb_usage) {
3767         const VkPhysicalDeviceExternalImageFormatInfo *pdeifi =
3768             lvl_find_in_chain<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
3769         if ((nullptr == pdeifi) || (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID != pdeifi->handleType)) {
3770             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3771                             "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868",
3772                             "vkGetPhysicalDeviceImageFormatProperties2: pImageFormatProperties includes a chained "
3773                             "VkAndroidHardwareBufferUsageANDROID struct, but pImageFormatInfo does not include a chained "
3774                             "VkPhysicalDeviceExternalImageFormatInfo struct with handleType "
3775                             "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID.");
3776         }
3777     }
3778     return skip;
3779 }
3780 
ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo * create_info) const3781 bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info) const {
3782     const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
3783     if ((nullptr != ext_format_android) && (0 != ext_format_android->externalFormat)) {
3784         if (VK_FORMAT_UNDEFINED != create_info->format) {
3785             return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
3786                            "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904",
3787                            "vkCreateSamplerYcbcrConversion[KHR]: CreateInfo format is not VK_FORMAT_UNDEFINED while "
3788                            "there is a chained VkExternalFormatANDROID struct.");
3789         }
3790     } else if (VK_FORMAT_UNDEFINED == create_info->format) {
3791         return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
3792                        "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904",
3793                        "vkCreateSamplerYcbcrConversion[KHR]: CreateInfo format is VK_FORMAT_UNDEFINED with no chained "
3794                        "VkExternalFormatANDROID struct.");
3795     }
3796     return false;
3797 }
3798 
RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo * create_info,VkSamplerYcbcrConversion ycbcr_conversion)3799 void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
3800                                                                        VkSamplerYcbcrConversion ycbcr_conversion) {
3801     const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
3802     if (ext_format_android && (0 != ext_format_android->externalFormat)) {
3803         ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
3804     }
3805 };
3806 
RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion)3807 void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
3808     ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
3809 };
3810 
3811 #else  // !VK_USE_PLATFORM_ANDROID_KHR
3812 
// Non-Android stub: AHB allocation validation is a no-op when
// VK_USE_PLATFORM_ANDROID_KHR is not defined.
bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc_info) const { return false; }
3814 
// Non-Android stub: nothing to validate when AHB support is compiled out.
static bool ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const debug_report_data *report_data,
                                                                   const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
                                                                   const VkImageFormatProperties2 *pImageFormatProperties) {
    return false;
}
3820 
// Non-Android stub: nothing to validate when AHB support is compiled out.
bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info) const {
    return false;
}
3824 
// Non-Android stub: nothing to validate when AHB support is compiled out.
bool CoreChecks::ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const { return false; }
3826 
// Non-Android stub: no external-format state to record.
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion){};
3829 
// Non-Android stub: no external-format state to erase.
void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
3831 
3832 #endif  // VK_USE_PLATFORM_ANDROID_KHR
3833 
PreCallValidateAllocateMemory(VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)3834 bool CoreChecks::PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
3835                                                const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
3836     bool skip = false;
3837     if (memObjMap.size() >= phys_dev_props.limits.maxMemoryAllocationCount) {
3838         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
3839                         kVUIDUndefined, "Number of currently valid memory objects is not less than the maximum allowed (%u).",
3840                         phys_dev_props.limits.maxMemoryAllocationCount);
3841     }
3842 
3843     if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
3844         skip |= ValidateAllocateMemoryANDROID(pAllocateInfo);
3845     } else {
3846         if (0 == pAllocateInfo->allocationSize) {
3847             skip |=
3848                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
3849                         "VUID-VkMemoryAllocateInfo-allocationSize-00638", "vkAllocateMemory: allocationSize is 0.");
3850         };
3851     }
3852 
3853     auto chained_flags_struct = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
3854     if (chained_flags_struct && chained_flags_struct->flags == VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT) {
3855         skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_flags_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
3856                                                         HandleToUint64(device), "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675");
3857         skip |= ValidateDeviceMaskToZero(chained_flags_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
3858                                          HandleToUint64(device), "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676");
3859     }
3860     // TODO: VUIDs ending in 00643, 00644, 00646, 00647, 01742, 01743, 01745, 00645, 00648, 01744
3861     return skip;
3862 }
3863 
PostCallRecordAllocateMemory(VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory,VkResult result)3864 void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
3865                                                           const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
3866                                                           VkResult result) {
3867     if (VK_SUCCESS == result) {
3868         AddMemObjInfo(device, *pMemory, pAllocateInfo);
3869     }
3870     return;
3871 }
3872 
// For the given obj node, if it is in use, flag a validation error and return the callback result, else return false
ValidateObjectNotInUse(const BASE_NODE * obj_node,const VulkanTypedHandle & obj_struct,const char * caller_name,const char * error_code) const3874 bool CoreChecks::ValidateObjectNotInUse(const BASE_NODE *obj_node, const VulkanTypedHandle &obj_struct, const char *caller_name,
3875                                         const char *error_code) const {
3876     if (disabled.object_in_use) return false;
3877     bool skip = false;
3878     if (obj_node->in_use.load()) {
3879         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_struct.type], obj_struct.handle,
3880                         error_code, "Cannot call %s on %s that is currently in use by a command buffer.", caller_name,
3881                         report_data->FormatHandle(obj_struct).c_str());
3882     }
3883     return skip;
3884 }
3885 
PreCallValidateFreeMemory(VkDevice device,VkDeviceMemory mem,const VkAllocationCallbacks * pAllocator)3886 bool CoreChecks::PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
3887     const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
3888     const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
3889     bool skip = false;
3890     if (mem_info) {
3891         skip |= ValidateObjectNotInUse(mem_info, obj_struct, "vkFreeMemory", "VUID-vkFreeMemory-memory-00677");
3892         for (const auto &obj : mem_info->obj_bindings) {
3893             log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, get_debug_report_enum[obj.type], obj.handle,
3894                     kVUID_Core_MemTrack_FreedMemRef, "%s still has a reference to %s.", report_data->FormatHandle(obj).c_str(),
3895                     report_data->FormatHandle(mem_info->mem).c_str());
3896         }
3897     }
3898     return skip;
3899 }
3900 
PreCallRecordFreeMemory(VkDevice device,VkDeviceMemory mem,const VkAllocationCallbacks * pAllocator)3901 void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
3902     if (!mem) return;
3903     DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
3904     const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
3905 
3906     // Clear mem binding for any bound objects
3907     for (const auto &obj : mem_info->obj_bindings) {
3908         BINDABLE *bindable_state = nullptr;
3909         switch (obj.type) {
3910             case kVulkanObjectTypeImage:
3911                 bindable_state = GetImageState(obj.Cast<VkImage>());
3912                 break;
3913             case kVulkanObjectTypeBuffer:
3914                 bindable_state = GetBufferState(obj.Cast<VkBuffer>());
3915                 break;
3916             case kVulkanObjectTypeAccelerationStructureNV:
3917                 bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
3918                 break;
3919 
3920             default:
3921                 // Should only have acceleration structure, buffer, or image objects bound to memory
3922                 assert(0);
3923         }
3924 
3925         assert(bindable_state);
3926         bindable_state->binding.mem = MEMORY_UNBOUND;
3927         bindable_state->UpdateBoundMemorySet();
3928     }
3929     // Any bound cmd buffers are now invalid
3930     InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
3931     memObjMap.erase(mem);
3932 }
3933 
3934 // Validate that given Map memory range is valid. This means that the memory should not already be mapped,
3935 //  and that the size of the map range should be:
3936 //  1. Not zero
3937 //  2. Within the size of the memory allocation
ValidateMapMemRange(VkDeviceMemory mem,VkDeviceSize offset,VkDeviceSize size)3938 bool CoreChecks::ValidateMapMemRange(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size) {
3939     bool skip = false;
3940 
3941     if (size == 0) {
3942         skip =
3943             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, HandleToUint64(mem),
3944                     kVUID_Core_MemTrack_InvalidMap, "VkMapMemory: Attempting to map memory range of size zero");
3945     }
3946 
3947     auto mem_element = memObjMap.find(mem);
3948     if (mem_element != memObjMap.end()) {
3949         auto mem_info = mem_element->second.get();
3950         // It is an application error to call VkMapMemory on an object that is already mapped
3951         if (mem_info->mem_range.size != 0) {
3952             skip =
3953                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
3954                         HandleToUint64(mem), kVUID_Core_MemTrack_InvalidMap,
3955                         "VkMapMemory: Attempting to map memory on an already-mapped %s.", report_data->FormatHandle(mem).c_str());
3956         }
3957 
3958         // Validate that offset + size is within object's allocationSize
3959         if (size == VK_WHOLE_SIZE) {
3960             if (offset >= mem_info->alloc_info.allocationSize) {
3961                 skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
3962                                HandleToUint64(mem), kVUID_Core_MemTrack_InvalidMap,
3963                                "Mapping Memory from 0x%" PRIx64 " to 0x%" PRIx64
3964                                " with size of VK_WHOLE_SIZE oversteps total array size 0x%" PRIx64,
3965                                offset, mem_info->alloc_info.allocationSize, mem_info->alloc_info.allocationSize);
3966             }
3967         } else {
3968             if ((offset + size) > mem_info->alloc_info.allocationSize) {
3969                 skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
3970                                HandleToUint64(mem), "VUID-vkMapMemory-size-00681",
3971                                "Mapping Memory from 0x%" PRIx64 " to 0x%" PRIx64 " oversteps total array size 0x%" PRIx64 ".",
3972                                offset, size + offset, mem_info->alloc_info.allocationSize);
3973             }
3974         }
3975     }
3976     return skip;
3977 }
3978 
StoreMemRanges(VkDeviceMemory mem,VkDeviceSize offset,VkDeviceSize size)3979 void CoreChecks::StoreMemRanges(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size) {
3980     auto mem_info = GetDevMemState(mem);
3981     if (mem_info) {
3982         mem_info->mem_range.offset = offset;
3983         mem_info->mem_range.size = size;
3984     }
3985 }
3986 
// Guard value for pad data: byte written into the guard bands (and initial payload)
// of the shadow copy created for non-coherent mapped memory, so over-/under-writes
// can be detected when the pad bytes are later inspected.
static char NoncoherentMemoryFillValue = 0xb;
3989 
// Record the driver's mapped pointer for 'mem' and, for non-host-coherent memory,
// substitute a guard-banded shadow copy so writes outside the requested range can
// be detected later. ppData is in/out: on entry the driver's mapping, on exit
// (non-coherent path) a pointer into the shadow copy handed to the application.
void CoreChecks::InitializeAndTrackMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->p_driver_data = *ppData;  // Keep the real driver mapping for later copy-back.
        uint32_t index = mem_info->alloc_info.memoryTypeIndex;
        if (phys_dev_mem_props.memoryTypes[index].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
            // Host-coherent memory: no shadow copy, the app uses the driver mapping directly.
            mem_info->shadow_copy = 0;
        } else {
            // Resolve VK_WHOLE_SIZE to the concrete number of bytes being mapped.
            if (size == VK_WHOLE_SIZE) {
                size = mem_info->alloc_info.allocationSize - offset;
            }
            mem_info->shadow_pad_size = phys_dev_props.limits.minMemoryMapAlignment;
            assert(SafeModulo(mem_info->shadow_pad_size, phys_dev_props.limits.minMemoryMapAlignment) == 0);
            // Ensure start of mapped region reflects hardware alignment constraints
            uint64_t map_alignment = phys_dev_props.limits.minMemoryMapAlignment;

            // From spec: (ppData - offset) must be aligned to at least limits::minMemoryMapAlignment.
            uint64_t start_offset = offset % map_alignment;
            // Data passed to driver will be wrapped by a guardband of data to detect over- or under-writes.
            // NOTE(review): malloc result is not null-checked — assumes allocation succeeds.
            mem_info->shadow_copy_base =
                malloc(static_cast<size_t>(2 * mem_info->shadow_pad_size + size + map_alignment + start_offset));

            // Round the base up to map_alignment (mask trick assumes map_alignment is a
            // power of two), then add start_offset so the pointer returned to the app has
            // the same misalignment as the real mapping.
            mem_info->shadow_copy =
                reinterpret_cast<char *>((reinterpret_cast<uintptr_t>(mem_info->shadow_copy_base) + map_alignment) &
                                         ~(map_alignment - 1)) +
                start_offset;
            assert(SafeModulo(reinterpret_cast<uintptr_t>(mem_info->shadow_copy) + mem_info->shadow_pad_size - start_offset,
                              map_alignment) == 0);

            // Pre-fill both guard bands plus the payload with the guard byte.
            memset(mem_info->shadow_copy, NoncoherentMemoryFillValue, static_cast<size_t>(2 * mem_info->shadow_pad_size + size));
            // Hand the app a pointer just past the leading guard band.
            *ppData = static_cast<char *>(mem_info->shadow_copy) + mem_info->shadow_pad_size;
        }
    }
}
4024 
RetireFence(VkFence fence)4025 void CoreChecks::RetireFence(VkFence fence) {
4026     auto pFence = GetFenceState(fence);
4027     if (pFence && pFence->scope == kSyncScopeInternal) {
4028         if (pFence->signaler.first != VK_NULL_HANDLE) {
4029             // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
4030             RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second, true);
4031         } else {
4032             // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
4033             // the fence as retired.
4034             pFence->state = FENCE_RETIRED;
4035         }
4036     }
4037 }
4038 
PreCallValidateWaitForFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout)4039 bool CoreChecks::PreCallValidateWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
4040                                               uint64_t timeout) {
4041     // Verify fence status of submitted fences
4042     bool skip = false;
4043     for (uint32_t i = 0; i < fenceCount; i++) {
4044         skip |= VerifyQueueStateToFence(pFences[i]);
4045     }
4046     return skip;
4047 }
4048 
PostCallRecordWaitForFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout,VkResult result)4049 void CoreChecks::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
4050                                              uint64_t timeout, VkResult result) {
4051     if (VK_SUCCESS != result) return;
4052 
4053     // When we know that all fences are complete we can clean/remove their CBs
4054     if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
4055         for (uint32_t i = 0; i < fenceCount; i++) {
4056             RetireFence(pFences[i]);
4057         }
4058     }
4059     // NOTE : Alternate case not handled here is when some fences have completed. In
4060     //  this case for app to guarantee which fences completed it will have to call
4061     //  vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
4062 }
4063 
PostCallRecordGetFenceStatus(VkDevice device,VkFence fence,VkResult result)4064 void CoreChecks::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
4065     if (VK_SUCCESS != result) return;
4066     RetireFence(fence);
4067 }
4068 
RecordGetDeviceQueueState(uint32_t queue_family_index,VkQueue queue)4069 void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
4070     // Add queue to tracking set only if it is new
4071     auto queue_is_new = queues.emplace(queue);
4072     if (queue_is_new.second == true) {
4073         QUEUE_STATE *queue_state = &queueMap[queue];
4074         queue_state->queue = queue;
4075         queue_state->queueFamilyIndex = queue_family_index;
4076         queue_state->seq = 0;
4077     }
4078 }
4079 
ValidateGetDeviceQueue(uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue,const char * valid_qfi_vuid,const char * qfi_in_range_vuid) const4080 bool CoreChecks::ValidateGetDeviceQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue, const char *valid_qfi_vuid,
4081                                         const char *qfi_in_range_vuid) const {
4082     bool skip = false;
4083 
4084     skip |= ValidateDeviceQueueFamily(queueFamilyIndex, "vkGetDeviceQueue", "queueFamilyIndex", valid_qfi_vuid);
4085     const auto &queue_data = queue_family_index_map.find(queueFamilyIndex);
4086     if (queue_data != queue_family_index_map.end() && queue_data->second <= queueIndex) {
4087         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
4088                         qfi_in_range_vuid,
4089                         "vkGetDeviceQueue: queueIndex (=%" PRIu32
4090                         ") is not less than the number of queues requested from queueFamilyIndex (=%" PRIu32
4091                         ") when the device was created (i.e. is not less than %" PRIu32 ").",
4092                         queueIndex, queueFamilyIndex, queue_data->second);
4093     }
4094     return skip;
4095 }
4096 
PreCallValidateGetDeviceQueue(VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)4097 bool CoreChecks::PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
4098     return ValidateGetDeviceQueue(queueFamilyIndex, queueIndex, pQueue, "VUID-vkGetDeviceQueue-queueFamilyIndex-00384",
4099                                   "VUID-vkGetDeviceQueue-queueIndex-00385");
4100 }
4101 
PostCallRecordGetDeviceQueue(VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)4102 void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
4103                                                           VkQueue *pQueue) {
4104     RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
4105 }
4106 
PostCallRecordGetDeviceQueue2(VkDevice device,const VkDeviceQueueInfo2 * pQueueInfo,VkQueue * pQueue)4107 void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
4108     RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
4109 }
4110 
PreCallValidateQueueWaitIdle(VkQueue queue)4111 bool CoreChecks::PreCallValidateQueueWaitIdle(VkQueue queue) {
4112     QUEUE_STATE *queue_state = GetQueueState(queue);
4113     return VerifyQueueStateToSeq(queue_state, queue_state->seq + queue_state->submissions.size());
4114 }
4115 
PostCallRecordQueueWaitIdle(VkQueue queue,VkResult result)4116 void CoreChecks::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
4117     if (VK_SUCCESS != result) return;
4118     QUEUE_STATE *queue_state = GetQueueState(queue);
4119     RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size(), true);
4120 }
4121 
PreCallValidateDeviceWaitIdle(VkDevice device)4122 bool CoreChecks::PreCallValidateDeviceWaitIdle(VkDevice device) {
4123     bool skip = false;
4124     for (auto &queue : queueMap) {
4125         skip |= VerifyQueueStateToSeq(&queue.second, queue.second.seq + queue.second.submissions.size());
4126     }
4127     return skip;
4128 }
4129 
PostCallRecordDeviceWaitIdle(VkDevice device,VkResult result)4130 void CoreChecks::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
4131     if (VK_SUCCESS != result) return;
4132     for (auto &queue : queueMap) {
4133         RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size(), true);
4134     }
4135 }
4136 
PreCallValidateDestroyFence(VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator)4137 bool CoreChecks::PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
4138     const FENCE_STATE *fence_node = GetFenceState(fence);
4139     bool skip = false;
4140     if (fence_node) {
4141         if (fence_node->scope == kSyncScopeInternal && fence_node->state == FENCE_INFLIGHT) {
4142             skip |=
4143                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
4144                         "VUID-vkDestroyFence-fence-01120", "%s is in use.", report_data->FormatHandle(fence).c_str());
4145         }
4146     }
4147     return skip;
4148 }
4149 
PreCallRecordDestroyFence(VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator)4150 void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
4151     if (!fence) return;
4152     fenceMap.erase(fence);
4153 }
4154 
PreCallValidateDestroySemaphore(VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)4155 bool CoreChecks::PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
4156     const SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
4157     const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
4158     bool skip = false;
4159     if (sema_node) {
4160         skip |= ValidateObjectNotInUse(sema_node, obj_struct, "vkDestroySemaphore", "VUID-vkDestroySemaphore-semaphore-01137");
4161     }
4162     return skip;
4163 }
4164 
PreCallRecordDestroySemaphore(VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)4165 void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
4166                                                            const VkAllocationCallbacks *pAllocator) {
4167     if (!semaphore) return;
4168     semaphoreMap.erase(semaphore);
4169 }
4170 
PreCallValidateDestroyEvent(VkDevice device,VkEvent event,const VkAllocationCallbacks * pAllocator)4171 bool CoreChecks::PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
4172     const EVENT_STATE *event_state = GetEventState(event);
4173     const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
4174     bool skip = false;
4175     if (event_state) {
4176         skip |= ValidateObjectNotInUse(event_state, obj_struct, "vkDestroyEvent", "VUID-vkDestroyEvent-event-01145");
4177     }
4178     return skip;
4179 }
4180 
PreCallRecordDestroyEvent(VkDevice device,VkEvent event,const VkAllocationCallbacks * pAllocator)4181 void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
4182     if (!event) return;
4183     EVENT_STATE *event_state = GetEventState(event);
4184     const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
4185     InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
4186     eventMap.erase(event);
4187 }
4188 
PreCallValidateDestroyQueryPool(VkDevice device,VkQueryPool queryPool,const VkAllocationCallbacks * pAllocator)4189 bool CoreChecks::PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
4190     if (disabled.query_validation) return false;
4191     const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
4192     const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
4193     bool skip = false;
4194     if (qp_state) {
4195         skip |= ValidateObjectNotInUse(qp_state, obj_struct, "vkDestroyQueryPool", "VUID-vkDestroyQueryPool-queryPool-00793");
4196     }
4197     return skip;
4198 }
4199 
PreCallRecordDestroyQueryPool(VkDevice device,VkQueryPool queryPool,const VkAllocationCallbacks * pAllocator)4200 void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
4201                                                            const VkAllocationCallbacks *pAllocator) {
4202     if (!queryPool) return;
4203     QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
4204     const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
4205     InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
4206     queryPoolMap.erase(queryPool);
4207 }
4208 
ValidateGetQueryPoolResultsFlags(VkQueryPool queryPool,VkQueryResultFlags flags) const4209 bool CoreChecks::ValidateGetQueryPoolResultsFlags(VkQueryPool queryPool, VkQueryResultFlags flags) const {
4210     bool skip = false;
4211     const auto query_pool_state = GetQueryPoolState(queryPool);
4212     if (query_pool_state) {
4213         if ((query_pool_state->createInfo.queryType == VK_QUERY_TYPE_TIMESTAMP) && (flags & VK_QUERY_RESULT_PARTIAL_BIT)) {
4214             skip |= log_msg(
4215                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, HandleToUint64(queryPool),
4216                 "VUID-vkGetQueryPoolResults-queryType-00818",
4217                 "%s was created with a queryType of VK_QUERY_TYPE_TIMESTAMP but flags contains VK_QUERY_RESULT_PARTIAL_BIT.",
4218                 report_data->FormatHandle(queryPool).c_str());
4219         }
4220     }
4221     return skip;
4222 }
4223 
ValidateGetQueryPoolResultsQueries(VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount) const4224 bool CoreChecks::ValidateGetQueryPoolResultsQueries(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const {
4225     bool skip = false;
4226     QueryObject query_obj{queryPool, 0u};
4227     for (uint32_t i = 0; i < queryCount; ++i) {
4228         query_obj.query = firstQuery + i;
4229         if (queryToStateMap.count(query_obj) == 0) {
4230             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
4231                             HandleToUint64(queryPool), kVUID_Core_DrawState_InvalidQuery,
4232                             "vkGetQueryPoolResults() on %s and query %" PRIu32 ": unknown query",
4233                             report_data->FormatHandle(queryPool).c_str(), query_obj.query);
4234         }
4235     }
4236     return skip;
4237 }
4238 
PreCallValidateGetQueryPoolResults(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,void * pData,VkDeviceSize stride,VkQueryResultFlags flags)4239 bool CoreChecks::PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4240                                                     uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride,
4241                                                     VkQueryResultFlags flags) {
4242     if (disabled.query_validation) return false;
4243     bool skip = false;
4244     skip |= ValidateQueryPoolStride("VUID-vkGetQueryPoolResults-flags-00814", "VUID-vkGetQueryPoolResults-flags-00815", stride,
4245                                     "dataSize", dataSize, flags);
4246     skip |= ValidateGetQueryPoolResultsFlags(queryPool, flags);
4247     skip |= ValidateGetQueryPoolResultsQueries(queryPool, firstQuery, queryCount);
4248 
4249     return skip;
4250 }
4251 
ValidateInsertMemoryRange(const VulkanTypedHandle & typed_handle,const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize memoryOffset,const VkMemoryRequirements & memRequirements,bool is_linear,const char * api_name) const4252 bool CoreChecks::ValidateInsertMemoryRange(const VulkanTypedHandle &typed_handle, const DEVICE_MEMORY_STATE *mem_info,
4253                                            VkDeviceSize memoryOffset, const VkMemoryRequirements &memRequirements, bool is_linear,
4254                                            const char *api_name) const {
4255     bool skip = false;
4256 
4257     if (memoryOffset >= mem_info->alloc_info.allocationSize) {
4258         const char *error_code = nullptr;
4259         if (typed_handle.type == kVulkanObjectTypeBuffer) {
4260             error_code = "VUID-vkBindBufferMemory-memoryOffset-01031";
4261         } else if (typed_handle.type == kVulkanObjectTypeImage) {
4262             error_code = "VUID-vkBindImageMemory-memoryOffset-01046";
4263         } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
4264             error_code = "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451";
4265         } else {
4266             // Unsupported object type
4267             assert(false);
4268         }
4269 
4270         skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
4271                        HandleToUint64(mem_info->mem), error_code,
4272                        "In %s, attempting to bind %s to %s, memoryOffset=0x%" PRIxLEAST64
4273                        " must be less than the memory allocation size 0x%" PRIxLEAST64 ".",
4274                        api_name, report_data->FormatHandle(mem_info->mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
4275                        memoryOffset, mem_info->alloc_info.allocationSize);
4276     }
4277 
4278     return skip;
4279 }
4280 
4281 // Object with given handle is being bound to memory w/ given mem_info struct.
4282 //  Track the newly bound memory range with given memoryOffset
4283 //  Also scan any previous ranges, track aliased ranges with new range, and flag an error if a linear
4284 //  and non-linear range incorrectly overlap.
4285 // Return true if an error is flagged and the user callback returns "true", otherwise false
4286 // is_image indicates an image object, otherwise handle is for a buffer
4287 // is_linear indicates a buffer or linear image
InsertMemoryRange(const VulkanTypedHandle & typed_handle,DEVICE_MEMORY_STATE * mem_info,VkDeviceSize memoryOffset,VkMemoryRequirements memRequirements,bool is_linear)4288 void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
4289                                                VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
4290     if (typed_handle.type == kVulkanObjectTypeImage) {
4291         mem_info->bound_images.insert(typed_handle.handle);
4292     } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
4293         mem_info->bound_buffers.insert(typed_handle.handle);
4294     } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
4295         mem_info->bound_acceleration_structures.insert(typed_handle.handle);
4296     } else {
4297         // Unsupported object type
4298         assert(false);
4299     }
4300 }
4301 
ValidateInsertImageMemoryRange(VkImage image,const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,const VkMemoryRequirements & mem_reqs,bool is_linear,const char * api_name) const4302 bool CoreChecks::ValidateInsertImageMemoryRange(VkImage image, const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
4303                                                 const VkMemoryRequirements &mem_reqs, bool is_linear, const char *api_name) const {
4304     return ValidateInsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear,
4305                                      api_name);
4306 }
InsertImageMemoryRange(VkImage image,DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,VkMemoryRequirements mem_reqs,bool is_linear)4307 void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
4308                                                     VkMemoryRequirements mem_reqs, bool is_linear) {
4309     InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
4310 }
4311 
ValidateInsertBufferMemoryRange(VkBuffer buffer,const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,const VkMemoryRequirements & mem_reqs,const char * api_name) const4312 bool CoreChecks::ValidateInsertBufferMemoryRange(VkBuffer buffer, const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
4313                                                  const VkMemoryRequirements &mem_reqs, const char *api_name) const {
4314     return ValidateInsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true,
4315                                      api_name);
4316 }
InsertBufferMemoryRange(VkBuffer buffer,DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,const VkMemoryRequirements & mem_reqs)4317 void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
4318                                                      const VkMemoryRequirements &mem_reqs) {
4319     InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
4320 }
4321 
ValidateInsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as,const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,const VkMemoryRequirements & mem_reqs,const char * api_name) const4322 bool CoreChecks::ValidateInsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, const DEVICE_MEMORY_STATE *mem_info,
4323                                                                 VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs,
4324                                                                 const char *api_name) const {
4325     return ValidateInsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset,
4326                                      mem_reqs, true, api_name);
4327 }
InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as,DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,const VkMemoryRequirements & mem_reqs)4328 void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
4329                                                                     VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
4330     InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
4331 }
4332 
4333 // This function will remove the handle-to-index mapping from the appropriate map.
RemoveMemoryRange(uint64_t handle,DEVICE_MEMORY_STATE * mem_info,VulkanObjectType object_type)4334 static void RemoveMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE *mem_info, VulkanObjectType object_type) {
4335     if (object_type == kVulkanObjectTypeImage) {
4336         mem_info->bound_images.erase(handle);
4337     } else if (object_type == kVulkanObjectTypeBuffer) {
4338         mem_info->bound_buffers.erase(handle);
4339     } else if (object_type == kVulkanObjectTypeAccelerationStructureNV) {
4340         mem_info->bound_acceleration_structures.erase(handle);
4341     } else {
4342         // Unsupported object type
4343         assert(false);
4344     }
4345 }
4346 
RemoveBufferMemoryRange(uint64_t handle,DEVICE_MEMORY_STATE * mem_info)4347 void ValidationStateTracker::RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE *mem_info) {
4348     RemoveMemoryRange(handle, mem_info, kVulkanObjectTypeBuffer);
4349 }
4350 
RemoveImageMemoryRange(uint64_t handle,DEVICE_MEMORY_STATE * mem_info)4351 void ValidationStateTracker::RemoveImageMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE *mem_info) {
4352     RemoveMemoryRange(handle, mem_info, kVulkanObjectTypeImage);
4353 }
4354 
RemoveAccelerationStructureMemoryRange(uint64_t handle,DEVICE_MEMORY_STATE * mem_info)4355 void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE *mem_info) {
4356     RemoveMemoryRange(handle, mem_info, kVulkanObjectTypeAccelerationStructureNV);
4357 }
4358 
ValidateMemoryTypes(const DEVICE_MEMORY_STATE * mem_info,const uint32_t memory_type_bits,const char * funcName,const char * msgCode) const4359 bool CoreChecks::ValidateMemoryTypes(const DEVICE_MEMORY_STATE *mem_info, const uint32_t memory_type_bits, const char *funcName,
4360                                      const char *msgCode) const {
4361     bool skip = false;
4362     if (((1 << mem_info->alloc_info.memoryTypeIndex) & memory_type_bits) == 0) {
4363         skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
4364                        HandleToUint64(mem_info->mem), msgCode,
4365                        "%s(): MemoryRequirements->memoryTypeBits (0x%X) for this object type are not compatible with the memory "
4366                        "type (0x%X) of %s.",
4367                        funcName, memory_type_bits, mem_info->alloc_info.memoryTypeIndex,
4368                        report_data->FormatHandle(mem_info->mem).c_str());
4369     }
4370     return skip;
4371 }
4372 
// Shared validation for vkBindBufferMemory and vkBindBufferMemory2(KHR).
// Checks, in order: that the binding itself is legal (ValidateSetMemBinding), that the
// app queried memory requirements, that the bound range and memory type are valid, that
// memoryOffset satisfies the required alignment, that the remaining allocation is large
// enough, and that dedicated-allocation restrictions are honored.
// api_name labels log messages with the calling entry point. Returns true if any
// message was logged.
bool CoreChecks::ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
                                          const char *api_name) const {
    const BUFFER_STATE *buffer_state = GetBufferState(buffer);

    bool skip = false;
    if (buffer_state) {
        // Track objects tied to memory
        uint64_t buffer_handle = HandleToUint64(buffer);
        const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
        skip = ValidateSetMemBinding(mem, obj_struct, api_name);
        if (!buffer_state->memory_requirements_checked) {
            // There's not an explicit requirement in the spec to call vkGetBufferMemoryRequirements() prior to calling
            // BindBufferMemory, but it's implied in that memory being bound must conform with VkMemoryRequirements from
            // vkGetBufferMemoryRequirements()
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
                            kVUID_Core_BindBuffer_NoMemReqQuery,
                            "%s: Binding memory to %s but vkGetBufferMemoryRequirements() has not been called on that buffer.",
                            api_name, report_data->FormatHandle(buffer).c_str());
            // In the following we'll use the information we got in CreateBuffer
        }

        // Validate bound memory range information
        const auto mem_info = GetDevMemState(mem);
        if (mem_info) {
            skip |= ValidateInsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements, api_name);
            skip |= ValidateMemoryTypes(mem_info, buffer_state->requirements.memoryTypeBits, api_name,
                                        "VUID-vkBindBufferMemory-memory-01035");
        }

        // Validate memory requirements alignment
        if (SafeModulo(memoryOffset, buffer_state->requirements.alignment) != 0) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
                            "VUID-vkBindBufferMemory-memoryOffset-01036",
                            "%s: memoryOffset is 0x%" PRIxLEAST64
                            " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
                            ", returned from a call to vkGetBufferMemoryRequirements with buffer.",
                            api_name, memoryOffset, buffer_state->requirements.alignment);
        }

        if (mem_info) {
            // Validate memory requirements size
            if (buffer_state->requirements.size > (mem_info->alloc_info.allocationSize - memoryOffset)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
                                "VUID-vkBindBufferMemory-size-01037",
                                "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
                                " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
                                ", returned from a call to vkGetBufferMemoryRequirements with buffer.",
                                api_name, mem_info->alloc_info.allocationSize - memoryOffset, buffer_state->requirements.size);
            }

            // Validate dedicated allocation: a dedicated memory object may only be bound to
            // the buffer it was allocated for, at offset zero.
            if (mem_info->is_dedicated && ((mem_info->dedicated_buffer != buffer) || (memoryOffset != 0))) {
                // TODO: Add vkBindBufferMemory2KHR error message when added to spec.
                auto validation_error = kVUIDUndefined;
                if (strcmp(api_name, "vkBindBufferMemory()") == 0) {
                    validation_error = "VUID-vkBindBufferMemory-memory-01508";
                }
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
                                validation_error,
                                "%s: for dedicated %s, VkMemoryDedicatedAllocateInfoKHR::buffer %s must be equal "
                                "to %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
                                api_name, report_data->FormatHandle(mem).c_str(),
                                report_data->FormatHandle(mem_info->dedicated_buffer).c_str(),
                                report_data->FormatHandle(buffer).c_str(), memoryOffset);
            }
        }
    }
    return skip;
}
4442 
PreCallValidateBindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory mem,VkDeviceSize memoryOffset)4443 bool CoreChecks::PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
4444     const char *api_name = "vkBindBufferMemory()";
4445     return ValidateBindBufferMemory(buffer, mem, memoryOffset, api_name);
4446 }
4447 
UpdateBindBufferMemoryState(VkBuffer buffer,VkDeviceMemory mem,VkDeviceSize memoryOffset)4448 void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
4449     BUFFER_STATE *buffer_state = GetBufferState(buffer);
4450     if (buffer_state) {
4451         // Track bound memory range information
4452         auto mem_info = GetDevMemState(mem);
4453         if (mem_info) {
4454             InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
4455         }
4456         // Track objects tied to memory
4457         SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
4458     }
4459 }
4460 
PostCallRecordBindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory mem,VkDeviceSize memoryOffset,VkResult result)4461 void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
4462                                                             VkDeviceSize memoryOffset, VkResult result) {
4463     if (VK_SUCCESS != result) return;
4464     UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
4465 }
4466 
PreCallValidateBindBufferMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfoKHR * pBindInfos)4467 bool CoreChecks::PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
4468                                                   const VkBindBufferMemoryInfoKHR *pBindInfos) {
4469     char api_name[64];
4470     bool skip = false;
4471 
4472     for (uint32_t i = 0; i < bindInfoCount; i++) {
4473         sprintf(api_name, "vkBindBufferMemory2() pBindInfos[%u]", i);
4474         skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
4475     }
4476     return skip;
4477 }
4478 
PreCallValidateBindBufferMemory2KHR(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfoKHR * pBindInfos)4479 bool CoreChecks::PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
4480                                                      const VkBindBufferMemoryInfoKHR *pBindInfos) {
4481     char api_name[64];
4482     bool skip = false;
4483 
4484     for (uint32_t i = 0; i < bindInfoCount; i++) {
4485         sprintf(api_name, "vkBindBufferMemory2KHR() pBindInfos[%u]", i);
4486         skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
4487     }
4488     return skip;
4489 }
4490 
PostCallRecordBindBufferMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfoKHR * pBindInfos,VkResult result)4491 void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
4492                                                              const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
4493     for (uint32_t i = 0; i < bindInfoCount; i++) {
4494         UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
4495     }
4496 }
4497 
PostCallRecordBindBufferMemory2KHR(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfoKHR * pBindInfos,VkResult result)4498 void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
4499                                                                 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
4500     for (uint32_t i = 0; i < bindInfoCount; i++) {
4501         UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
4502     }
4503 }
4504 
RecordGetBufferMemoryRequirementsState(VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)4505 void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
4506     BUFFER_STATE *buffer_state = GetBufferState(buffer);
4507     if (buffer_state) {
4508         buffer_state->requirements = *pMemoryRequirements;
4509         buffer_state->memory_requirements_checked = true;
4510     }
4511 }
4512 
// vkGetBufferMemoryRequirements: cache the result on the buffer state.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
}
4517 
// vkGetBufferMemoryRequirements2: cache the core requirements on the buffer state.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}
4523 
// vkGetBufferMemoryRequirements2KHR: cache the core requirements on the buffer state.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}
4529 
ValidateGetImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2 * pInfo) const4530 bool CoreChecks::ValidateGetImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2 *pInfo) const {
4531     bool skip = false;
4532     if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
4533         skip |= ValidateGetImageMemoryRequirements2ANDROID(pInfo->image);
4534     }
4535     return skip;
4536 }
4537 
// vkGetImageMemoryRequirements2: delegate to the shared validation helper.
bool CoreChecks::PreCallValidateGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                            VkMemoryRequirements2 *pMemoryRequirements) {
    return ValidateGetImageMemoryRequirements2(pInfo);
}
4542 
// vkGetImageMemoryRequirements2KHR: delegate to the shared validation helper.
bool CoreChecks::PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                               VkMemoryRequirements2 *pMemoryRequirements) {
    return ValidateGetImageMemoryRequirements2(pInfo);
}
4547 
RecordGetImageMemoryRequiementsState(VkImage image,VkMemoryRequirements * pMemoryRequirements)4548 void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
4549     IMAGE_STATE *image_state = GetImageState(image);
4550     if (image_state) {
4551         image_state->requirements = *pMemoryRequirements;
4552         image_state->memory_requirements_checked = true;
4553     }
4554 }
4555 
// vkGetImageMemoryRequirements: cache the result on the image state.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
}
4560 
// vkGetImageMemoryRequirements2: cache the core requirements on the image state.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
}
4565 
// vkGetImageMemoryRequirements2KHR: cache the core requirements on the image state.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
}
4571 
RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE * image_state,VkSparseImageMemoryRequirements * sparse_image_memory_requirements)4572 static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
4573                                                         VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
4574     image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
4575     if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
4576         image_state->sparse_metadata_required = true;
4577     }
4578 }
4579 
PostCallRecordGetImageSparseMemoryRequirements(VkDevice device,VkImage image,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements * pSparseMemoryRequirements)4580 void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
4581     VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
4582     VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
4583     auto image_state = GetImageState(image);
4584     image_state->get_sparse_reqs_called = true;
4585     if (!pSparseMemoryRequirements) return;
4586     for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
4587         RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
4588     }
4589 }
4590 
PostCallRecordGetImageSparseMemoryRequirements2(VkDevice device,const VkImageSparseMemoryRequirementsInfo2KHR * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2KHR * pSparseMemoryRequirements)4591 void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
4592     VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
4593     VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
4594     auto image_state = GetImageState(pInfo->image);
4595     image_state->get_sparse_reqs_called = true;
4596     if (!pSparseMemoryRequirements) return;
4597     for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
4598         assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
4599         RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
4600     }
4601 }
4602 
PostCallRecordGetImageSparseMemoryRequirements2KHR(VkDevice device,const VkImageSparseMemoryRequirementsInfo2KHR * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2KHR * pSparseMemoryRequirements)4603 void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
4604     VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
4605     VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
4606     auto image_state = GetImageState(pInfo->image);
4607     image_state->get_sparse_reqs_called = true;
4608     if (!pSparseMemoryRequirements) return;
4609     for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
4610         assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
4611         RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
4612     }
4613 }
4614 
PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)4615 bool CoreChecks::PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,
4616                                                                         const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
4617                                                                         VkImageFormatProperties2 *pImageFormatProperties) {
4618     // Can't wrap AHB-specific validation in a device extension check here, but no harm
4619     bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(report_data, pImageFormatInfo, pImageFormatProperties);
4620     return skip;
4621 }
4622 
PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)4623 bool CoreChecks::PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
4624                                                                            const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
4625                                                                            VkImageFormatProperties2 *pImageFormatProperties) {
4626     // Can't wrap AHB-specific validation in a device extension check here, but no harm
4627     bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(report_data, pImageFormatInfo, pImageFormatProperties);
4628     return skip;
4629 }
4630 
// Drop tracked state for a shader module being destroyed. VK_NULL_HANDLE is a no-op.
void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    shaderModuleMap.erase(shaderModule);
}
4636 
PreCallValidateDestroyPipeline(VkDevice device,VkPipeline pipeline,const VkAllocationCallbacks * pAllocator)4637 bool CoreChecks::PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
4638     PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
4639     const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
4640     bool skip = false;
4641     if (pipeline_state) {
4642         skip |= ValidateObjectNotInUse(pipeline_state, obj_struct, "vkDestroyPipeline", "VUID-vkDestroyPipeline-pipeline-00765");
4643     }
4644     return skip;
4645 }
4646 
PreCallRecordDestroyPipeline(VkDevice device,VkPipeline pipeline,const VkAllocationCallbacks * pAllocator)4647 void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
4648                                                           const VkAllocationCallbacks *pAllocator) {
4649     if (!pipeline) return;
4650     PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
4651     const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
4652     // Any bound cmd buffers are now invalid
4653     InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
4654     pipelineMap.erase(pipeline);
4655 }
4656 
// Let GPU-assisted validation process the pipeline destruction first, then run the
// base state tracker's record step (which erases the pipeline state).
void CoreChecks::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
    if (pipeline && enabled.gpu_validation) {
        GpuPreCallRecordDestroyPipeline(pipeline);
    }

    StateTracker::PreCallRecordDestroyPipeline(device, pipeline, pAllocator);
}
4664 
// Drop tracked state for a pipeline layout being destroyed. VK_NULL_HANDLE is a no-op.
void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    pipelineLayoutMap.erase(pipelineLayout);
}
4670 
PreCallValidateDestroySampler(VkDevice device,VkSampler sampler,const VkAllocationCallbacks * pAllocator)4671 bool CoreChecks::PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
4672     const SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
4673     const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
4674     bool skip = false;
4675     if (sampler_state) {
4676         skip |= ValidateObjectNotInUse(sampler_state, obj_struct, "vkDestroySampler", "VUID-vkDestroySampler-sampler-01082");
4677     }
4678     return skip;
4679 }
4680 
PreCallRecordDestroySampler(VkDevice device,VkSampler sampler,const VkAllocationCallbacks * pAllocator)4681 void ValidationStateTracker ::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
4682                                                           const VkAllocationCallbacks *pAllocator) {
4683     if (!sampler) return;
4684     SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
4685     const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
4686     // Any bound cmd buffers are now invalid
4687     if (sampler_state) {
4688         InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
4689     }
4690     samplerMap.erase(sampler);
4691 }
4692 
PreCallRecordDestroyDescriptorSetLayout(VkDevice device,VkDescriptorSetLayout descriptorSetLayout,const VkAllocationCallbacks * pAllocator)4693 void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
4694                                                                      const VkAllocationCallbacks *pAllocator) {
4695     if (!descriptorSetLayout) return;
4696     auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
4697     if (layout_it != descriptorSetLayoutMap.end()) {
4698         layout_it->second.get()->MarkDestroyed();
4699         descriptorSetLayoutMap.erase(layout_it);
4700     }
4701 }
4702 
PreCallValidateDestroyDescriptorPool(VkDevice device,VkDescriptorPool descriptorPool,const VkAllocationCallbacks * pAllocator)4703 bool CoreChecks::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
4704                                                       const VkAllocationCallbacks *pAllocator) {
4705     DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
4706     const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
4707     bool skip = false;
4708     if (desc_pool_state) {
4709         skip |= ValidateObjectNotInUse(desc_pool_state, obj_struct, "vkDestroyDescriptorPool",
4710                                        "VUID-vkDestroyDescriptorPool-descriptorPool-00303");
4711     }
4712     return skip;
4713 }
4714 
PreCallRecordDestroyDescriptorPool(VkDevice device,VkDescriptorPool descriptorPool,const VkAllocationCallbacks * pAllocator)4715 void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
4716                                                                 const VkAllocationCallbacks *pAllocator) {
4717     if (!descriptorPool) return;
4718     DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
4719     const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
4720     if (desc_pool_state) {
4721         // Any bound cmd buffers are now invalid
4722         InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
4723         // Free sets that were in this pool
4724         for (auto ds : desc_pool_state->sets) {
4725             FreeDescriptorSet(ds);
4726         }
4727         descriptorPoolMap.erase(descriptorPool);
4728     }
4729 }
4730 
4731 // Verify cmdBuffer in given cb_node is not in global in-flight set, and return skip result
4732 //  If this is a secondary command buffer, then make sure its primary is also in-flight
4733 //  If primary is not in-flight, then remove secondary from global in-flight set
4734 // This function is only valid at a point when cmdBuffer is being reset or freed
CheckCommandBufferInFlight(const CMD_BUFFER_STATE * cb_node,const char * action,const char * error_code) const4735 bool CoreChecks::CheckCommandBufferInFlight(const CMD_BUFFER_STATE *cb_node, const char *action, const char *error_code) const {
4736     bool skip = false;
4737     if (cb_node->in_use.load()) {
4738         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
4739                         HandleToUint64(cb_node->commandBuffer), error_code, "Attempt to %s %s which is in use.", action,
4740                         report_data->FormatHandle(cb_node->commandBuffer).c_str());
4741     }
4742     return skip;
4743 }
4744 
4745 // Iterate over all cmdBuffers in given commandPool and verify that each is not in use
CheckCommandBuffersInFlight(const COMMAND_POOL_STATE * pPool,const char * action,const char * error_code) const4746 bool CoreChecks::CheckCommandBuffersInFlight(const COMMAND_POOL_STATE *pPool, const char *action, const char *error_code) const {
4747     bool skip = false;
4748     for (auto cmd_buffer : pPool->commandBuffers) {
4749         skip |= CheckCommandBufferInFlight(GetCBState(cmd_buffer), action, error_code);
4750     }
4751     return skip;
4752 }
4753 
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
// NOTE(review): pool_state is dereferenced unconditionally; callers must pass a valid pool.
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}
4773 
PreCallValidateFreeCommandBuffers(VkDevice device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)4774 bool CoreChecks::PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
4775                                                    const VkCommandBuffer *pCommandBuffers) {
4776     bool skip = false;
4777     for (uint32_t i = 0; i < commandBufferCount; i++) {
4778         const auto *cb_node = GetCBState(pCommandBuffers[i]);
4779         // Delete CB information structure, and remove from commandBufferMap
4780         if (cb_node) {
4781             skip |= CheckCommandBufferInFlight(cb_node, "free", "VUID-vkFreeCommandBuffers-pCommandBuffers-00047");
4782         }
4783     }
4784     return skip;
4785 }
4786 
PreCallRecordFreeCommandBuffers(VkDevice device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)4787 void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
4788                                                              uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
4789     auto pPool = GetCommandPoolState(commandPool);
4790     FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
4791 }
4792 
// vkCreateCommandPool: the requested queue family index must be valid for this device.
bool CoreChecks::PreCallValidateCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                  const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) {
    return ValidateDeviceQueueFamily(pCreateInfo->queueFamilyIndex, "vkCreateCommandPool", "pCreateInfo->queueFamilyIndex",
                                     "VUID-vkCreateCommandPool-queueFamilyIndex-01937");
}
4798 
PostCallRecordCreateCommandPool(VkDevice device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool,VkResult result)4799 void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
4800                                                              const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
4801                                                              VkResult result) {
4802     if (VK_SUCCESS != result) return;
4803     std::unique_ptr<COMMAND_POOL_STATE> cmd_pool_state(new COMMAND_POOL_STATE{});
4804     cmd_pool_state->createFlags = pCreateInfo->flags;
4805     cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
4806     commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
4807 }
4808 
PreCallValidateCreateQueryPool(VkDevice device,const VkQueryPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkQueryPool * pQueryPool)4809 bool CoreChecks::PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
4810                                                 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
4811     if (disabled.query_validation) return false;
4812     bool skip = false;
4813     if (pCreateInfo && pCreateInfo->queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS) {
4814         if (!enabled_features.core.pipelineStatisticsQuery) {
4815             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
4816                             "VUID-VkQueryPoolCreateInfo-queryType-00791",
4817                             "Query pool with type VK_QUERY_TYPE_PIPELINE_STATISTICS created on a device with "
4818                             "VkDeviceCreateInfo.pEnabledFeatures.pipelineStatisticsQuery == VK_FALSE.");
4819         }
4820     }
4821     return skip;
4822 }
4823 
PostCallRecordCreateQueryPool(VkDevice device,const VkQueryPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkQueryPool * pQueryPool,VkResult result)4824 void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
4825                                                            const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
4826                                                            VkResult result) {
4827     if (VK_SUCCESS != result) return;
4828     std::unique_ptr<QUERY_POOL_STATE> query_pool_state(new QUERY_POOL_STATE{});
4829     query_pool_state->createInfo = *pCreateInfo;
4830     queryPoolMap[*pQueryPool] = std::move(query_pool_state);
4831 
4832     QueryObject query_obj{*pQueryPool, 0u};
4833     for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
4834         query_obj.query = i;
4835         queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
4836     }
4837 }
4838 
PreCallValidateDestroyCommandPool(VkDevice device,VkCommandPool commandPool,const VkAllocationCallbacks * pAllocator)4839 bool CoreChecks::PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
4840                                                    const VkAllocationCallbacks *pAllocator) {
4841     const COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
4842     bool skip = false;
4843     if (cp_state) {
4844         // Verify that command buffers in pool are complete (not in-flight)
4845         skip |= CheckCommandBuffersInFlight(cp_state, "destroy command pool with", "VUID-vkDestroyCommandPool-commandPool-00041");
4846     }
4847     return skip;
4848 }
4849 
PreCallRecordDestroyCommandPool(VkDevice device,VkCommandPool commandPool,const VkAllocationCallbacks * pAllocator)4850 void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
4851                                                              const VkAllocationCallbacks *pAllocator) {
4852     if (!commandPool) return;
4853     COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
4854     // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
4855     // "When a pool is destroyed, all command buffers allocated from the pool are freed."
4856     if (cp_state) {
4857         // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
4858         std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
4859         FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
4860         commandPoolMap.erase(commandPool);
4861     }
4862 }
4863 
PreCallValidateResetCommandPool(VkDevice device,VkCommandPool commandPool,VkCommandPoolResetFlags flags)4864 bool CoreChecks::PreCallValidateResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
4865     const auto *command_pool_state = GetCommandPoolState(commandPool);
4866     return CheckCommandBuffersInFlight(command_pool_state, "reset command pool with", "VUID-vkResetCommandPool-commandPool-00040");
4867 }
4868 
PostCallRecordResetCommandPool(VkDevice device,VkCommandPool commandPool,VkCommandPoolResetFlags flags,VkResult result)4869 void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
4870                                                             VkCommandPoolResetFlags flags, VkResult result) {
4871     if (VK_SUCCESS != result) return;
4872     // Reset all of the CBs allocated from this pool
4873     auto command_pool_state = GetCommandPoolState(commandPool);
4874     for (auto cmdBuffer : command_pool_state->commandBuffers) {
4875         ResetCommandBufferState(cmdBuffer);
4876     }
4877 }
4878 
PreCallValidateResetFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences)4879 bool CoreChecks::PreCallValidateResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
4880     bool skip = false;
4881     for (uint32_t i = 0; i < fenceCount; ++i) {
4882         auto pFence = GetFenceState(pFences[i]);
4883         if (pFence && pFence->scope == kSyncScopeInternal && pFence->state == FENCE_INFLIGHT) {
4884             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
4885                             HandleToUint64(pFences[i]), "VUID-vkResetFences-pFences-01123", "%s is in use.",
4886                             report_data->FormatHandle(pFences[i]).c_str());
4887         }
4888     }
4889     return skip;
4890 }
4891 
PostCallRecordResetFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkResult result)4892 void CoreChecks::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkResult result) {
4893     for (uint32_t i = 0; i < fenceCount; ++i) {
4894         auto pFence = GetFenceState(pFences[i]);
4895         if (pFence) {
4896             if (pFence->scope == kSyncScopeInternal) {
4897                 pFence->state = FENCE_UNSIGNALED;
4898             } else if (pFence->scope == kSyncScopeExternalTemporary) {
4899                 pFence->scope = kSyncScopeInternal;
4900             }
4901         }
4902     }
4903 }
4904 
4905 // For given cb_nodes, invalidate them and track object causing invalidation
InvalidateCommandBuffers(std::unordered_set<CMD_BUFFER_STATE * > const & cb_nodes,const VulkanTypedHandle & obj)4906 void ValidationStateTracker::InvalidateCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> const &cb_nodes,
4907                                                       const VulkanTypedHandle &obj) {
4908     for (auto cb_node : cb_nodes) {
4909         if (cb_node->state == CB_RECORDING) {
4910             cb_node->state = CB_INVALID_INCOMPLETE;
4911         } else if (cb_node->state == CB_RECORDED) {
4912             cb_node->state = CB_INVALID_COMPLETE;
4913         }
4914         cb_node->broken_bindings.push_back(obj);
4915 
4916         // if secondary, then propagate the invalidation to the primaries that will call us.
4917         if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
4918             InvalidateCommandBuffers(cb_node->linkedCommandBuffers, obj);
4919         }
4920     }
4921 }
4922 
PreCallValidateDestroyFramebuffer(VkDevice device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)4923 bool CoreChecks::PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
4924                                                    const VkAllocationCallbacks *pAllocator) {
4925     const FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
4926     const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
4927     bool skip = false;
4928     if (framebuffer_state) {
4929         skip |= ValidateObjectNotInUse(framebuffer_state, obj_struct, "vkDestroyFramebuffer",
4930                                        "VUID-vkDestroyFramebuffer-framebuffer-00892");
4931     }
4932     return skip;
4933 }
4934 
PreCallRecordDestroyFramebuffer(VkDevice device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)4935 void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
4936                                                              const VkAllocationCallbacks *pAllocator) {
4937     if (!framebuffer) return;
4938     FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
4939     const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
4940     InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
4941     frameBufferMap.erase(framebuffer);
4942 }
4943 
PreCallValidateDestroyRenderPass(VkDevice device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)4944 bool CoreChecks::PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
4945                                                   const VkAllocationCallbacks *pAllocator) {
4946     const RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
4947     const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
4948     bool skip = false;
4949     if (rp_state) {
4950         skip |= ValidateObjectNotInUse(rp_state, obj_struct, "vkDestroyRenderPass", "VUID-vkDestroyRenderPass-renderPass-00873");
4951     }
4952     return skip;
4953 }
4954 
PreCallRecordDestroyRenderPass(VkDevice device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)4955 void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
4956                                                             const VkAllocationCallbacks *pAllocator) {
4957     if (!renderPass) return;
4958     RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
4959     const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
4960     InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
4961     renderPassMap.erase(renderPass);
4962 }
4963 
4964 // Access helper functions for external modules
GetPDFormatProperties(const VkFormat format) const4965 VkFormatProperties CoreChecks::GetPDFormatProperties(const VkFormat format) const {
4966     VkFormatProperties format_properties;
4967     DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
4968     return format_properties;
4969 }
4970 
GetPDImageFormatProperties(const VkImageCreateInfo * image_ci,VkImageFormatProperties * pImageFormatProperties)4971 VkResult CoreChecks::GetPDImageFormatProperties(const VkImageCreateInfo *image_ci,
4972                                                 VkImageFormatProperties *pImageFormatProperties) {
4973     return DispatchGetPhysicalDeviceImageFormatProperties(physical_device, image_ci->format, image_ci->imageType, image_ci->tiling,
4974                                                           image_ci->usage, image_ci->flags, pImageFormatProperties);
4975 }
4976 
GetPDImageFormatProperties2(const VkPhysicalDeviceImageFormatInfo2 * phys_dev_image_fmt_info,VkImageFormatProperties2 * pImageFormatProperties) const4977 VkResult CoreChecks::GetPDImageFormatProperties2(const VkPhysicalDeviceImageFormatInfo2 *phys_dev_image_fmt_info,
4978                                                  VkImageFormatProperties2 *pImageFormatProperties) const {
4979     if (!instance_extensions.vk_khr_get_physical_device_properties_2) return VK_ERROR_EXTENSION_NOT_PRESENT;
4980     return DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, phys_dev_image_fmt_info, pImageFormatProperties);
4981 }
4982 
PostCallRecordCreateFence(VkDevice device,const VkFenceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFence * pFence,VkResult result)4983 void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
4984                                                        const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
4985     if (VK_SUCCESS != result) return;
4986     std::unique_ptr<FENCE_STATE> fence_state(new FENCE_STATE{});
4987     fence_state->fence = *pFence;
4988     fence_state->createInfo = *pCreateInfo;
4989     fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
4990     fenceMap[*pFence] = std::move(fence_state);
4991 }
4992 
4993 // Validation cache:
4994 // CV is the bottommost implementor of this extension. Don't pass calls down.
4995 // utility function to set collective state for pipeline
SetPipelineState(PIPELINE_STATE * pPipe)4996 void SetPipelineState(PIPELINE_STATE *pPipe) {
4997     // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
4998     if (pPipe->graphicsPipelineCI.pColorBlendState) {
4999         for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
5000             if (VK_TRUE == pPipe->attachments[i].blendEnable) {
5001                 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
5002                      (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
5003                     ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
5004                      (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
5005                     ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
5006                      (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
5007                     ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
5008                      (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
5009                     pPipe->blendConstantsEnabled = true;
5010                 }
5011             }
5012         }
5013     }
5014 }
5015 
// Validate the VK_EXT_vertex_attribute_divisor state chained onto each pipeline's vertex
// input state. For every VkVertexInputBindingDivisorDescriptionEXT this checks:
//  - the binding index against the device's maxVertexInputBindings limit,
//  - the divisor against the extension's maxVertexAttribDivisor,
//  - the divisor-related feature bits (zero divisor / non-1 divisor), and
//  - that the referenced binding uses VK_VERTEX_INPUT_RATE_INSTANCE.
// Returns true if any check failed (and the corresponding VUID was logged).
bool CoreChecks::ValidatePipelineVertexDivisors(std::vector<std::unique_ptr<PIPELINE_STATE>> const &pipe_state_vec,
                                                const uint32_t count, const VkGraphicsPipelineCreateInfo *pipe_cis) const {
    bool skip = false;
    const VkPhysicalDeviceLimits *device_limits = &phys_dev_props.limits;

    for (uint32_t i = 0; i < count; i++) {
        // Only pipelines that chain VkPipelineVertexInputDivisorStateCreateInfoEXT need checking.
        auto pvids_ci = lvl_find_in_chain<VkPipelineVertexInputDivisorStateCreateInfoEXT>(pipe_cis[i].pVertexInputState->pNext);
        if (nullptr == pvids_ci) continue;

        const PIPELINE_STATE *pipe_state = pipe_state_vec[i].get();
        for (uint32_t j = 0; j < pvids_ci->vertexBindingDivisorCount; j++) {
            const VkVertexInputBindingDivisorDescriptionEXT *vibdd = &(pvids_ci->pVertexBindingDivisors[j]);
            // Binding index must fall within the device's vertex input binding limit.
            if (vibdd->binding >= device_limits->maxVertexInputBindings) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] binding index of (%1u) exceeds device maxVertexInputBindings (%1u).",
                    i, j, vibdd->binding, device_limits->maxVertexInputBindings);
            }
            // The divisor value is capped by the extension's reported maxVertexAttribDivisor.
            if (vibdd->divisor > phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] divisor of (%1u) exceeds extension maxVertexAttribDivisor (%1u).",
                    i, j, vibdd->divisor, phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor);
            }
            // A zero divisor requires the vertexAttributeInstanceRateZeroDivisor feature.
            if ((0 == vibdd->divisor) && !enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateZeroDivisor) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] divisor must not be 0 when vertexAttributeInstanceRateZeroDivisor feature is not "
                    "enabled.",
                    i, j);
            }
            // Any divisor other than 1 requires the vertexAttributeInstanceRateDivisor feature.
            if ((1 != vibdd->divisor) && !enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateDivisor) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] divisor (%1u) must be 1 when vertexAttributeInstanceRateDivisor feature is not "
                    "enabled.",
                    i, j, vibdd->divisor);
            }

            // Find the corresponding binding description and validate input rate setting
            bool failed_01871 = true;
            for (size_t k = 0; k < pipe_state->vertex_binding_descriptions_.size(); k++) {
                if ((vibdd->binding == pipe_state->vertex_binding_descriptions_[k].binding) &&
                    (VK_VERTEX_INPUT_RATE_INSTANCE == pipe_state->vertex_binding_descriptions_[k].inputRate)) {
                    failed_01871 = false;
                    break;
                }
            }
            if (failed_01871) {  // Description not found, or has incorrect inputRate value
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] specifies binding index (%1u), but that binding index's "
                    "VkVertexInputBindingDescription.inputRate member is not VK_VERTEX_INPUT_RATE_INSTANCE.",
                    i, j, vibdd->binding);
            }
        }
    }
    return skip;
}
5085 
PreCallValidateCreateGraphicsPipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * cgpl_state_data)5086 bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5087                                                                     const VkGraphicsPipelineCreateInfo *pCreateInfos,
5088                                                                     const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5089                                                                     void *cgpl_state_data) {
5090     // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
5091     create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
5092     cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
5093     cgpl_state->pipe_state.reserve(count);
5094     for (uint32_t i = 0; i < count; i++) {
5095         cgpl_state->pipe_state.push_back(std::unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
5096         (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i],
5097                                                           GetRenderPassStateSharedPtr(pCreateInfos[i].renderPass));
5098         (cgpl_state->pipe_state)[i]->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
5099     }
5100     return false;
5101 }
5102 
PreCallValidateCreateGraphicsPipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * cgpl_state_data)5103 bool CoreChecks::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5104                                                         const VkGraphicsPipelineCreateInfo *pCreateInfos,
5105                                                         const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5106                                                         void *cgpl_state_data) {
5107     bool skip = StateTracker::PreCallValidateCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
5108                                                                      pPipelines, cgpl_state_data);
5109     create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
5110 
5111     for (uint32_t i = 0; i < count; i++) {
5112         skip |= ValidatePipelineLocked(cgpl_state->pipe_state, i);
5113     }
5114 
5115     for (uint32_t i = 0; i < count; i++) {
5116         skip |= ValidatePipelineUnlocked(cgpl_state->pipe_state[i].get(), i);
5117     }
5118 
5119     if (device_extensions.vk_ext_vertex_attribute_divisor) {
5120         skip |= ValidatePipelineVertexDivisors(cgpl_state->pipe_state, count, pCreateInfos);
5121     }
5122 
5123     return skip;
5124 }
5125 
5126 // GPU validation may replace pCreateInfos for the down-chain call
PreCallRecordCreateGraphicsPipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * cgpl_state_data)5127 void CoreChecks::PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5128                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
5129                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5130                                                       void *cgpl_state_data) {
5131     // GPU Validation may replace instrumented shaders with non-instrumented ones, so allow it to modify the createinfos.
5132     if (enabled.gpu_validation) {
5133         create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
5134         cgpl_state->gpu_create_infos = GpuPreCallRecordCreateGraphicsPipelines(pipelineCache, count, pCreateInfos, pAllocator,
5135                                                                                pPipelines, cgpl_state->pipe_state);
5136         cgpl_state->pCreateInfos = reinterpret_cast<VkGraphicsPipelineCreateInfo *>(cgpl_state->gpu_create_infos.data());
5137     }
5138 }
5139 
PostCallRecordCreateGraphicsPipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,VkResult result,void * cgpl_state_data)5140 void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5141                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
5142                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5143                                                                    VkResult result, void *cgpl_state_data) {
5144     create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
5145     // This API may create pipelines regardless of the return value
5146     for (uint32_t i = 0; i < count; i++) {
5147         if (pPipelines[i] != VK_NULL_HANDLE) {
5148             (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
5149             pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
5150         }
5151     }
5152     cgpl_state->pipe_state.clear();
5153 }
5154 
PostCallRecordCreateGraphicsPipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,VkResult result,void * cgpl_state_data)5155 void CoreChecks::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5156                                                        const VkGraphicsPipelineCreateInfo *pCreateInfos,
5157                                                        const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5158                                                        VkResult result, void *cgpl_state_data) {
5159     StateTracker::PostCallRecordCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines, result,
5160                                                         cgpl_state_data);
5161     // GPU val needs clean up regardless of result
5162     if (enabled.gpu_validation) {
5163         create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
5164         GpuPostCallRecordCreateGraphicsPipelines(count, pCreateInfos, pAllocator, pPipelines);
5165         cgpl_state->gpu_create_infos.clear();
5166     }
5167 }
5168 
PreCallValidateCreateComputePipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * ccpl_state_data)5169 bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5170                                                                    const VkComputePipelineCreateInfo *pCreateInfos,
5171                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5172                                                                    void *ccpl_state_data) {
5173     auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
5174     ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
5175     ccpl_state->pipe_state.reserve(count);
5176     for (uint32_t i = 0; i < count; i++) {
5177         // Create and initialize internal tracking data structure
5178         ccpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
5179         ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
5180         ccpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
5181     }
5182     return false;
5183 }
5184 
PreCallValidateCreateComputePipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * ccpl_state_data)5185 bool CoreChecks::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5186                                                        const VkComputePipelineCreateInfo *pCreateInfos,
5187                                                        const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5188                                                        void *ccpl_state_data) {
5189     bool skip = StateTracker::PreCallValidateCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
5190                                                                     pPipelines, ccpl_state_data);
5191 
5192     auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
5193     for (uint32_t i = 0; i < count; i++) {
5194         // TODO: Add Compute Pipeline Verification
5195         skip |= ValidateComputePipeline(ccpl_state->pipe_state.back().get());
5196     }
5197     return skip;
5198 }
5199 
PreCallRecordCreateComputePipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * ccpl_state_data)5200 void CoreChecks::PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5201                                                      const VkComputePipelineCreateInfo *pCreateInfos,
5202                                                      const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5203                                                      void *ccpl_state_data) {
5204     // GPU Validation may replace instrumented shaders with non-instrumented ones, so allow it to modify the createinfos.
5205     if (enabled.gpu_validation) {
5206         auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
5207         ccpl_state->gpu_create_infos = GpuPreCallRecordCreateComputePipelines(pipelineCache, count, pCreateInfos, pAllocator,
5208                                                                               pPipelines, ccpl_state->pipe_state);
5209         ccpl_state->pCreateInfos = reinterpret_cast<VkComputePipelineCreateInfo *>(ccpl_state->gpu_create_infos.data());
5210     }
5211 }
5212 
PostCallRecordCreateComputePipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,VkResult result,void * ccpl_state_data)5213 void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5214                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
5215                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5216                                                                   VkResult result, void *ccpl_state_data) {
5217     create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
5218 
5219     // This API may create pipelines regardless of the return value
5220     for (uint32_t i = 0; i < count; i++) {
5221         if (pPipelines[i] != VK_NULL_HANDLE) {
5222             (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
5223             pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
5224         }
5225     }
5226     ccpl_state->pipe_state.clear();
5227 }
5228 
PostCallRecordCreateComputePipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,VkResult result,void * ccpl_state_data)5229 void CoreChecks::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5230                                                       const VkComputePipelineCreateInfo *pCreateInfos,
5231                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5232                                                       VkResult result, void *ccpl_state_data) {
5233     StateTracker::PostCallRecordCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines, result,
5234                                                        ccpl_state_data);
5235 
5236     // GPU val needs clean up regardless of result
5237     if (enabled.gpu_validation) {
5238         create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
5239         GpuPostCallRecordCreateComputePipelines(count, pCreateInfos, pAllocator, pPipelines);
5240         ccpl_state->gpu_create_infos.clear();
5241     }
5242 }
5243 
PreCallValidateCreateRayTracingPipelinesNV(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoNV * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * crtpl_state_data)5244 bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
5245                                                                         uint32_t count,
5246                                                                         const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
5247                                                                         const VkAllocationCallbacks *pAllocator,
5248                                                                         VkPipeline *pPipelines, void *crtpl_state_data) {
5249     auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
5250     crtpl_state->pipe_state.reserve(count);
5251     for (uint32_t i = 0; i < count; i++) {
5252         // Create and initialize internal tracking data structure
5253         crtpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
5254         crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
5255         crtpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
5256     }
5257     return false;
5258 }
5259 
PreCallValidateCreateRayTracingPipelinesNV(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoNV * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * crtpl_state_data)5260 bool CoreChecks::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5261                                                             const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
5262                                                             const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5263                                                             void *crtpl_state_data) {
5264     bool skip = StateTracker::PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator,
5265                                                                          pPipelines, crtpl_state_data);
5266 
5267     auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
5268     for (uint32_t i = 0; i < count; i++) {
5269         skip |= ValidateRayTracingPipelineNV(crtpl_state->pipe_state[i].get());
5270     }
5271     return skip;
5272 }
5273 
PreCallRecordCreateRayTracingPipelinesNV(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoNV * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * crtpl_state_data)5274 void CoreChecks::PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5275                                                           const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
5276                                                           const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5277                                                           void *crtpl_state_data) {
5278     // GPU Validation may replace instrumented shaders with non-instrumented ones, so allow it to modify the createinfos.
5279     if (enabled.gpu_validation) {
5280         auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
5281         crtpl_state->gpu_create_infos = GpuPreCallRecordCreateRayTracingPipelinesNV(pipelineCache, count, pCreateInfos, pAllocator,
5282                                                                                     pPipelines, crtpl_state->pipe_state);
5283         crtpl_state->pCreateInfos = reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>(crtpl_state->gpu_create_infos.data());
5284     }
5285 }
5286 
PostCallRecordCreateRayTracingPipelinesNV(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoNV * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,VkResult result,void * crtpl_state_data)5287 void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
5288     VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
5289     const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
5290     auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
5291     // This API may create pipelines regardless of the return value
5292     for (uint32_t i = 0; i < count; i++) {
5293         if (pPipelines[i] != VK_NULL_HANDLE) {
5294             (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
5295             pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
5296         }
5297     }
5298     crtpl_state->pipe_state.clear();
5299 }
5300 
PostCallRecordCreateRayTracingPipelinesNV(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoNV * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,VkResult result,void * crtpl_state_data)5301 void CoreChecks::PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
5302                                                            const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
5303                                                            const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
5304                                                            VkResult result, void *crtpl_state_data) {
5305     StateTracker::PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines,
5306                                                             result, crtpl_state_data);
5307     // GPU val needs clean up regardless of result
5308     if (enabled.gpu_validation) {
5309         auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
5310         GpuPostCallRecordCreateRayTracingPipelinesNV(count, pCreateInfos, pAllocator, pPipelines);
5311         crtpl_state->gpu_create_infos.clear();
5312     }
5313 }
5314 
PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device,const VkPipelineInfoKHR * pPipelineInfo,uint32_t * pExecutableCount,VkPipelineExecutablePropertiesKHR * pProperties)5315 bool CoreChecks::PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR *pPipelineInfo,
5316                                                                    uint32_t *pExecutableCount,
5317                                                                    VkPipelineExecutablePropertiesKHR *pProperties) {
5318     bool skip = false;
5319 
5320     if (!enabled_features.pipeline_exe_props_features.pipelineExecutableInfo) {
5321         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
5322                         "VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableProperties-03270",
5323                         "vkGetPipelineExecutablePropertiesKHR called when pipelineExecutableInfo feature is not enabled.");
5324     }
5325 
5326     return skip;
5327 }
5328 
ValidatePipelineExecutableInfo(VkDevice device,const VkPipelineExecutableInfoKHR * pExecutableInfo) const5329 bool CoreChecks::ValidatePipelineExecutableInfo(VkDevice device, const VkPipelineExecutableInfoKHR *pExecutableInfo) const {
5330     bool skip = false;
5331 
5332     if (!enabled_features.pipeline_exe_props_features.pipelineExecutableInfo) {
5333         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
5334                         "VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272",
5335                         "vkGetPipelineExecutableStatisticsKHR called when pipelineExecutableInfo feature is not enabled.");
5336     }
5337 
5338     VkPipelineInfoKHR pi = {};
5339     pi.sType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
5340     pi.pipeline = pExecutableInfo->pipeline;
5341 
5342     // We could probably cache this instead of fetching it every time
5343     uint32_t executableCount = 0;
5344     DispatchGetPipelineExecutablePropertiesKHR(device, &pi, &executableCount, NULL);
5345 
5346     if (pExecutableInfo->executableIndex >= executableCount) {
5347         skip |=
5348             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
5349                     HandleToUint64(pExecutableInfo->pipeline), "VUID-VkPipelineExecutableInfoKHR-executableIndex-03275",
5350                     "VkPipelineExecutableInfo::executableIndex (%1u) must be less than the number of executables associated with "
5351                     "the pipeline (%1u) as returned by vkGetPipelineExecutablePropertiessKHR",
5352                     pExecutableInfo->executableIndex, executableCount);
5353     }
5354 
5355     return skip;
5356 }
5357 
PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device,const VkPipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pStatisticCount,VkPipelineExecutableStatisticKHR * pStatistics)5358 bool CoreChecks::PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device,
5359                                                                    const VkPipelineExecutableInfoKHR *pExecutableInfo,
5360                                                                    uint32_t *pStatisticCount,
5361                                                                    VkPipelineExecutableStatisticKHR *pStatistics) {
5362     bool skip = ValidatePipelineExecutableInfo(device, pExecutableInfo);
5363 
5364     const PIPELINE_STATE *pipeline_state = GetPipelineState(pExecutableInfo->pipeline);
5365     if (!(pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR)) {
5366         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
5367                         HandleToUint64(pExecutableInfo->pipeline), "VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274",
5368                         "vkGetPipelineExecutableStatisticsKHR called on a pipeline created without the "
5369                         "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR flag set");
5370     }
5371 
5372     return skip;
5373 }
5374 
PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(VkDevice device,const VkPipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pInternalRepresentationCount,VkPipelineExecutableInternalRepresentationKHR * pStatistics)5375 bool CoreChecks::PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
5376     VkDevice device, const VkPipelineExecutableInfoKHR *pExecutableInfo, uint32_t *pInternalRepresentationCount,
5377     VkPipelineExecutableInternalRepresentationKHR *pStatistics) {
5378     bool skip = ValidatePipelineExecutableInfo(device, pExecutableInfo);
5379 
5380     const PIPELINE_STATE *pipeline_state = GetPipelineState(pExecutableInfo->pipeline);
5381     if (!(pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR)) {
5382         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
5383                         HandleToUint64(pExecutableInfo->pipeline),
5384                         "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278",
5385                         "vkGetPipelineExecutableInternalRepresentationsKHR called on a pipeline created without the "
5386                         "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR flag set");
5387     }
5388 
5389     return skip;
5390 }
5391 
PostCallRecordCreateSampler(VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler,VkResult result)5392 void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
5393                                                          const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
5394                                                          VkResult result) {
5395     samplerMap[*pSampler] = unique_ptr<SAMPLER_STATE>(new SAMPLER_STATE(pSampler, pCreateInfo));
5396 }
5397 
PreCallValidateCreateDescriptorSetLayout(VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout)5398 bool CoreChecks::PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
5399                                                           const VkAllocationCallbacks *pAllocator,
5400                                                           VkDescriptorSetLayout *pSetLayout) {
5401     return cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
5402         report_data, pCreateInfo, device_extensions.vk_khr_push_descriptor, phys_dev_ext_props.max_push_descriptors,
5403         device_extensions.vk_ext_descriptor_indexing, &enabled_features.descriptor_indexing, &enabled_features.inline_uniform_block,
5404         &phys_dev_ext_props.inline_uniform_block_props, &device_extensions);
5405 }
5406 
PostCallRecordCreateDescriptorSetLayout(VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout,VkResult result)5407 void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
5408                                                                      const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
5409                                                                      const VkAllocationCallbacks *pAllocator,
5410                                                                      VkDescriptorSetLayout *pSetLayout, VkResult result) {
5411     if (VK_SUCCESS != result) return;
5412     descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
5413 }
5414 
// Used by CreatePipelineLayout and CmdPushConstants.
// Note that the index argument is optional and only used by CreatePipelineLayout.
// Validates one push constant range (offset, size) against
// VkPhysicalDeviceLimits::maxPushConstantsSize and the alignment/size rules of
// VkPushConstantRange / vkCmdPushConstants, selecting the VUID appropriate to the
// calling API by string-comparing caller_name.  Returns true if any check failed
// (i.e. an error was logged).
bool CoreChecks::ValidatePushConstantRange(const uint32_t offset, const uint32_t size, const char *caller_name,
                                           uint32_t index = 0) const {
    // This whole class of checks can be disabled via layer settings.
    if (disabled.push_constant_range) return false;
    uint32_t const maxPushConstantsSize = phys_dev_props.limits.maxPushConstantsSize;
    bool skip = false;
    // Check that offset + size don't exceed the max.
    // Prevent arithmetic overflow here by avoiding addition and testing in this order.
    if ((offset >= maxPushConstantsSize) || (size > maxPushConstantsSize - offset)) {
        // This is a pain just to adapt the log message to the caller, but better to sort it out only when there is a problem.
        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
            if (offset >= maxPushConstantsSize) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-VkPushConstantRange-offset-00294",
                    "%s call has push constants index %u with offset %u that exceeds this device's maxPushConstantSize of %u.",
                    caller_name, index, offset, maxPushConstantsSize);
            }
            if (size > maxPushConstantsSize - offset) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                "VUID-VkPushConstantRange-size-00298",
                                "%s call has push constants index %u with offset %u and size %u that exceeds this device's "
                                "maxPushConstantSize of %u.",
                                caller_name, index, offset, size, maxPushConstantsSize);
            }
        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
            if (offset >= maxPushConstantsSize) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-vkCmdPushConstants-offset-00370",
                    "%s call has push constants index %u with offset %u that exceeds this device's maxPushConstantSize of %u.",
                    caller_name, index, offset, maxPushConstantsSize);
            }
            if (size > maxPushConstantsSize - offset) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                "VUID-vkCmdPushConstants-size-00371",
                                "%s call has push constants index %u with offset %u and size %u that exceeds this device's "
                                "maxPushConstantSize of %u.",
                                caller_name, index, offset, size, maxPushConstantsSize);
            }
        } else {
            // Unrecognized caller string: internal error in the layer itself.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
        }
    }
    // size needs to be non-zero and a multiple of 4.
    if ((size == 0) || ((size & 0x3) != 0)) {
        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
            if (size == 0) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                "VUID-VkPushConstantRange-size-00296",
                                "%s call has push constants index %u with size %u. Size must be greater than zero.", caller_name,
                                index, size);
            }
            if (size & 0x3) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                "VUID-VkPushConstantRange-size-00297",
                                "%s call has push constants index %u with size %u. Size must be a multiple of 4.", caller_name,
                                index, size);
            }
        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
            if (size == 0) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                "VUID-vkCmdPushConstants-size-arraylength",
                                "%s call has push constants index %u with size %u. Size must be greater than zero.", caller_name,
                                index, size);
            }
            if (size & 0x3) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                "VUID-vkCmdPushConstants-size-00369",
                                "%s call has push constants index %u with size %u. Size must be a multiple of 4.", caller_name,
                                index, size);
            }
        } else {
            // Unrecognized caller string: internal error in the layer itself.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
        }
    }
    // offset needs to be a multiple of 4.
    if ((offset & 0x3) != 0) {
        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPushConstantRange-offset-00295",
                            "%s call has push constants index %u with offset %u. Offset must be a multiple of 4.", caller_name,
                            index, offset);
        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
            // NOTE(review): unlike the branch above, this message omits the range index —
            // presumably because CmdPushConstants has no meaningful index; confirm intended.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-vkCmdPushConstants-offset-00368",
                            "%s call has push constants with offset %u. Offset must be a multiple of 4.", caller_name, offset);
        } else {
            // Unrecognized caller string: internal error in the layer itself.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
        }
    }
    return skip;
}
5512 
// Buckets of related descriptor types used when summing descriptors against
// per-stage limits (see GetDescriptorCountMaxPerStage below).
enum DSL_DESCRIPTOR_GROUPS {
    DSL_TYPE_SAMPLERS = 0,
    DSL_TYPE_UNIFORM_BUFFERS,
    DSL_TYPE_STORAGE_BUFFERS,
    DSL_TYPE_SAMPLED_IMAGES,
    DSL_TYPE_STORAGE_IMAGES,
    DSL_TYPE_INPUT_ATTACHMENTS,
    DSL_TYPE_INLINE_UNIFORM_BLOCK,  // counted per-binding (one block), not per byte
    DSL_NUM_DESCRIPTOR_GROUPS       // group count; used to size the per-group arrays
};
5523 
5524 // Used by PreCallValidateCreatePipelineLayout.
5525 // Returns an array of size DSL_NUM_DESCRIPTOR_GROUPS of the maximum number of descriptors used in any single pipeline stage
GetDescriptorCountMaxPerStage(const DeviceFeatures * enabled_features,const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> & set_layouts,bool skip_update_after_bind)5526 std::valarray<uint32_t> GetDescriptorCountMaxPerStage(
5527     const DeviceFeatures *enabled_features,
5528     const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts, bool skip_update_after_bind) {
5529     // Identify active pipeline stages
5530     std::vector<VkShaderStageFlags> stage_flags = {VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_FRAGMENT_BIT,
5531                                                    VK_SHADER_STAGE_COMPUTE_BIT};
5532     if (enabled_features->core.geometryShader) {
5533         stage_flags.push_back(VK_SHADER_STAGE_GEOMETRY_BIT);
5534     }
5535     if (enabled_features->core.tessellationShader) {
5536         stage_flags.push_back(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
5537         stage_flags.push_back(VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT);
5538     }
5539 
5540     // Allow iteration over enum values
5541     std::vector<DSL_DESCRIPTOR_GROUPS> dsl_groups = {
5542         DSL_TYPE_SAMPLERS,       DSL_TYPE_UNIFORM_BUFFERS,   DSL_TYPE_STORAGE_BUFFERS,     DSL_TYPE_SAMPLED_IMAGES,
5543         DSL_TYPE_STORAGE_IMAGES, DSL_TYPE_INPUT_ATTACHMENTS, DSL_TYPE_INLINE_UNIFORM_BLOCK};
5544 
5545     // Sum by layouts per stage, then pick max of stages per type
5546     std::valarray<uint32_t> max_sum(0U, DSL_NUM_DESCRIPTOR_GROUPS);  // max descriptor sum among all pipeline stages
5547     for (auto stage : stage_flags) {
5548         std::valarray<uint32_t> stage_sum(0U, DSL_NUM_DESCRIPTOR_GROUPS);  // per-stage sums
5549         for (auto dsl : set_layouts) {
5550             if (skip_update_after_bind &&
5551                 (dsl->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
5552                 continue;
5553             }
5554 
5555             for (uint32_t binding_idx = 0; binding_idx < dsl->GetBindingCount(); binding_idx++) {
5556                 const VkDescriptorSetLayoutBinding *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
5557                 // Bindings with a descriptorCount of 0 are "reserved" and should be skipped
5558                 if (0 != (stage & binding->stageFlags) && binding->descriptorCount > 0) {
5559                     switch (binding->descriptorType) {
5560                         case VK_DESCRIPTOR_TYPE_SAMPLER:
5561                             stage_sum[DSL_TYPE_SAMPLERS] += binding->descriptorCount;
5562                             break;
5563                         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
5564                         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
5565                             stage_sum[DSL_TYPE_UNIFORM_BUFFERS] += binding->descriptorCount;
5566                             break;
5567                         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
5568                         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
5569                             stage_sum[DSL_TYPE_STORAGE_BUFFERS] += binding->descriptorCount;
5570                             break;
5571                         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
5572                         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
5573                             stage_sum[DSL_TYPE_SAMPLED_IMAGES] += binding->descriptorCount;
5574                             break;
5575                         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
5576                         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
5577                             stage_sum[DSL_TYPE_STORAGE_IMAGES] += binding->descriptorCount;
5578                             break;
5579                         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
5580                             stage_sum[DSL_TYPE_SAMPLED_IMAGES] += binding->descriptorCount;
5581                             stage_sum[DSL_TYPE_SAMPLERS] += binding->descriptorCount;
5582                             break;
5583                         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
5584                             stage_sum[DSL_TYPE_INPUT_ATTACHMENTS] += binding->descriptorCount;
5585                             break;
5586                         case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
5587                             // count one block per binding. descriptorCount is number of bytes
5588                             stage_sum[DSL_TYPE_INLINE_UNIFORM_BLOCK]++;
5589                             break;
5590                         default:
5591                             break;
5592                     }
5593                 }
5594             }
5595         }
5596         for (auto type : dsl_groups) {
5597             max_sum[type] = std::max(stage_sum[type], max_sum[type]);
5598         }
5599     }
5600     return max_sum;
5601 }
5602 
5603 // Used by PreCallValidateCreatePipelineLayout.
5604 // Returns a map indexed by VK_DESCRIPTOR_TYPE_* enum of the summed descriptors by type.
5605 // Note: descriptors only count against the limit once even if used by multiple stages.
GetDescriptorSum(const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> & set_layouts,bool skip_update_after_bind)5606 std::map<uint32_t, uint32_t> GetDescriptorSum(
5607     const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts, bool skip_update_after_bind) {
5608     std::map<uint32_t, uint32_t> sum_by_type;
5609     for (auto dsl : set_layouts) {
5610         if (skip_update_after_bind && (dsl->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
5611             continue;
5612         }
5613 
5614         for (uint32_t binding_idx = 0; binding_idx < dsl->GetBindingCount(); binding_idx++) {
5615             const VkDescriptorSetLayoutBinding *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
5616             // Bindings with a descriptorCount of 0 are "reserved" and should be skipped
5617             if (binding->descriptorCount > 0) {
5618                 if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
5619                     // count one block per binding. descriptorCount is number of bytes
5620                     sum_by_type[binding->descriptorType]++;
5621                 } else {
5622                     sum_by_type[binding->descriptorType] += binding->descriptorCount;
5623                 }
5624             }
5625         }
5626     }
5627     return sum_by_type;
5628 }
5629 
// Validate vkCreatePipelineLayout() against device limits and create-info valid usage:
//  - setLayoutCount vs. maxBoundDescriptorSets
//  - push constant ranges: offset/size validity, non-empty stageFlags, no stage bit in two ranges
//  - at most one push-descriptor set layout
//  - per-stage and whole-layout descriptor totals vs. the core limits, and, when
//    VK_EXT_descriptor_indexing is enabled, vs. the update-after-bind limits as well
// Returns true ("skip") if any validation error was logged.
bool CoreChecks::PreCallValidateCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) {
    bool skip = false;

    // Validate layout count against device physical limit
    if (pCreateInfo->setLayoutCount > phys_dev_props.limits.maxBoundDescriptorSets) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286",
                        "vkCreatePipelineLayout(): setLayoutCount (%d) exceeds physical device maxBoundDescriptorSets limit (%d).",
                        pCreateInfo->setLayoutCount, phys_dev_props.limits.maxBoundDescriptorSets);
    }

    // Validate Push Constant ranges: each range's offset/size, and that each range names at least one stage
    uint32_t i, j;
    for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
        skip |= ValidatePushConstantRange(pCreateInfo->pPushConstantRanges[i].offset, pCreateInfo->pPushConstantRanges[i].size,
                                          "vkCreatePipelineLayout()", i);
        if (0 == pCreateInfo->pPushConstantRanges[i].stageFlags) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPushConstantRange-stageFlags-requiredbitmask",
                            "vkCreatePipelineLayout() call has no stageFlags set.");
        }
    }

    // As of 1.0.28, there is a VU that states that a stage flag cannot appear more than once in the list of push constant ranges.
    // O(n^2) pairwise scan; pushConstantRangeCount is expected to be tiny.
    for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
        for (j = i + 1; j < pCreateInfo->pushConstantRangeCount; ++j) {
            if (0 != (pCreateInfo->pPushConstantRanges[i].stageFlags & pCreateInfo->pPushConstantRanges[j].stageFlags)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-00292",
                                "vkCreatePipelineLayout() Duplicate stage flags found in ranges %d and %d.", i, j);
            }
        }
    }

    // Early-out: the descriptor-count checks below dereference the set layout state objects,
    // so don't proceed if the create info is already known to be invalid.
    if (skip) return skip;

    std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts(pCreateInfo->setLayoutCount, nullptr);
    unsigned int push_descriptor_set_count = 0;
    {
        for (i = 0; i < pCreateInfo->setLayoutCount; ++i) {
            set_layouts[i] = GetDescriptorSetLayout(this, pCreateInfo->pSetLayouts[i]);
            if (set_layouts[i]->IsPushDescriptor()) ++push_descriptor_set_count;
        }
    }

    // At most one set layout may carry VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
    if (push_descriptor_set_count > 1) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293",
                        "vkCreatePipelineLayout() Multiple push descriptor sets found.");
    }

    // Max descriptors by type, within a single pipeline stage
    // (true => update-after-bind layouts are excluded; those are checked separately below)
    std::valarray<uint32_t> max_descriptors_per_stage = GetDescriptorCountMaxPerStage(&enabled_features, set_layouts, true);
    // Samplers
    if (max_descriptors_per_stage[DSL_TYPE_SAMPLERS] > phys_dev_props.limits.maxPerStageDescriptorSamplers) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287",
                        "vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
                        "maxPerStageDescriptorSamplers limit (%d).",
                        max_descriptors_per_stage[DSL_TYPE_SAMPLERS], phys_dev_props.limits.maxPerStageDescriptorSamplers);
    }

    // Uniform buffers
    if (max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS] > phys_dev_props.limits.maxPerStageDescriptorUniformBuffers) {
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288",
                    "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
                    "maxPerStageDescriptorUniformBuffers limit (%d).",
                    max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS], phys_dev_props.limits.maxPerStageDescriptorUniformBuffers);
    }

    // Storage buffers
    if (max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS] > phys_dev_props.limits.maxPerStageDescriptorStorageBuffers) {
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289",
                    "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
                    "maxPerStageDescriptorStorageBuffers limit (%d).",
                    max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS], phys_dev_props.limits.maxPerStageDescriptorStorageBuffers);
    }

    // Sampled images
    if (max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES] > phys_dev_props.limits.maxPerStageDescriptorSampledImages) {
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290",
                    "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
                    "maxPerStageDescriptorSampledImages limit (%d).",
                    max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES], phys_dev_props.limits.maxPerStageDescriptorSampledImages);
    }

    // Storage images
    if (max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES] > phys_dev_props.limits.maxPerStageDescriptorStorageImages) {
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291",
                    "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
                    "maxPerStageDescriptorStorageImages limit (%d).",
                    max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES], phys_dev_props.limits.maxPerStageDescriptorStorageImages);
    }

    // Input attachments
    if (max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS] > phys_dev_props.limits.maxPerStageDescriptorInputAttachments) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676",
                        "vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
                        "maxPerStageDescriptorInputAttachments limit (%d).",
                        max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS],
                        phys_dev_props.limits.maxPerStageDescriptorInputAttachments);
    }

    // Inline uniform blocks (counted one per binding, not per byte)
    if (max_descriptors_per_stage[DSL_TYPE_INLINE_UNIFORM_BLOCK] >
        phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-descriptorType-02214",
                        "vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
                        "maxPerStageDescriptorInlineUniformBlocks limit (%d).",
                        max_descriptors_per_stage[DSL_TYPE_INLINE_UNIFORM_BLOCK],
                        phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks);
    }

    // Total descriptors by type, summed across all pipeline stages
    // (again excluding update-after-bind layouts)
    std::map<uint32_t, uint32_t> sum_all_stages = GetDescriptorSum(set_layouts, true);
    // Samplers -- combined image samplers also count against the sampler limit
    uint32_t sum = sum_all_stages[VK_DESCRIPTOR_TYPE_SAMPLER] + sum_all_stages[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER];
    if (sum > phys_dev_props.limits.maxDescriptorSetSamplers) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677",
                        "vkCreatePipelineLayout(): sum of sampler bindings among all stages (%d) exceeds device "
                        "maxDescriptorSetSamplers limit (%d).",
                        sum, phys_dev_props.limits.maxDescriptorSetSamplers);
    }

    // Uniform buffers
    if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] > phys_dev_props.limits.maxDescriptorSetUniformBuffers) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678",
                        "vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
                        "maxDescriptorSetUniformBuffers limit (%d).",
                        sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER], phys_dev_props.limits.maxDescriptorSetUniformBuffers);
    }

    // Dynamic uniform buffers
    if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] > phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679",
                        "vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
                        "maxDescriptorSetUniformBuffersDynamic limit (%d).",
                        sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
                        phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic);
    }

    // Storage buffers
    if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] > phys_dev_props.limits.maxDescriptorSetStorageBuffers) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680",
                        "vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
                        "maxDescriptorSetStorageBuffers limit (%d).",
                        sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER], phys_dev_props.limits.maxDescriptorSetStorageBuffers);
    }

    // Dynamic storage buffers
    if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] > phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681",
                        "vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
                        "maxDescriptorSetStorageBuffersDynamic limit (%d).",
                        sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
                        phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic);
    }

    //  Sampled images -- combined image samplers and uniform texel buffers also count against this limit
    sum = sum_all_stages[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE] + sum_all_stages[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] +
          sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER];
    if (sum > phys_dev_props.limits.maxDescriptorSetSampledImages) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682",
                        "vkCreatePipelineLayout(): sum of sampled image bindings among all stages (%d) exceeds device "
                        "maxDescriptorSetSampledImages limit (%d).",
                        sum, phys_dev_props.limits.maxDescriptorSetSampledImages);
    }

    //  Storage images -- storage texel buffers also count against this limit
    sum = sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] + sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER];
    if (sum > phys_dev_props.limits.maxDescriptorSetStorageImages) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683",
                        "vkCreatePipelineLayout(): sum of storage image bindings among all stages (%d) exceeds device "
                        "maxDescriptorSetStorageImages limit (%d).",
                        sum, phys_dev_props.limits.maxDescriptorSetStorageImages);
    }

    // Input attachments
    if (sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] > phys_dev_props.limits.maxDescriptorSetInputAttachments) {
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684",
                    "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
                    "maxDescriptorSetInputAttachments limit (%d).",
                    sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT], phys_dev_props.limits.maxDescriptorSetInputAttachments);
    }

    // Inline uniform blocks
    if (sum_all_stages[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT] >
        phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineLayoutCreateInfo-descriptorType-02216",
                        "vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
                        "maxDescriptorSetInlineUniformBlocks limit (%d).",
                        sum_all_stages[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
                        phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks);
    }

    // With VK_EXT_descriptor_indexing, re-run the counts INCLUDING update-after-bind layouts and
    // compare against the (typically much larger) UpdateAfterBind limits.
    if (device_extensions.vk_ext_descriptor_indexing) {
        // XXX TODO: replace with correct VU messages

        // Max descriptors by type, within a single pipeline stage
        std::valarray<uint32_t> max_descriptors_per_stage_update_after_bind =
            GetDescriptorCountMaxPerStage(&enabled_features, set_layouts, false);
        // Samplers
        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLERS] >
            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSamplers) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03022",
                            "vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
                            "maxPerStageDescriptorUpdateAfterBindSamplers limit (%d).",
                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLERS],
                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSamplers);
        }

        // Uniform buffers
        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS] >
            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03023",
                            "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
                            "maxPerStageDescriptorUpdateAfterBindUniformBuffers limit (%d).",
                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS],
                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers);
        }

        // Storage buffers
        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS] >
            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03024",
                            "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
                            "maxPerStageDescriptorUpdateAfterBindStorageBuffers limit (%d).",
                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS],
                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers);
        }

        // Sampled images
        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES] >
            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSampledImages) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03025",
                            "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
                            "maxPerStageDescriptorUpdateAfterBindSampledImages limit (%d).",
                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES],
                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSampledImages);
        }

        // Storage images
        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES] >
            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageImages) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03026",
                            "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
                            "maxPerStageDescriptorUpdateAfterBindStorageImages limit (%d).",
                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES],
                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageImages);
        }

        // Input attachments
        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS] >
            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindInputAttachments) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03027",
                            "vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
                            "maxPerStageDescriptorUpdateAfterBindInputAttachments limit (%d).",
                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS],
                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindInputAttachments);
        }

        // Inline uniform blocks
        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK] >
            phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215",
                            "vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
                            "maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks limit (%d).",
                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK],
                            phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
        }

        // Total descriptors by type, summed across all pipeline stages
        //
        std::map<uint32_t, uint32_t> sum_all_stages_update_after_bind = GetDescriptorSum(set_layouts, false);
        // Samplers
        sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_SAMPLER] +
              sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER];
        if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSamplers) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03036",
                            "vkCreatePipelineLayout(): sum of sampler bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindSamplers limit (%d).",
                            sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSamplers);
        }

        // Uniform buffers
        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] >
            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffers) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037",
                            "vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindUniformBuffers limit (%d).",
                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER],
                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffers);
        }

        // Dynamic uniform buffers
        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] >
            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038",
                            "vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindUniformBuffersDynamic limit (%d).",
                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
        }

        // Storage buffers
        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] >
            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffers) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039",
                            "vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindStorageBuffers limit (%d).",
                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER],
                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffers);
        }

        // Dynamic storage buffers
        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] >
            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040",
                            "vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindStorageBuffersDynamic limit (%d).",
                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
        }

        //  Sampled images
        sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE] +
              sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] +
              sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER];
        if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSampledImages) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03041",
                            "vkCreatePipelineLayout(): sum of sampled image bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindSampledImages limit (%d).",
                            sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSampledImages);
        }

        //  Storage images
        sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] +
              sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER];
        if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageImages) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03042",
                            "vkCreatePipelineLayout(): sum of storage image bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindStorageImages limit (%d).",
                            sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageImages);
        }

        // Input attachments
        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] >
            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindInputAttachments) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043",
                            "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindInputAttachments limit (%d).",
                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT],
                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindInputAttachments);
        }

        // Inline uniform blocks
        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT] >
            phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217",
                            "vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
                            "maxDescriptorSetUpdateAfterBindInlineUniformBlocks limit (%d).",
                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
                            phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
        }
    }
    return skip;
}
6036 
6037 // For repeatable sorting, not very useful for "memory in range" search
6038 struct PushConstantRangeCompare {
operator ()PushConstantRangeCompare6039     bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
6040         if (lhs->offset == rhs->offset) {
6041             if (lhs->size == rhs->size) {
6042                 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
6043                 return lhs->stageFlags < rhs->stageFlags;
6044             }
6045             // If the offsets are the same then sorting by the end of range is useful for validation
6046             return lhs->size < rhs->size;
6047         }
6048         return lhs->offset < rhs->offset;
6049     }
6050 };
6051 
6052 static PushConstantRangesDict push_constant_ranges_dict;
6053 
GetCanonicalId(const VkPipelineLayoutCreateInfo * info)6054 PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
6055     if (!info->pPushConstantRanges) {
6056         // Hand back the empty entry (creating as needed)...
6057         return push_constant_ranges_dict.look_up(PushConstantRanges());
6058     }
6059 
6060     // Sort the input ranges to ensure equivalent ranges map to the same id
6061     std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
6062     for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
6063         sorted.insert(info->pPushConstantRanges + i);
6064     }
6065 
6066     PushConstantRanges ranges(sorted.size());
6067     for (const auto range : sorted) {
6068         ranges.emplace_back(*range);
6069     }
6070     return push_constant_ranges_dict.look_up(std::move(ranges));
6071 }
6072 
// Dictionary of the canonical form of a pipeline layout's list of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of the canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;
6078 
GetCanonicalId(const uint32_t set_index,const PushConstantRangesId pcr_id,const PipelineLayoutSetLayoutsId set_layouts_id)6079 static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
6080                                              const PipelineLayoutSetLayoutsId set_layouts_id) {
6081     return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
6082 }
6083 
PreCallRecordCreatePipelineLayout(VkDevice device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout,void * cpl_state_data)6084 void CoreChecks::PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
6085                                                    const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
6086                                                    void *cpl_state_data) {
6087     create_pipeline_layout_api_state *cpl_state = reinterpret_cast<create_pipeline_layout_api_state *>(cpl_state_data);
6088     if (enabled.gpu_validation) {
6089         GpuPreCallCreatePipelineLayout(pCreateInfo, pAllocator, pPipelineLayout, &cpl_state->new_layouts,
6090                                        &cpl_state->modified_create_info);
6091     }
6092 }
6093 
PostCallRecordCreatePipelineLayout(VkDevice device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout,VkResult result)6094 void CoreChecks::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
6095                                                     const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
6096                                                     VkResult result) {
6097     StateTracker::PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
6098 
6099     // Clean up GPU validation
6100     if (enabled.gpu_validation) {
6101         GpuPostCallCreatePipelineLayout(result);
6102     }
6103 }
6104 
PostCallRecordCreatePipelineLayout(VkDevice device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout,VkResult result)6105 void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
6106                                                                 const VkAllocationCallbacks *pAllocator,
6107                                                                 VkPipelineLayout *pPipelineLayout, VkResult result) {
6108     if (VK_SUCCESS != result) return;
6109 
6110     std::unique_ptr<PIPELINE_LAYOUT_STATE> pipeline_layout_state(new PIPELINE_LAYOUT_STATE{});
6111     pipeline_layout_state->layout = *pPipelineLayout;
6112     pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
6113     PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
6114     for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
6115         pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayout(this, pCreateInfo->pSetLayouts[i]);
6116         set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
6117     }
6118 
6119     // Get canonical form IDs for the "compatible for set" contents
6120     pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
6121     auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
6122     pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
6123 
6124     // Create table of "compatible for set N" cannonical forms for trivial accept validation
6125     for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
6126         pipeline_layout_state->compat_for_set.emplace_back(
6127             GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
6128     }
6129     pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
6130 }
6131 
PostCallRecordCreateDescriptorPool(VkDevice device,const VkDescriptorPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorPool * pDescriptorPool,VkResult result)6132 void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
6133                                                                 const VkAllocationCallbacks *pAllocator,
6134                                                                 VkDescriptorPool *pDescriptorPool, VkResult result) {
6135     if (VK_SUCCESS != result) return;
6136     descriptorPoolMap[*pDescriptorPool] =
6137         std::unique_ptr<DESCRIPTOR_POOL_STATE>(new DESCRIPTOR_POOL_STATE(*pDescriptorPool, pCreateInfo));
6138 }
6139 
PreCallValidateResetDescriptorPool(VkDevice device,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags flags)6140 bool CoreChecks::PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
6141                                                     VkDescriptorPoolResetFlags flags) {
6142     // Make sure sets being destroyed are not currently in-use
6143     if (disabled.idle_descriptor_set) return false;
6144     bool skip = false;
6145     DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
6146     if (pPool != nullptr) {
6147         for (auto ds : pPool->sets) {
6148             if (ds && ds->in_use.load()) {
6149                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
6150                                 HandleToUint64(descriptorPool), "VUID-vkResetDescriptorPool-descriptorPool-00313",
6151                                 "It is invalid to call vkResetDescriptorPool() with descriptor sets in use by a command buffer.");
6152                 if (skip) break;
6153             }
6154         }
6155     }
6156     return skip;
6157 }
6158 
PostCallRecordResetDescriptorPool(VkDevice device,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags flags,VkResult result)6159 void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
6160                                                                VkDescriptorPoolResetFlags flags, VkResult result) {
6161     if (VK_SUCCESS != result) return;
6162     DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
6163     // TODO: validate flags
6164     // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
6165     for (auto ds : pPool->sets) {
6166         FreeDescriptorSet(ds);
6167     }
6168     pPool->sets.clear();
6169     // Reset available count for each type and available sets for this pool
6170     for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
6171         pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
6172     }
6173     pPool->availableSets = pPool->maxSets;
6174 }
6175 
6176 // Ensure the pool contains enough descriptors and descriptor sets to satisfy
6177 // an allocation request. Fills common_data with the total number of descriptors of each type required,
6178 // as well as DescriptorSetLayout ptrs used for later update.
PreCallValidateAllocateDescriptorSets(VkDevice device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets,void * ads_state_data)6179 bool CoreChecks::PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
6180                                                        VkDescriptorSet *pDescriptorSets, void *ads_state_data) {
6181     // Always update common data
6182     cvdescriptorset::AllocateDescriptorSetsData *ads_state =
6183         reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
6184     UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
6185     // All state checks for AllocateDescriptorSets is done in single function
6186     return ValidateAllocateDescriptorSets(pAllocateInfo, ads_state);
6187 }
6188 
6189 // Allocation state was good and call down chain was made so update state based on allocating descriptor sets
PostCallRecordAllocateDescriptorSets(VkDevice device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets,VkResult result,void * ads_state_data)6190 void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
6191                                                                   VkDescriptorSet *pDescriptorSets, VkResult result,
6192                                                                   void *ads_state_data) {
6193     if (VK_SUCCESS != result) return;
6194     // All the updates are contained in a single cvdescriptorset function
6195     cvdescriptorset::AllocateDescriptorSetsData *ads_state =
6196         reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
6197     PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
6198 }
6199 
PreCallValidateFreeDescriptorSets(VkDevice device,VkDescriptorPool descriptorPool,uint32_t count,const VkDescriptorSet * pDescriptorSets)6200 bool CoreChecks::PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
6201                                                    const VkDescriptorSet *pDescriptorSets) {
6202     // Make sure that no sets being destroyed are in-flight
6203     bool skip = false;
6204     // First make sure sets being destroyed are not currently in-use
6205     for (uint32_t i = 0; i < count; ++i) {
6206         if (pDescriptorSets[i] != VK_NULL_HANDLE) {
6207             skip |= ValidateIdleDescriptorSet(pDescriptorSets[i], "vkFreeDescriptorSets");
6208         }
6209     }
6210     DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
6211     if (pool_state && !(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT & pool_state->createInfo.flags)) {
6212         // Can't Free from a NON_FREE pool
6213         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
6214                         HandleToUint64(descriptorPool), "VUID-vkFreeDescriptorSets-descriptorPool-00312",
6215                         "It is invalid to call vkFreeDescriptorSets() with a pool created without setting "
6216                         "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.");
6217     }
6218     return skip;
6219 }
6220 
PreCallRecordFreeDescriptorSets(VkDevice device,VkDescriptorPool descriptorPool,uint32_t count,const VkDescriptorSet * pDescriptorSets)6221 void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
6222                                                              const VkDescriptorSet *pDescriptorSets) {
6223     DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
6224     // Update available descriptor sets in pool
6225     pool_state->availableSets += count;
6226 
6227     // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
6228     for (uint32_t i = 0; i < count; ++i) {
6229         if (pDescriptorSets[i] != VK_NULL_HANDLE) {
6230             auto descriptor_set = setMap[pDescriptorSets[i]].get();
6231             uint32_t type_index = 0, descriptor_count = 0;
6232             for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
6233                 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
6234                 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
6235                 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
6236             }
6237             FreeDescriptorSet(descriptor_set);
6238             pool_state->sets.erase(descriptor_set);
6239         }
6240     }
6241 }
6242 
PreCallValidateUpdateDescriptorSets(VkDevice device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies)6243 bool CoreChecks::PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
6244                                                      const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
6245                                                      const VkCopyDescriptorSet *pDescriptorCopies) {
6246     // First thing to do is perform map look-ups.
6247     // NOTE : UpdateDescriptorSets is somewhat unique in that it's operating on a number of DescriptorSets
6248     //  so we can't just do a single map look-up up-front, but do them individually in functions below
6249 
6250     // Now make call(s) that validate state, but don't perform state updates in this function
6251     // Note, here DescriptorSets is unique in that we don't yet have an instance. Using a helper function in the
6252     //  namespace which will parse params and make calls into specific class instances
6253     return ValidateUpdateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies,
6254                                         "vkUpdateDescriptorSets()");
6255 }
6256 
PreCallRecordUpdateDescriptorSets(VkDevice device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies)6257 void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
6258                                                                const VkWriteDescriptorSet *pDescriptorWrites,
6259                                                                uint32_t descriptorCopyCount,
6260                                                                const VkCopyDescriptorSet *pDescriptorCopies) {
6261     cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
6262                                                  pDescriptorCopies);
6263 }
6264 
PostCallRecordAllocateCommandBuffers(VkDevice device,const VkCommandBufferAllocateInfo * pCreateInfo,VkCommandBuffer * pCommandBuffer,VkResult result)6265 void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
6266                                                                   VkCommandBuffer *pCommandBuffer, VkResult result) {
6267     if (VK_SUCCESS != result) return;
6268     auto pPool = GetCommandPoolState(pCreateInfo->commandPool);
6269     if (pPool) {
6270         for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
6271             // Add command buffer to its commandPool map
6272             pPool->commandBuffers.insert(pCommandBuffer[i]);
6273             std::unique_ptr<CMD_BUFFER_STATE> pCB(new CMD_BUFFER_STATE{});
6274             pCB->createInfo = *pCreateInfo;
6275             pCB->device = device;
6276             // Add command buffer to map
6277             commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
6278             ResetCommandBufferState(pCommandBuffer[i]);
6279         }
6280     }
6281 }
6282 
6283 // Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
AddFramebufferBinding(CMD_BUFFER_STATE * cb_state,FRAMEBUFFER_STATE * fb_state)6284 void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
6285     AddCommandBufferBinding(&fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer),
6286                             cb_state);
6287 
6288     const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
6289     for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
6290         auto view_state = GetAttachmentImageViewState(fb_state, attachment);
6291         if (view_state) {
6292             AddCommandBufferBindingImageView(cb_state, view_state);
6293         }
6294     }
6295 }
6296 
// Validate vkBeginCommandBuffer(): begin while in flight or already recording,
// secondary-command-buffer inheritance rules, implicit-reset pool
// requirements, and any chained device-group begin info. Returns true if a
// validation error was logged.
bool CoreChecks::PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo) {
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return false;
    bool skip = false;
    // Begin on a command buffer still executing on a queue
    if (cb_state->in_use.load()) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00049",
                        "Calling vkBeginCommandBuffer() on active %s before it has completed. You must check "
                        "command buffer fence before this call.",
                        report_data->FormatHandle(commandBuffer).c_str());
    }
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (!pInfo) {
            // Secondary command buffers must always supply inheritance info
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00051",
                            "vkBeginCommandBuffer(): Secondary %s must have inheritance info.",
                            report_data->FormatHandle(commandBuffer).c_str());
        } else {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                const auto *framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    if (framebuffer->createInfo.renderPass != pInfo->renderPass) {
                        const auto *render_pass = GetRenderPassState(pInfo->renderPass);
                        // renderPass that framebuffer was created with must be compatible with local renderPass
                        skip |= ValidateRenderPassCompatibility("framebuffer", framebuffer->rp_state.get(), "command buffer",
                                                                render_pass, "vkBeginCommandBuffer()",
                                                                "VUID-VkCommandBufferBeginInfo-flags-00055");
                    }
                }
            }
            // Precise occlusion query control requires both the feature and an
            // enabled occlusion query in the inheritance info
            if ((pInfo->occlusionQueryEnable == VK_FALSE || enabled_features.core.occlusionQueryPrecise == VK_FALSE) &&
                (pInfo->queryFlags & VK_QUERY_CONTROL_PRECISE_BIT)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00052",
                                "vkBeginCommandBuffer(): Secondary %s must not have VK_QUERY_CONTROL_PRECISE_BIT if "
                                "occulusionQuery is disabled or the device does not support precise occlusion queries.",
                                report_data->FormatHandle(commandBuffer).c_str());
            }
        }
        // Inherited subpass index must be within the render pass's subpass count
        if (pInfo && pInfo->renderPass != VK_NULL_HANDLE) {
            const auto *renderPass = GetRenderPassState(pInfo->renderPass);
            if (renderPass) {
                if (pInfo->subpass >= renderPass->createInfo.subpassCount) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(commandBuffer), "VUID-VkCommandBufferBeginInfo-flags-00054",
                                    "vkBeginCommandBuffer(): Secondary %s must have a subpass index (%d) that is "
                                    "less than the number of subpasses (%d).",
                                    report_data->FormatHandle(commandBuffer).c_str(), pInfo->subpass,
                                    renderPass->createInfo.subpassCount);
                }
            }
        }
    }
    if (CB_RECORDING == cb_state->state) {
        // Begin called twice without an intervening vkEndCommandBuffer
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00049",
                        "vkBeginCommandBuffer(): Cannot call Begin on %s in the RECORDING state. Must first call "
                        "vkEndCommandBuffer().",
                        report_data->FormatHandle(commandBuffer).c_str());
    } else if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        // Re-begin is an implicit reset, which requires the pool's reset bit.
        // NOTE(review): pPool is dereferenced without a null check here --
        // presumably the handle is always tracked at this point; confirm.
        VkCommandPool cmdPool = cb_state->createInfo.commandPool;
        const auto *pPool = GetCommandPoolState(cmdPool);
        if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & pPool->createFlags)) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00050",
                            "Call to vkBeginCommandBuffer() on %s attempts to implicitly reset cmdBuffer created from "
                            "%s that does NOT have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
                            report_data->FormatHandle(commandBuffer).c_str(), report_data->FormatHandle(cmdPool).c_str());
        }
    }
    // Device-group begin info: mask must be a valid, non-zero device subset
    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        skip |= ValidateDeviceMaskToPhysicalDeviceCount(
            chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
            "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106");
        skip |=
            ValidateDeviceMaskToZero(chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                     HandleToUint64(commandBuffer), "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107");
    }
    return skip;
}
6381 
// Record side of vkBeginCommandBuffer(): performs the implicit reset when
// applicable, captures the begin/inheritance info, and derives the command
// buffer's initial device mask.
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    // This implicitly resets the Cmd Buffer so make sure any fence is done and then clear memory references
    ClearCmdBufAndMemReferences(cb_state);
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    // Begin on a previously recorded buffer is an implicit reset
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        // Deep-copy the inheritance info so the stored pointer remains valid
        // after the caller's struct goes out of scope
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting.  Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
            cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
        }
    }

    // Initial device mask: taken from the chained device-group struct when
    // present, otherwise all physical devices in the group
    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }
}
6428 
PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer)6429 bool CoreChecks::PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer) {
6430     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6431     if (!cb_state) return false;
6432     bool skip = false;
6433     if ((VK_COMMAND_BUFFER_LEVEL_PRIMARY == cb_state->createInfo.level) ||
6434         !(cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
6435         // This needs spec clarification to update valid usage, see comments in PR:
6436         // https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/165
6437         skip |= InsideRenderPass(cb_state, "vkEndCommandBuffer()", "VUID-vkEndCommandBuffer-commandBuffer-00060");
6438     }
6439 
6440     skip |= ValidateCmd(cb_state, CMD_ENDCOMMANDBUFFER, "vkEndCommandBuffer()");
6441     for (auto query : cb_state->activeQueries) {
6442         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6443                         HandleToUint64(commandBuffer), "VUID-vkEndCommandBuffer-commandBuffer-00061",
6444                         "Ending command buffer with in progress query: %s, query %d.",
6445                         report_data->FormatHandle(query.pool).c_str(), query.query);
6446     }
6447     return skip;
6448 }
6449 
PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer,VkResult result)6450 void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
6451     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6452     if (!cb_state) return;
6453     // Cached validation is specific to a specific recording of a specific command buffer.
6454     for (auto descriptor_set : cb_state->validated_descriptor_sets) {
6455         descriptor_set->ClearCachedValidation(cb_state);
6456     }
6457     cb_state->validated_descriptor_sets.clear();
6458     if (VK_SUCCESS == result) {
6459         cb_state->state = CB_RECORDED;
6460     }
6461 }
6462 
PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)6463 bool CoreChecks::PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
6464     bool skip = false;
6465     const CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
6466     if (!pCB) return false;
6467     VkCommandPool cmdPool = pCB->createInfo.commandPool;
6468     const auto *pPool = GetCommandPoolState(cmdPool);
6469 
6470     if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & pPool->createFlags)) {
6471         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6472                         HandleToUint64(commandBuffer), "VUID-vkResetCommandBuffer-commandBuffer-00046",
6473                         "Attempt to reset %s created from %s that does NOT have the "
6474                         "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
6475                         report_data->FormatHandle(commandBuffer).c_str(), report_data->FormatHandle(cmdPool).c_str());
6476     }
6477     skip |= CheckCommandBufferInFlight(pCB, "reset", "VUID-vkResetCommandBuffer-commandBuffer-00045");
6478 
6479     return skip;
6480 }
6481 
PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags,VkResult result)6482 void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
6483                                                               VkResult result) {
6484     if (VK_SUCCESS == result) {
6485         ResetCommandBufferState(commandBuffer);
6486     }
6487 }
6488 
GetPipelineTypeName(VkPipelineBindPoint pipelineBindPoint)6489 static const char *GetPipelineTypeName(VkPipelineBindPoint pipelineBindPoint) {
6490     switch (pipelineBindPoint) {
6491         case VK_PIPELINE_BIND_POINT_GRAPHICS:
6492             return "graphics";
6493         case VK_PIPELINE_BIND_POINT_COMPUTE:
6494             return "compute";
6495         case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV:
6496             return "ray-tracing";
6497         default:
6498             return "unknown";
6499     }
6500 }
6501 
PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipeline pipeline)6502 bool CoreChecks::PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
6503                                                 VkPipeline pipeline) {
6504     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6505     assert(cb_state);
6506 
6507     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindPipeline()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
6508                                       "VUID-vkCmdBindPipeline-commandBuffer-cmdpool");
6509     skip |= ValidateCmd(cb_state, CMD_BINDPIPELINE, "vkCmdBindPipeline()");
6510     static const std::map<VkPipelineBindPoint, std::string> bindpoint_errors = {
6511         std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdBindPipeline-pipelineBindPoint-00777"),
6512         std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdBindPipeline-pipelineBindPoint-00778"),
6513         std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "VUID-vkCmdBindPipeline-pipelineBindPoint-02391")};
6514 
6515     skip |= ValidatePipelineBindPoint(cb_state, pipelineBindPoint, "vkCmdBindPipeline()", bindpoint_errors);
6516 
6517     const auto *pipeline_state = GetPipelineState(pipeline);
6518     assert(pipeline_state);
6519 
6520     const auto &pipeline_state_bind_point = pipeline_state->getPipelineType();
6521 
6522     if (pipelineBindPoint != pipeline_state_bind_point) {
6523         if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS) {
6524             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6525                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-00779",
6526                             "Cannot bind a pipeline of type %s to the graphics pipeline bind point",
6527                             GetPipelineTypeName(pipeline_state_bind_point));
6528         } else if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
6529             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6530                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-00780",
6531                             "Cannot bind a pipeline of type %s to the compute pipeline bind point",
6532                             GetPipelineTypeName(pipeline_state_bind_point));
6533         } else if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
6534             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6535                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-02392",
6536                             "Cannot bind a pipeline of type %s to the ray-tracing pipeline bind point",
6537                             GetPipelineTypeName(pipeline_state_bind_point));
6538         }
6539     }
6540 
6541     return skip;
6542 }
6543 
PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipeline pipeline)6544 void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
6545                                                           VkPipeline pipeline) {
6546     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6547     assert(cb_state);
6548 
6549     auto pipe_state = GetPipelineState(pipeline);
6550     if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
6551         cb_state->status &= ~cb_state->static_status;
6552         cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
6553         cb_state->status |= cb_state->static_status;
6554     }
6555     cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
6556     SetPipelineState(pipe_state);
6557     AddCommandBufferBinding(&pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
6558 }
6559 
PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkViewport * pViewports)6560 bool CoreChecks::PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
6561                                                const VkViewport *pViewports) {
6562     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6563     assert(cb_state);
6564     bool skip =
6565         ValidateCmdQueueFlags(cb_state, "vkCmdSetViewport()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdSetViewport-commandBuffer-cmdpool");
6566     skip |= ValidateCmd(cb_state, CMD_SETVIEWPORT, "vkCmdSetViewport()");
6567     if (cb_state->static_status & CBSTATUS_VIEWPORT_SET) {
6568         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6569                         HandleToUint64(commandBuffer), "VUID-vkCmdSetViewport-None-01221",
6570                         "vkCmdSetViewport(): pipeline was created without VK_DYNAMIC_STATE_VIEWPORT flag.");
6571     }
6572     return skip;
6573 }
6574 
PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkViewport * pViewports)6575 void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
6576                                                          uint32_t viewportCount, const VkViewport *pViewports) {
6577     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6578     cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
6579     cb_state->status |= CBSTATUS_VIEWPORT_SET;
6580 }
6581 
PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer,uint32_t firstScissor,uint32_t scissorCount,const VkRect2D * pScissors)6582 bool CoreChecks::PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
6583                                               const VkRect2D *pScissors) {
6584     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6585     assert(cb_state);
6586     bool skip =
6587         ValidateCmdQueueFlags(cb_state, "vkCmdSetScissor()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdSetScissor-commandBuffer-cmdpool");
6588     skip |= ValidateCmd(cb_state, CMD_SETSCISSOR, "vkCmdSetScissor()");
6589     if (cb_state->static_status & CBSTATUS_SCISSOR_SET) {
6590         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6591                         HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-None-00590",
6592                         "vkCmdSetScissor(): pipeline was created without VK_DYNAMIC_STATE_SCISSOR flag..");
6593     }
6594     return skip;
6595 }
6596 
PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer,uint32_t firstScissor,uint32_t scissorCount,const VkRect2D * pScissors)6597 void ValidationStateTracker ::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
6598                                                          uint32_t scissorCount, const VkRect2D *pScissors) {
6599     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6600     cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
6601     cb_state->status |= CBSTATUS_SCISSOR_SET;
6602 }
6603 
PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer,uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VkRect2D * pExclusiveScissors)6604 bool CoreChecks::PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
6605                                                          uint32_t exclusiveScissorCount, const VkRect2D *pExclusiveScissors) {
6606     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6607     assert(cb_state);
6608     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetExclusiveScissorNV()", VK_QUEUE_GRAPHICS_BIT,
6609                                       "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-cmdpool");
6610     skip |= ValidateCmd(cb_state, CMD_SETEXCLUSIVESCISSORNV, "vkCmdSetExclusiveScissorNV()");
6611     if (cb_state->static_status & CBSTATUS_EXCLUSIVE_SCISSOR_SET) {
6612         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6613                         HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-None-02032",
6614                         "vkCmdSetExclusiveScissorNV(): pipeline was created without VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV flag.");
6615     }
6616 
6617     if (!enabled_features.exclusive_scissor.exclusiveScissor) {
6618         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6619                         HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-None-02031",
6620                         "vkCmdSetExclusiveScissorNV: The exclusiveScissor feature is disabled.");
6621     }
6622 
6623     return skip;
6624 }
6625 
PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer,uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VkRect2D * pExclusiveScissors)6626 void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
6627                                                                    uint32_t exclusiveScissorCount,
6628                                                                    const VkRect2D *pExclusiveScissors) {
6629     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6630     // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
6631     // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
6632     cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
6633 }
6634 
PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer,VkImageView imageView,VkImageLayout imageLayout)6635 bool CoreChecks::PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
6636                                                           VkImageLayout imageLayout) {
6637     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6638     assert(cb_state);
6639     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindShadingRateImageNV()", VK_QUEUE_GRAPHICS_BIT,
6640                                       "VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool");
6641 
6642     skip |= ValidateCmd(cb_state, CMD_BINDSHADINGRATEIMAGENV, "vkCmdBindShadingRateImageNV()");
6643 
6644     if (!enabled_features.shading_rate_image.shadingRateImage) {
6645         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6646                         HandleToUint64(commandBuffer), "VUID-vkCmdBindShadingRateImageNV-None-02058",
6647                         "vkCmdBindShadingRateImageNV: The shadingRateImage feature is disabled.");
6648     }
6649 
6650     if (imageView != VK_NULL_HANDLE) {
6651         const auto view_state = GetImageViewState(imageView);
6652         auto &ivci = view_state->create_info;
6653 
6654         if (!view_state || (ivci.viewType != VK_IMAGE_VIEW_TYPE_2D && ivci.viewType != VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
6655             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
6656                             HandleToUint64(imageView), "VUID-vkCmdBindShadingRateImageNV-imageView-02059",
6657                             "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, it must be a valid "
6658                             "VkImageView handle of type VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY.");
6659         }
6660 
6661         if (view_state && ivci.format != VK_FORMAT_R8_UINT) {
6662             skip |= log_msg(
6663                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, HandleToUint64(imageView),
6664                 "VUID-vkCmdBindShadingRateImageNV-imageView-02060",
6665                 "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, it must have a format of VK_FORMAT_R8_UINT.");
6666         }
6667 
6668         const VkImageCreateInfo *ici = view_state ? &GetImageState(view_state->create_info.image)->createInfo : nullptr;
6669         if (ici && !(ici->usage & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV)) {
6670             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
6671                             HandleToUint64(imageView), "VUID-vkCmdBindShadingRateImageNV-imageView-02061",
6672                             "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, the image must have been "
6673                             "created with VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV set.");
6674         }
6675 
6676         if (view_state) {
6677             const auto image_state = GetImageState(view_state->create_info.image);
6678             bool hit_error = false;
6679 
6680             // XXX TODO: While the VUID says "each subresource", only the base mip level is
6681             // actually used. Since we don't have an existing convenience function to iterate
6682             // over all mip levels, just don't bother with non-base levels.
6683             VkImageSubresourceRange &range = view_state->create_info.subresourceRange;
6684             VkImageSubresourceLayers subresource = {range.aspectMask, range.baseMipLevel, range.baseArrayLayer, range.layerCount};
6685 
6686             if (image_state) {
6687                 skip |= VerifyImageLayout(cb_state, image_state, subresource, imageLayout, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
6688                                           "vkCmdCopyImage()", "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063",
6689                                           "VUID-vkCmdBindShadingRateImageNV-imageView-02062", &hit_error);
6690             }
6691         }
6692     }
6693 
6694     return skip;
6695 }
6696 
PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer,VkImageView imageView,VkImageLayout imageLayout)6697 void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
6698                                                                     VkImageLayout imageLayout) {
6699     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6700 
6701     if (imageView != VK_NULL_HANDLE) {
6702         auto view_state = GetImageViewState(imageView);
6703         AddCommandBufferBindingImageView(cb_state, view_state);
6704     }
6705 }
6706 
PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkShadingRatePaletteNV * pShadingRatePalettes)6707 bool CoreChecks::PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
6708                                                                    uint32_t viewportCount,
6709                                                                    const VkShadingRatePaletteNV *pShadingRatePalettes) {
6710     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6711     assert(cb_state);
6712     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetViewportShadingRatePaletteNV()", VK_QUEUE_GRAPHICS_BIT,
6713                                       "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-cmdpool");
6714 
6715     skip |= ValidateCmd(cb_state, CMD_SETVIEWPORTSHADINGRATEPALETTENV, "vkCmdSetViewportShadingRatePaletteNV()");
6716 
6717     if (!enabled_features.shading_rate_image.shadingRateImage) {
6718         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6719                         HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02064",
6720                         "vkCmdSetViewportShadingRatePaletteNV: The shadingRateImage feature is disabled.");
6721     }
6722 
6723     if (cb_state->static_status & CBSTATUS_SHADING_RATE_PALETTE_SET) {
6724         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6725                         HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02065",
6726                         "vkCmdSetViewportShadingRatePaletteNV(): pipeline was created without "
6727                         "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV flag.");
6728     }
6729 
6730     for (uint32_t i = 0; i < viewportCount; ++i) {
6731         auto *palette = &pShadingRatePalettes[i];
6732         if (palette->shadingRatePaletteEntryCount == 0 ||
6733             palette->shadingRatePaletteEntryCount > phys_dev_ext_props.shading_rate_image_props.shadingRatePaletteSize) {
6734             skip |= log_msg(
6735                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
6736                 HandleToUint64(commandBuffer), "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071",
6737                 "vkCmdSetViewportShadingRatePaletteNV: shadingRatePaletteEntryCount must be between 1 and shadingRatePaletteSize.");
6738         }
6739     }
6740 
6741     return skip;
6742 }
6743 
PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkShadingRatePaletteNV * pShadingRatePalettes)6744 void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
6745                                                                              uint32_t viewportCount,
6746                                                                              const VkShadingRatePaletteNV *pShadingRatePalettes) {
6747     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6748     // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
6749     // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
6750     cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
6751 }
6752 
ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV & triangles,VkDebugReportObjectTypeEXT object_type,uint64_t object_handle,const char * func_name) const6753 bool CoreChecks::ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV &triangles, VkDebugReportObjectTypeEXT object_type,
6754                                              uint64_t object_handle, const char *func_name) const {
6755     bool skip = false;
6756 
6757     const BUFFER_STATE *vb_state = GetBufferState(triangles.vertexData);
6758     if (vb_state != nullptr && vb_state->binding.size <= triangles.vertexOffset) {
6759         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
6760                         "VUID-VkGeometryTrianglesNV-vertexOffset-02428", "%s", func_name);
6761     }
6762 
6763     const BUFFER_STATE *ib_state = GetBufferState(triangles.indexData);
6764     if (ib_state != nullptr && ib_state->binding.size <= triangles.indexOffset) {
6765         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
6766                         "VUID-VkGeometryTrianglesNV-indexOffset-02431", "%s", func_name);
6767     }
6768 
6769     const BUFFER_STATE *td_state = GetBufferState(triangles.transformData);
6770     if (td_state != nullptr && td_state->binding.size <= triangles.transformOffset) {
6771         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
6772                         "VUID-VkGeometryTrianglesNV-transformOffset-02437", "%s", func_name);
6773     }
6774 
6775     return skip;
6776 }
6777 
ValidateGeometryAABBNV(const VkGeometryAABBNV & aabbs,VkDebugReportObjectTypeEXT object_type,uint64_t object_handle,const char * func_name) const6778 bool CoreChecks::ValidateGeometryAABBNV(const VkGeometryAABBNV &aabbs, VkDebugReportObjectTypeEXT object_type,
6779                                         uint64_t object_handle, const char *func_name) const {
6780     bool skip = false;
6781 
6782     const BUFFER_STATE *aabb_state = GetBufferState(aabbs.aabbData);
6783     if (aabb_state != nullptr && aabb_state->binding.size > 0 && aabb_state->binding.size <= aabbs.offset) {
6784         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
6785                         "VUID-VkGeometryAABBNV-offset-02439", "%s", func_name);
6786     }
6787 
6788     return skip;
6789 }
6790 
ValidateGeometryNV(const VkGeometryNV & geometry,VkDebugReportObjectTypeEXT object_type,uint64_t object_handle,const char * func_name) const6791 bool CoreChecks::ValidateGeometryNV(const VkGeometryNV &geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
6792                                     const char *func_name) const {
6793     bool skip = false;
6794     if (geometry.geometryType == VK_GEOMETRY_TYPE_TRIANGLES_NV) {
6795         skip = ValidateGeometryTrianglesNV(geometry.geometry.triangles, object_type, object_handle, func_name);
6796     } else if (geometry.geometryType == VK_GEOMETRY_TYPE_AABBS_NV) {
6797         skip = ValidateGeometryAABBNV(geometry.geometry.aabbs, object_type, object_handle, func_name);
6798     }
6799     return skip;
6800 }
6801 
PreCallValidateCreateAccelerationStructureNV(VkDevice device,const VkAccelerationStructureCreateInfoNV * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkAccelerationStructureNV * pAccelerationStructure)6802 bool CoreChecks::PreCallValidateCreateAccelerationStructureNV(VkDevice device,
6803                                                               const VkAccelerationStructureCreateInfoNV *pCreateInfo,
6804                                                               const VkAllocationCallbacks *pAllocator,
6805                                                               VkAccelerationStructureNV *pAccelerationStructure) {
6806     bool skip = false;
6807     if (pCreateInfo != nullptr && pCreateInfo->info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
6808         for (uint32_t i = 0; i < pCreateInfo->info.geometryCount; i++) {
6809             skip |= ValidateGeometryNV(pCreateInfo->info.pGeometries[i], VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
6810                                        HandleToUint64(device), "vkCreateAccelerationStructureNV():");
6811         }
6812     }
6813     return skip;
6814 }
6815 
PostCallRecordCreateAccelerationStructureNV(VkDevice device,const VkAccelerationStructureCreateInfoNV * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkAccelerationStructureNV * pAccelerationStructure,VkResult result)6816 void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
6817                                                                          const VkAccelerationStructureCreateInfoNV *pCreateInfo,
6818                                                                          const VkAllocationCallbacks *pAllocator,
6819                                                                          VkAccelerationStructureNV *pAccelerationStructure,
6820                                                                          VkResult result) {
6821     if (VK_SUCCESS != result) return;
6822     std::unique_ptr<ACCELERATION_STRUCTURE_STATE> as_state(new ACCELERATION_STRUCTURE_STATE(*pAccelerationStructure, pCreateInfo));
6823 
6824     // Query the requirements in case the application doesn't (to avoid bind/validation time query)
6825     VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
6826     as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
6827     as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
6828     as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
6829     DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
6830 
6831     VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
6832     scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
6833     scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
6834     scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
6835     DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
6836                                                          &as_state->build_scratch_memory_requirements);
6837 
6838     VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
6839     update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
6840     update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
6841     update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
6842     DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
6843                                                          &as_state->update_scratch_memory_requirements);
6844 
6845     accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
6846 }
6847 
PostCallRecordGetAccelerationStructureMemoryRequirementsNV(VkDevice device,const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo,VkMemoryRequirements2KHR * pMemoryRequirements)6848 void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
6849     VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
6850     ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
6851     if (as_state != nullptr) {
6852         if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
6853             as_state->memory_requirements = *pMemoryRequirements;
6854             as_state->memory_requirements_checked = true;
6855         } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
6856             as_state->build_scratch_memory_requirements = *pMemoryRequirements;
6857             as_state->build_scratch_memory_requirements_checked = true;
6858         } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
6859             as_state->update_scratch_memory_requirements = *pMemoryRequirements;
6860             as_state->update_scratch_memory_requirements_checked = true;
6861         }
6862     }
6863 }
ValidateBindAccelerationStructureMemoryNV(VkDevice device,const VkBindAccelerationStructureMemoryInfoNV & info) const6864 bool CoreChecks::ValidateBindAccelerationStructureMemoryNV(VkDevice device,
6865                                                            const VkBindAccelerationStructureMemoryInfoNV &info) const {
6866     bool skip = false;
6867 
6868     const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
6869     if (!as_state) {
6870         return skip;
6871     }
6872     uint64_t as_handle = HandleToUint64(info.accelerationStructure);
6873     if (!as_state->GetBoundMemory().empty()) {
6874         skip |=
6875             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
6876                     as_handle, "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450",
6877                     "vkBindAccelerationStructureMemoryNV(): accelerationStructure must not already be backed by a memory object.");
6878     }
6879 
6880     if (!as_state->memory_requirements_checked) {
6881         // There's not an explicit requirement in the spec to call vkGetImageMemoryRequirements() prior to calling
6882         // BindAccelerationStructureMemoryNV but it's implied in that memory being bound must conform with
6883         // VkAccelerationStructureMemoryRequirementsInfoNV from vkGetAccelerationStructureMemoryRequirementsNV
6884         skip |= log_msg(
6885             report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, as_handle,
6886             kVUID_Core_BindAccelNV_NoMemReqQuery,
6887             "vkBindAccelerationStructureMemoryNV(): "
6888             "Binding memory to %s but vkGetAccelerationStructureMemoryRequirementsNV() has not been called on that structure.",
6889             report_data->FormatHandle(info.accelerationStructure).c_str());
6890         // Use requirements gathered at create time for validation below...
6891     }
6892 
6893     // Validate bound memory range information
6894     const auto mem_info = GetDevMemState(info.memory);
6895     if (mem_info) {
6896         skip |= ValidateInsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
6897                                                                as_state->memory_requirements.memoryRequirements,
6898                                                                "vkBindAccelerationStructureMemoryNV()");
6899         skip |= ValidateMemoryTypes(mem_info, as_state->memory_requirements.memoryRequirements.memoryTypeBits,
6900                                     "vkBindAccelerationStructureMemoryNV()",
6901                                     "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593");
6902     }
6903 
6904     // Validate memory requirements alignment
6905     if (SafeModulo(info.memoryOffset, as_state->memory_requirements.memoryRequirements.alignment) != 0) {
6906         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
6907                         as_handle, "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594",
6908                         "vkBindAccelerationStructureMemoryNV(): memoryOffset is 0x%" PRIxLEAST64
6909                         " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
6910                         ", returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure"
6911                         "and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV.",
6912                         info.memoryOffset, as_state->memory_requirements.memoryRequirements.alignment);
6913     }
6914 
6915     if (mem_info) {
6916         // Validate memory requirements size
6917         if (as_state->memory_requirements.memoryRequirements.size > (mem_info->alloc_info.allocationSize - info.memoryOffset)) {
6918             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, as_handle,
6919                             "VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595",
6920                             "vkBindAccelerationStructureMemoryNV(): memory size minus memoryOffset is 0x%" PRIxLEAST64
6921                             " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
6922                             ", returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure"
6923                             "and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV.",
6924                             mem_info->alloc_info.allocationSize - info.memoryOffset,
6925                             as_state->memory_requirements.memoryRequirements.size);
6926         }
6927     }
6928 
6929     return skip;
6930 }
PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device,uint32_t bindInfoCount,const VkBindAccelerationStructureMemoryInfoNV * pBindInfos)6931 bool CoreChecks::PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
6932                                                                   const VkBindAccelerationStructureMemoryInfoNV *pBindInfos) {
6933     bool skip = false;
6934     for (uint32_t i = 0; i < bindInfoCount; i++) {
6935         skip |= ValidateBindAccelerationStructureMemoryNV(device, pBindInfos[i]);
6936     }
6937     return skip;
6938 }
6939 
PreCallValidateGetAccelerationStructureHandleNV(VkDevice device,VkAccelerationStructureNV accelerationStructure,size_t dataSize,void * pData)6940 bool CoreChecks::PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
6941                                                                  size_t dataSize, void *pData) {
6942     bool skip = false;
6943 
6944     const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
6945     if (as_state != nullptr) {
6946         // TODO: update the fake VUID below once the real one is generated.
6947         skip = ValidateMemoryIsBoundToAccelerationStructure(
6948             as_state, "vkGetAccelerationStructureHandleNV",
6949             "UNASSIGNED-vkGetAccelerationStructureHandleNV-accelerationStructure-XXXX");
6950     }
6951 
6952     return skip;
6953 }
6954 
PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device,uint32_t bindInfoCount,const VkBindAccelerationStructureMemoryInfoNV * pBindInfos,VkResult result)6955 void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
6956     VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
6957     if (VK_SUCCESS != result) return;
6958     for (uint32_t i = 0; i < bindInfoCount; i++) {
6959         const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
6960 
6961         ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
6962         if (as_state) {
6963             // Track bound memory range information
6964             auto mem_info = GetDevMemState(info.memory);
6965             if (mem_info) {
6966                 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
6967                                                        as_state->requirements);
6968             }
6969             // Track objects tied to memory
6970             SetMemBinding(info.memory, as_state, info.memoryOffset,
6971                           VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
6972         }
6973     }
6974 }
6975 
// Validates vkCmdBuildAccelerationStructureNV: queue capability, command-buffer state,
// per-geometry parameters, device limits, create-info vs build-info consistency,
// bound memory on dst, and (update vs fresh build) scratch-buffer sizing.
// Returns true when validation produced at least one error/warning ("skip" the call).
bool CoreChecks::PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData,
                                                                VkDeviceSize instanceOffset, VkBool32 update,
                                                                VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
                                                                VkBuffer scratch, VkDeviceSize scratchOffset) {
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    // Builds are only valid on queues that support compute.
    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBuildAccelerationStructureNV()", VK_QUEUE_COMPUTE_BIT,
                                      "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool");

    skip |= ValidateCmd(cb_state, CMD_BUILDACCELERATIONSTRUCTURENV, "vkCmdBuildAccelerationStructureNV()");

    // Per-geometry validation only applies to bottom-level builds (top-level builds consume instances).
    if (pInfo != nullptr && pInfo->type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
        for (uint32_t i = 0; i < pInfo->geometryCount; i++) {
            skip |= ValidateGeometryNV(pInfo->pGeometries[i], VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
                                       HandleToUint64(device), "vkCmdBuildAccelerationStructureNV():");
        }
    }

    // Device-limit check against VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount.
    if (pInfo != nullptr && pInfo->geometryCount > phys_dev_ext_props.ray_tracing_props.maxGeometryCount) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241",
                        "vkCmdBuildAccelerationStructureNV(): geometryCount [%d] must be less than or equal to "
                        "VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount.",
                        pInfo->geometryCount);
    }

    const ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    const BUFFER_STATE *scratch_buffer_state = GetBufferState(scratch);

    // The build info must be "compatible" with the info dst was created with:
    // identical type/flags, and create-time counts must be >= build-time counts
    // (VUID 02488 family).
    if (dst_as_state != nullptr && pInfo != nullptr) {
        if (dst_as_state->create_info.info.type != pInfo->type) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                            "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::type"
                            "[%s] must be identical to build info VkAccelerationStructureInfoNV::type [%s].",
                            string_VkAccelerationStructureTypeNV(dst_as_state->create_info.info.type),
                            string_VkAccelerationStructureTypeNV(pInfo->type));
        }
        if (dst_as_state->create_info.info.flags != pInfo->flags) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                            "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::flags"
                            "[0x%X] must be identical to build info VkAccelerationStructureInfoNV::flags [0x%X].",
                            dst_as_state->create_info.info.flags, pInfo->flags);
        }
        if (dst_as_state->create_info.info.instanceCount < pInfo->instanceCount) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                            "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::instanceCount "
                            "[%d] must be greater than or equal to build info VkAccelerationStructureInfoNV::instanceCount [%d].",
                            dst_as_state->create_info.info.instanceCount, pInfo->instanceCount);
        }
        if (dst_as_state->create_info.info.geometryCount < pInfo->geometryCount) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                            "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::geometryCount"
                            "[%d] must be greater than or equal to build info VkAccelerationStructureInfoNV::geometryCount [%d].",
                            dst_as_state->create_info.info.geometryCount, pInfo->geometryCount);
        } else {
            // geometryCount fits; still verify each geometry's create-time counts cover the build-time
            // counts. Only the first violating geometry is reported (break after each error).
            for (uint32_t i = 0; i < pInfo->geometryCount; i++) {
                const VkGeometryDataNV &create_geometry_data = dst_as_state->create_info.info.pGeometries[i].geometry;
                const VkGeometryDataNV &build_geometry_data = pInfo->pGeometries[i].geometry;
                if (create_geometry_data.triangles.vertexCount < build_geometry_data.triangles.vertexCount) {
                    skip |= log_msg(
                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                        "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.triangles.vertexCount [%d]"
                        "must be greater than or equal to build info pGeometries[%d].geometry.triangles.vertexCount [%d].",
                        i, create_geometry_data.triangles.vertexCount, i, build_geometry_data.triangles.vertexCount);
                    break;
                }
                if (create_geometry_data.triangles.indexCount < build_geometry_data.triangles.indexCount) {
                    skip |= log_msg(
                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                        "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.triangles.indexCount [%d]"
                        "must be greater than or equal to build info pGeometries[%d].geometry.triangles.indexCount [%d].",
                        i, create_geometry_data.triangles.indexCount, i, build_geometry_data.triangles.indexCount);
                    break;
                }
                if (create_geometry_data.aabbs.numAABBs < build_geometry_data.aabbs.numAABBs) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                                    "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.aabbs.numAABBs [%d]"
                                    "must be greater than or equal to build info pGeometries[%d].geometry.aabbs.numAABBs [%d].",
                                    i, create_geometry_data.aabbs.numAABBs, i, build_geometry_data.aabbs.numAABBs);
                    break;
                }
            }
        }
    }

    // dst must have device memory bound before it can be built into.
    if (dst_as_state != nullptr) {
        skip |= ValidateMemoryIsBoundToAccelerationStructure(
            dst_as_state, "vkCmdBuildAccelerationStructureNV()",
            "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
    }

    if (update == VK_TRUE) {
        // Update path: src must exist and must have been built with ALLOW_UPDATE;
        // scratch is sized against the UPDATE_SCRATCH memory requirements.
        if (src == VK_NULL_HANDLE) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
                            "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, src must not be VK_NULL_HANDLE.");
        } else {
            if (src_as_state == nullptr || !src_as_state->built ||
                !(src_as_state->build_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
                                "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, src must have been built before "
                                "with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in "
                                "VkAccelerationStructureInfoNV::flags.");
            }
        }
        // Warn (not error) if the app never queried update-scratch requirements for dst.
        if (dst_as_state != nullptr && !dst_as_state->update_scratch_memory_requirements_checked) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
                            HandleToUint64(dst), kVUID_Core_CmdBuildAccelNV_NoUpdateMemReqQuery,
                            "vkCmdBuildAccelerationStructureNV(): Updating %s but vkGetAccelerationStructureMemoryRequirementsNV() "
                            "has not been called for update scratch memory.",
                            report_data->FormatHandle(dst_as_state->acceleration_structure).c_str());
            // Use requirements fetched at create time
        }
        // Scratch must be large enough past (binding.offset + scratchOffset) for the update requirements.
        if (scratch_buffer_state != nullptr && dst_as_state != nullptr &&
            dst_as_state->update_scratch_memory_requirements.memoryRequirements.size >
                (scratch_buffer_state->binding.size - (scratch_buffer_state->binding.offset + scratchOffset))) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02492",
                            "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, The size member of the "
                            "VkMemoryRequirements structure returned from a call to "
                            "vkGetAccelerationStructureMemoryRequirementsNV with "
                            "VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and "
                            "VkAccelerationStructureMemoryRequirementsInfoNV::type set to "
                            "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than "
                            "or equal to the size of scratch minus scratchOffset");
        }
    } else {
        // Fresh-build path: scratch is sized against the BUILD_SCRATCH memory requirements.
        if (dst_as_state != nullptr && !dst_as_state->build_scratch_memory_requirements_checked) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
                            HandleToUint64(dst), kVUID_Core_CmdBuildAccelNV_NoScratchMemReqQuery,
                            "vkCmdBuildAccelerationStructureNV(): Assigning scratch buffer to %s but "
                            "vkGetAccelerationStructureMemoryRequirementsNV() has not been called for scratch memory.",
                            report_data->FormatHandle(dst_as_state->acceleration_structure).c_str());
            // Use requirements fetched at create time
        }
        if (scratch_buffer_state != nullptr && dst_as_state != nullptr &&
            dst_as_state->build_scratch_memory_requirements.memoryRequirements.size >
                (scratch_buffer_state->binding.size - (scratch_buffer_state->binding.offset + scratchOffset))) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02491",
                            "vkCmdBuildAccelerationStructureNV(): If update is VK_FALSE, The size member of the "
                            "VkMemoryRequirements structure returned from a call to "
                            "vkGetAccelerationStructureMemoryRequirementsNV with "
                            "VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and "
                            "VkAccelerationStructureMemoryRequirementsInfoNV::type set to "
                            "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than "
                            "or equal to the size of scratch minus scratchOffset");
        }
    }
    return skip;
}
7137 
PostCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,const VkAccelerationStructureInfoNV * pInfo,VkBuffer instanceData,VkDeviceSize instanceOffset,VkBool32 update,VkAccelerationStructureNV dst,VkAccelerationStructureNV src,VkBuffer scratch,VkDeviceSize scratchOffset)7138 void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
7139     VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
7140     VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
7141     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7142     if (cb_state) {
7143         ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
7144         ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
7145         if (dst_as_state != nullptr) {
7146             dst_as_state->built = true;
7147             dst_as_state->build_info.initialize(pInfo);
7148             AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
7149         }
7150         if (src_as_state != nullptr) {
7151             AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
7152         }
7153     }
7154 }
7155 
PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,VkAccelerationStructureNV dst,VkAccelerationStructureNV src,VkCopyAccelerationStructureModeNV mode)7156 bool CoreChecks::PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst,
7157                                                                VkAccelerationStructureNV src,
7158                                                                VkCopyAccelerationStructureModeNV mode) {
7159     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7160     assert(cb_state);
7161     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdCopyAccelerationStructureNV()", VK_QUEUE_COMPUTE_BIT,
7162                                       "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool");
7163 
7164     skip |= ValidateCmd(cb_state, CMD_COPYACCELERATIONSTRUCTURENV, "vkCmdCopyAccelerationStructureNV()");
7165 
7166     const ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
7167     const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
7168 
7169     if (dst_as_state != nullptr) {
7170         skip |= ValidateMemoryIsBoundToAccelerationStructure(
7171             dst_as_state, "vkCmdBuildAccelerationStructureNV()",
7172             "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
7173     }
7174 
7175     if (mode == VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV) {
7176         if (src_as_state != nullptr &&
7177             (!src_as_state->built || !(src_as_state->build_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV))) {
7178             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7179                             HandleToUint64(commandBuffer), "VUID-vkCmdCopyAccelerationStructureNV-src-02497",
7180                             "vkCmdCopyAccelerationStructureNV(): src must have been built with "
7181                             "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if mode is "
7182                             "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV.");
7183         }
7184     }
7185     return skip;
7186 }
7187 
PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,VkAccelerationStructureNV dst,VkAccelerationStructureNV src,VkCopyAccelerationStructureModeNV mode)7188 void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
7189                                                                           VkAccelerationStructureNV dst,
7190                                                                           VkAccelerationStructureNV src,
7191                                                                           VkCopyAccelerationStructureModeNV mode) {
7192     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7193     if (cb_state) {
7194         ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
7195         ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
7196         if (dst_as_state != nullptr && src_as_state != nullptr) {
7197             dst_as_state->built = true;
7198             dst_as_state->build_info = src_as_state->build_info;
7199             AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
7200             AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
7201         }
7202     }
7203 }
7204 
PreCallValidateDestroyAccelerationStructureNV(VkDevice device,VkAccelerationStructureNV accelerationStructure,const VkAllocationCallbacks * pAllocator)7205 bool CoreChecks::PreCallValidateDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
7206                                                                const VkAllocationCallbacks *pAllocator) {
7207     const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
7208     const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
7209     bool skip = false;
7210     if (as_state) {
7211         skip |= ValidateObjectNotInUse(as_state, obj_struct, "vkDestroyAccelerationStructureNV",
7212                                        "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02442");
7213     }
7214     return skip;
7215 }
7216 
PreCallRecordDestroyAccelerationStructureNV(VkDevice device,VkAccelerationStructureNV accelerationStructure,const VkAllocationCallbacks * pAllocator)7217 void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
7218                                                                          VkAccelerationStructureNV accelerationStructure,
7219                                                                          const VkAllocationCallbacks *pAllocator) {
7220     if (!accelerationStructure) return;
7221     auto *as_state = GetAccelerationStructureState(accelerationStructure);
7222     if (as_state) {
7223         const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
7224         InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
7225         for (auto mem_binding : as_state->GetBoundMemory()) {
7226             auto mem_info = GetDevMemState(mem_binding);
7227             if (mem_info) {
7228                 RemoveAccelerationStructureMemoryRange(HandleToUint64(accelerationStructure), mem_info);
7229             }
7230         }
7231         ClearMemoryObjectBindings(obj_struct);
7232         accelerationStructureMap.erase(accelerationStructure);
7233     }
7234 }
7235 
PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer,float lineWidth)7236 bool CoreChecks::PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
7237     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7238     assert(cb_state);
7239     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetLineWidth()", VK_QUEUE_GRAPHICS_BIT,
7240                                       "VUID-vkCmdSetLineWidth-commandBuffer-cmdpool");
7241     skip |= ValidateCmd(cb_state, CMD_SETLINEWIDTH, "vkCmdSetLineWidth()");
7242 
7243     if (cb_state->static_status & CBSTATUS_LINE_WIDTH_SET) {
7244         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7245                         HandleToUint64(commandBuffer), "VUID-vkCmdSetLineWidth-None-00787",
7246                         "vkCmdSetLineWidth called but pipeline was created without VK_DYNAMIC_STATE_LINE_WIDTH flag.");
7247     }
7248     return skip;
7249 }
7250 
PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer,float lineWidth)7251 void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
7252     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7253     cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
7254 }
7255 
PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer,uint32_t lineStippleFactor,uint16_t lineStipplePattern)7256 bool CoreChecks::PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
7257                                                      uint16_t lineStipplePattern) {
7258     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7259     assert(cb_state);
7260     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetLineStippleEXT()", VK_QUEUE_GRAPHICS_BIT,
7261                                       "VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool");
7262     skip |= ValidateCmd(cb_state, CMD_SETLINESTIPPLEEXT, "vkCmdSetLineStippleEXT()");
7263 
7264     if (cb_state->static_status & CBSTATUS_LINE_STIPPLE_SET) {
7265         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7266                         HandleToUint64(commandBuffer), "VUID-vkCmdSetLineStippleEXT-None-02775",
7267                         "vkCmdSetLineStippleEXT called but pipeline was created without VK_DYNAMIC_STATE_LINE_STIPPLE_EXT flag.");
7268     }
7269     return skip;
7270 }
7271 
PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer,uint32_t lineStippleFactor,uint16_t lineStipplePattern)7272 void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
7273                                                                uint16_t lineStipplePattern) {
7274     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7275     cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
7276 }
7277 
PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer,float depthBiasConstantFactor,float depthBiasClamp,float depthBiasSlopeFactor)7278 bool CoreChecks::PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
7279                                                 float depthBiasSlopeFactor) {
7280     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7281     assert(cb_state);
7282     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBias()", VK_QUEUE_GRAPHICS_BIT,
7283                                       "VUID-vkCmdSetDepthBias-commandBuffer-cmdpool");
7284     skip |= ValidateCmd(cb_state, CMD_SETDEPTHBIAS, "vkCmdSetDepthBias()");
7285     if (cb_state->static_status & CBSTATUS_DEPTH_BIAS_SET) {
7286         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7287                         HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBias-None-00789",
7288                         "vkCmdSetDepthBias(): pipeline was created without VK_DYNAMIC_STATE_DEPTH_BIAS flag..");
7289     }
7290     if ((depthBiasClamp != 0.0) && (!enabled_features.core.depthBiasClamp)) {
7291         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7292                         HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBias-depthBiasClamp-00790",
7293                         "vkCmdSetDepthBias(): the depthBiasClamp device feature is disabled: the depthBiasClamp parameter must "
7294                         "be set to 0.0.");
7295     }
7296     return skip;
7297 }
7298 
PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer,float depthBiasConstantFactor,float depthBiasClamp,float depthBiasSlopeFactor)7299 void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
7300                                                           float depthBiasClamp, float depthBiasSlopeFactor) {
7301     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7302     cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
7303 }
7304 
PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer,const float blendConstants[4])7305 bool CoreChecks::PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
7306     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7307     assert(cb_state);
7308     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetBlendConstants()", VK_QUEUE_GRAPHICS_BIT,
7309                                       "VUID-vkCmdSetBlendConstants-commandBuffer-cmdpool");
7310     skip |= ValidateCmd(cb_state, CMD_SETBLENDCONSTANTS, "vkCmdSetBlendConstants()");
7311     if (cb_state->static_status & CBSTATUS_BLEND_CONSTANTS_SET) {
7312         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7313                         HandleToUint64(commandBuffer), "VUID-vkCmdSetBlendConstants-None-00612",
7314                         "vkCmdSetBlendConstants(): pipeline was created without VK_DYNAMIC_STATE_BLEND_CONSTANTS flag..");
7315     }
7316     return skip;
7317 }
7318 
PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer,const float blendConstants[4])7319 void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
7320     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7321     cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
7322 }
7323 
PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer,float minDepthBounds,float maxDepthBounds)7324 bool CoreChecks::PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
7325     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7326     assert(cb_state);
7327     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBounds()", VK_QUEUE_GRAPHICS_BIT,
7328                                       "VUID-vkCmdSetDepthBounds-commandBuffer-cmdpool");
7329     skip |= ValidateCmd(cb_state, CMD_SETDEPTHBOUNDS, "vkCmdSetDepthBounds()");
7330     if (cb_state->static_status & CBSTATUS_DEPTH_BOUNDS_SET) {
7331         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7332                         HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBounds-None-00599",
7333                         "vkCmdSetDepthBounds(): pipeline was created without VK_DYNAMIC_STATE_DEPTH_BOUNDS flag..");
7334     }
7335     return skip;
7336 }
7337 
PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer,float minDepthBounds,float maxDepthBounds)7338 void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
7339                                                             float maxDepthBounds) {
7340     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7341     cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
7342 }
7343 
PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t compareMask)7344 bool CoreChecks::PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
7345                                                          uint32_t compareMask) {
7346     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7347     assert(cb_state);
7348     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilCompareMask()", VK_QUEUE_GRAPHICS_BIT,
7349                                       "VUID-vkCmdSetStencilCompareMask-commandBuffer-cmdpool");
7350     skip |= ValidateCmd(cb_state, CMD_SETSTENCILCOMPAREMASK, "vkCmdSetStencilCompareMask()");
7351     if (cb_state->static_status & CBSTATUS_STENCIL_READ_MASK_SET) {
7352         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7353                         HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilCompareMask-None-00602",
7354                         "vkCmdSetStencilCompareMask(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK flag..");
7355     }
7356     return skip;
7357 }
7358 
PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t compareMask)7359 void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
7360                                                                    uint32_t compareMask) {
7361     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7362     cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
7363 }
7364 
PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t writeMask)7365 bool CoreChecks::PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
7366                                                        uint32_t writeMask) {
7367     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7368     assert(cb_state);
7369     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilWriteMask()", VK_QUEUE_GRAPHICS_BIT,
7370                                       "VUID-vkCmdSetStencilWriteMask-commandBuffer-cmdpool");
7371     skip |= ValidateCmd(cb_state, CMD_SETSTENCILWRITEMASK, "vkCmdSetStencilWriteMask()");
7372     if (cb_state->static_status & CBSTATUS_STENCIL_WRITE_MASK_SET) {
7373         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7374                         HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilWriteMask-None-00603",
7375                         "vkCmdSetStencilWriteMask(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_WRITE_MASK flag..");
7376     }
7377     return skip;
7378 }
7379 
PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t writeMask)7380 void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
7381                                                                  uint32_t writeMask) {
7382     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7383     cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
7384 }
7385 
PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t reference)7386 bool CoreChecks::PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
7387                                                        uint32_t reference) {
7388     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7389     assert(cb_state);
7390     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilReference()", VK_QUEUE_GRAPHICS_BIT,
7391                                       "VUID-vkCmdSetStencilReference-commandBuffer-cmdpool");
7392     skip |= ValidateCmd(cb_state, CMD_SETSTENCILREFERENCE, "vkCmdSetStencilReference()");
7393     if (cb_state->static_status & CBSTATUS_STENCIL_REFERENCE_SET) {
7394         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7395                         HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilReference-None-00604",
7396                         "vkCmdSetStencilReference(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_REFERENCE flag..");
7397     }
7398     return skip;
7399 }
7400 
PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t reference)7401 void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
7402                                                                  uint32_t reference) {
7403     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7404     cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
7405 }
7406 
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
//
// On exit, last_bound.per_set[first_set .. first_set+set_count-1] holds the newly bound sets
// (with their dynamic offsets and canonical compat ids), sets before first_set that are layout-
// incompatible with pipeline_layout are invalidated, and any push descriptor set disturbed by
// the update has been released.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    // Exactly one of the two set sources must be provided (bind path XOR push path)
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else.
    // Releases the tracked push descriptor set if 'ds' is it; returns true when released.
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                // At most one push descriptor set can be bound, so stop scanning once it's found
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's slice of the caller-supplied dynamic offsets
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
7500 
7501 // Update the bound state for the bind point, including the effects of incompatible pipeline layouts
PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t firstSet,uint32_t setCount,const VkDescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets)7502 void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
7503                                                                 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
7504                                                                 uint32_t firstSet, uint32_t setCount,
7505                                                                 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
7506                                                                 const uint32_t *pDynamicOffsets) {
7507     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7508     auto pipeline_layout = GetPipelineLayout(layout);
7509 
7510     // Resize binding arrays
7511     uint32_t last_set_index = firstSet + setCount - 1;
7512     if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
7513         cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
7514     }
7515 
7516     UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
7517                                   dynamicOffsetCount, pDynamicOffsets);
7518     cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
7519 }
7520 
ValidateDynamicOffsetAlignment(const debug_report_data * report_data,const VkDescriptorSetLayoutBinding * binding,VkDescriptorType test_type,VkDeviceSize alignment,const uint32_t * pDynamicOffsets,const char * err_msg,const char * limit_name,uint32_t * offset_idx)7521 static bool ValidateDynamicOffsetAlignment(const debug_report_data *report_data, const VkDescriptorSetLayoutBinding *binding,
7522                                            VkDescriptorType test_type, VkDeviceSize alignment, const uint32_t *pDynamicOffsets,
7523                                            const char *err_msg, const char *limit_name, uint32_t *offset_idx) {
7524     bool skip = false;
7525     if (binding->descriptorType == test_type) {
7526         const auto end_idx = *offset_idx + binding->descriptorCount;
7527         for (uint32_t current_idx = *offset_idx; current_idx < end_idx; current_idx++) {
7528             if (SafeModulo(pDynamicOffsets[current_idx], alignment) != 0) {
7529                 skip |= log_msg(
7530                     report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, err_msg,
7531                     "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of device limit %s 0x%" PRIxLEAST64
7532                     ".",
7533                     current_idx, pDynamicOffsets[current_idx], limit_name, alignment);
7534             }
7535         }
7536         *offset_idx = end_idx;
7537     }
7538     return skip;
7539 }
7540 
// Validate vkCmdBindDescriptorSets(): queue capabilities, set/pipeline-layout compatibility,
// and that pDynamicOffsets supplies exactly one properly aligned offset per dynamic descriptor
// in the bound sets.
bool CoreChecks::PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                      VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
                                                      const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                      const uint32_t *pDynamicOffsets) {
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    bool skip = false;
    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdBindDescriptorSets()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
                                  "VUID-vkCmdBindDescriptorSets-commandBuffer-cmdpool");
    skip |= ValidateCmd(cb_state, CMD_BINDDESCRIPTORSETS, "vkCmdBindDescriptorSets()");
    // Track total count of dynamic descriptor types to make sure we have an offset for each one
    uint32_t total_dynamic_descriptors = 0;
    string error_string = "";

    const auto *pipeline_layout = GetPipelineLayout(layout);
    for (uint32_t set_idx = 0; set_idx < setCount; set_idx++) {
        const cvdescriptorset::DescriptorSet *descriptor_set = GetSetNode(pDescriptorSets[set_idx]);
        if (descriptor_set) {
            // Verify that set being bound is compatible with overlapping setLayout of pipelineLayout
            if (!VerifySetLayoutCompatibility(descriptor_set, pipeline_layout, set_idx + firstSet, error_string)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
                            HandleToUint64(pDescriptorSets[set_idx]), "VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358",
                            "descriptorSet #%u being bound is not compatible with overlapping descriptorSetLayout at index %u of "
                            "%s due to: %s.",
                            set_idx, set_idx + firstSet, report_data->FormatHandle(layout).c_str(), error_string.c_str());
            }

            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            if (set_dynamic_descriptor_count) {
                // First make sure we won't overstep bounds of pDynamicOffsets array
                if ((total_dynamic_descriptors + set_dynamic_descriptor_count) > dynamicOffsetCount) {
                    // Test/report this here, such that we don't run past the end of pDynamicOffsets in the else clause
                    skip |=
                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
                                HandleToUint64(pDescriptorSets[set_idx]), "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
                                "descriptorSet #%u (%s) requires %u dynamicOffsets, but only %u dynamicOffsets are left in "
                                "pDynamicOffsets array. There must be one dynamic offset for each dynamic descriptor being bound.",
                                set_idx, report_data->FormatHandle(pDescriptorSets[set_idx]).c_str(),
                                descriptor_set->GetDynamicDescriptorCount(), (dynamicOffsetCount - total_dynamic_descriptors));
                    // Set the number found to the maximum to prevent duplicate messages, or subsquent descriptor sets from
                    // testing against the "short tail" we're skipping below.
                    total_dynamic_descriptors = dynamicOffsetCount;
                } else {  // Validate dynamic offsets and Dynamic Offset Minimums
                    // cur_dyn_offset walks this set's slice of pDynamicOffsets, advanced per-binding
                    // by ValidateDynamicOffsetAlignment for each matching dynamic descriptor type.
                    uint32_t cur_dyn_offset = total_dynamic_descriptors;
                    const auto dsl = descriptor_set->GetLayout();
                    const auto binding_count = dsl->GetBindingCount();
                    const auto &limits = phys_dev_props.limits;
                    for (uint32_t binding_idx = 0; binding_idx < binding_count; binding_idx++) {
                        const auto *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
                        skip |= ValidateDynamicOffsetAlignment(report_data, binding, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                                                               limits.minUniformBufferOffsetAlignment, pDynamicOffsets,
                                                               "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01971",
                                                               "minUniformBufferOffsetAlignment", &cur_dyn_offset);
                        skip |= ValidateDynamicOffsetAlignment(report_data, binding, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
                                                               limits.minStorageBufferOffsetAlignment, pDynamicOffsets,
                                                               "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01972",
                                                               "minStorageBufferOffsetAlignment", &cur_dyn_offset);
                    }
                    // Keep running total of dynamic descriptor count to verify at the end
                    total_dynamic_descriptors += set_dynamic_descriptor_count;
                }
            }
        } else {
            // Unknown handle: the set was never created or has been destroyed
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
                            HandleToUint64(pDescriptorSets[set_idx]), kVUID_Core_DrawState_InvalidSet,
                            "Attempt to bind %s that doesn't exist!", report_data->FormatHandle(pDescriptorSets[set_idx]).c_str());
        }
    }
    //  dynamicOffsetCount must equal the total number of dynamic descriptors in the sets being bound
    if (total_dynamic_descriptors != dynamicOffsetCount) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
                        "Attempting to bind %u descriptorSets with %u dynamic descriptors, but dynamicOffsetCount is %u. It should "
                        "exactly match the number of dynamic descriptors.",
                        setCount, total_dynamic_descriptors, dynamicOffsetCount);
    }
    return skip;
}
7620 
7621 // Validates that the supplied bind point is supported for the command buffer (vis. the command pool)
7622 // Takes array of error codes as some of the VUID's (e.g. vkCmdBindPipeline) are written per bindpoint
7623 // TODO add vkCmdBindPipeline bind_point validation using this call.
ValidatePipelineBindPoint(const CMD_BUFFER_STATE * cb_state,VkPipelineBindPoint bind_point,const char * func_name,const std::map<VkPipelineBindPoint,std::string> & bind_errors) const7624 bool CoreChecks::ValidatePipelineBindPoint(const CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point, const char *func_name,
7625                                            const std::map<VkPipelineBindPoint, std::string> &bind_errors) const {
7626     bool skip = false;
7627     auto pool = GetCommandPoolState(cb_state->createInfo.commandPool);
7628     if (pool) {  // The loss of a pool in a recording cmd is reported in DestroyCommandPool
7629         static const std::map<VkPipelineBindPoint, VkQueueFlags> flag_mask = {
7630             std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, static_cast<VkQueueFlags>(VK_QUEUE_GRAPHICS_BIT)),
7631             std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, static_cast<VkQueueFlags>(VK_QUEUE_COMPUTE_BIT)),
7632             std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
7633                            static_cast<VkQueueFlags>(VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT)),
7634         };
7635         const auto &qfp = GetPhysicalDeviceState()->queue_family_properties[pool->queueFamilyIndex];
7636         if (0 == (qfp.queueFlags & flag_mask.at(bind_point))) {
7637             const std::string &error = bind_errors.at(bind_point);
7638             auto cb_u64 = HandleToUint64(cb_state->commandBuffer);
7639             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_u64,
7640                             error, "%s: %s was allocated from %s that does not support bindpoint %s.", func_name,
7641                             report_data->FormatHandle(cb_state->commandBuffer).c_str(),
7642                             report_data->FormatHandle(cb_state->createInfo.commandPool).c_str(),
7643                             string_VkPipelineBindPoint(bind_point));
7644         }
7645     }
7646     return skip;
7647 }
7648 
PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites)7649 bool CoreChecks::PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
7650                                                         VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
7651                                                         const VkWriteDescriptorSet *pDescriptorWrites) {
7652     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7653     assert(cb_state);
7654     const char *func_name = "vkCmdPushDescriptorSetKHR()";
7655     bool skip = false;
7656     skip |= ValidateCmd(cb_state, CMD_PUSHDESCRIPTORSETKHR, func_name);
7657     skip |= ValidateCmdQueueFlags(cb_state, func_name, (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT),
7658                                   "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
7659 
7660     static const std::map<VkPipelineBindPoint, std::string> bind_errors = {
7661         std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363"),
7662         std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363"),
7663         std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363")};
7664 
7665     skip |= ValidatePipelineBindPoint(cb_state, pipelineBindPoint, func_name, bind_errors);
7666     auto layout_data = GetPipelineLayout(layout);
7667 
7668     // Validate the set index points to a push descriptor set and is in range
7669     if (layout_data) {
7670         const auto &set_layouts = layout_data->set_layouts;
7671         const auto layout_u64 = HandleToUint64(layout);
7672         if (set < set_layouts.size()) {
7673             const auto dsl = set_layouts[set];
7674             if (dsl) {
7675                 if (!dsl->IsPushDescriptor()) {
7676                     skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
7677                                    layout_u64, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
7678                                    "%s: Set index %" PRIu32 " does not match push descriptor set layout index for %s.", func_name,
7679                                    set, report_data->FormatHandle(layout).c_str());
7680                 } else {
7681                     // Create an empty proxy in order to use the existing descriptor set update validation
7682                     // TODO move the validation (like this) that doesn't need descriptor set state to the DSL object so we
7683                     // don't have to do this.
7684                     cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, VK_NULL_HANDLE, dsl, 0, this);
7685                     skip |= ValidatePushDescriptorsUpdate(&proxy_ds, descriptorWriteCount, pDescriptorWrites, func_name);
7686                 }
7687             }
7688         } else {
7689             skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, layout_u64,
7690                            "VUID-vkCmdPushDescriptorSetKHR-set-00364",
7691                            "%s: Set index %" PRIu32 " is outside of range for %s (set < %" PRIu32 ").", func_name, set,
7692                            report_data->FormatHandle(layout).c_str(), static_cast<uint32_t>(set_layouts.size()));
7693         }
7694     }
7695 
7696     return skip;
7697 }
7698 
// Record the state effects of a push-descriptor update: (re)create the tracked push descriptor
// set if it is missing or layout-incompatible, bind it at 'set', and apply pDescriptorWrites to it.
// Invalid inputs (bad layout, out-of-range set, non-push layout) are ignored; the validate phase
// has already reported them.
void CoreChecks::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                 const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, 0, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(descriptorWriteCount, pDescriptorWrites);
}
7724 
PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites)7725 void CoreChecks::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
7726                                                       VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
7727                                                       const VkWriteDescriptorSet *pDescriptorWrites) {
7728     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7729     RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
7730 }
7731 
GetIndexAlignment(VkIndexType indexType)7732 static VkDeviceSize GetIndexAlignment(VkIndexType indexType) {
7733     switch (indexType) {
7734         case VK_INDEX_TYPE_UINT16:
7735             return 2;
7736         case VK_INDEX_TYPE_UINT32:
7737             return 4;
7738         case VK_INDEX_TYPE_UINT8_EXT:
7739             return 1;
7740         default:
7741             // Not a real index type. Express no alignment requirement here; we expect upper layer
7742             // to have already picked up on the enum being nonsense.
7743             return 1;
7744     }
7745 }
7746 
PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,VkIndexType indexType)7747 bool CoreChecks::PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
7748                                                    VkIndexType indexType) {
7749     const auto buffer_state = GetBufferState(buffer);
7750     const auto cb_node = GetCBState(commandBuffer);
7751     assert(buffer_state);
7752     assert(cb_node);
7753 
7754     bool skip =
7755         ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, true, "VUID-vkCmdBindIndexBuffer-buffer-00433",
7756                                  "vkCmdBindIndexBuffer()", "VK_BUFFER_USAGE_INDEX_BUFFER_BIT");
7757     skip |= ValidateCmdQueueFlags(cb_node, "vkCmdBindIndexBuffer()", VK_QUEUE_GRAPHICS_BIT,
7758                                   "VUID-vkCmdBindIndexBuffer-commandBuffer-cmdpool");
7759     skip |= ValidateCmd(cb_node, CMD_BINDINDEXBUFFER, "vkCmdBindIndexBuffer()");
7760     skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindIndexBuffer()", "VUID-vkCmdBindIndexBuffer-buffer-00434");
7761     const auto offset_align = GetIndexAlignment(indexType);
7762     if (offset % offset_align) {
7763         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
7764                         HandleToUint64(commandBuffer), "VUID-vkCmdBindIndexBuffer-offset-00432",
7765                         "vkCmdBindIndexBuffer() offset (0x%" PRIxLEAST64 ") does not fall on alignment (%s) boundary.", offset,
7766                         string_VkIndexType(indexType));
7767     }
7768 
7769     return skip;
7770 }
7771 
PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,VkIndexType indexType)7772 void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
7773                                                              VkIndexType indexType) {
7774     auto buffer_state = GetBufferState(buffer);
7775     auto cb_state = GetCBState(commandBuffer);
7776 
7777     cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
7778     cb_state->index_buffer_binding.buffer = buffer;
7779     cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
7780     cb_state->index_buffer_binding.offset = offset;
7781     cb_state->index_buffer_binding.index_type = indexType;
7782     // Add binding for this index buffer to this commandbuffer
7783     AddCommandBufferBindingBuffer(cb_state, buffer_state);
7784 }
7785 
PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer,uint32_t firstBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets)7786 bool CoreChecks::PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
7787                                                      const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
7788     const auto cb_state = GetCBState(commandBuffer);
7789     assert(cb_state);
7790 
7791     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindVertexBuffers()", VK_QUEUE_GRAPHICS_BIT,
7792                                       "VUID-vkCmdBindVertexBuffers-commandBuffer-cmdpool");
7793     skip |= ValidateCmd(cb_state, CMD_BINDVERTEXBUFFERS, "vkCmdBindVertexBuffers()");
7794     for (uint32_t i = 0; i < bindingCount; ++i) {
7795         const auto buffer_state = GetBufferState(pBuffers[i]);
7796         assert(buffer_state);
7797         skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, true,
7798                                          "VUID-vkCmdBindVertexBuffers-pBuffers-00627", "vkCmdBindVertexBuffers()",
7799                                          "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT");
7800         skip |=
7801             ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindVertexBuffers()", "VUID-vkCmdBindVertexBuffers-pBuffers-00628");
7802         if (pOffsets[i] >= buffer_state->createInfo.size) {
7803             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
7804                             HandleToUint64(buffer_state->buffer), "VUID-vkCmdBindVertexBuffers-pOffsets-00626",
7805                             "vkCmdBindVertexBuffers() offset (0x%" PRIxLEAST64 ") is beyond the end of the buffer.", pOffsets[i]);
7806         }
7807     }
7808     return skip;
7809 }
7810 
PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer,uint32_t firstBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets)7811 void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
7812                                                                uint32_t bindingCount, const VkBuffer *pBuffers,
7813                                                                const VkDeviceSize *pOffsets) {
7814     auto cb_state = GetCBState(commandBuffer);
7815 
7816     uint32_t end = firstBinding + bindingCount;
7817     if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
7818         cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
7819     }
7820 
7821     for (uint32_t i = 0; i < bindingCount; ++i) {
7822         auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
7823         vertex_buffer_binding.buffer = pBuffers[i];
7824         vertex_buffer_binding.offset = pOffsets[i];
7825         // Add binding for this vertex buffer to this commandbuffer
7826         AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
7827     }
7828 }
7829 
7830 // Validate that an image's sampleCount matches the requirement for a specific API call
ValidateImageSampleCount(const IMAGE_STATE * image_state,VkSampleCountFlagBits sample_count,const char * location,const std::string & msgCode) const7831 bool CoreChecks::ValidateImageSampleCount(const IMAGE_STATE *image_state, VkSampleCountFlagBits sample_count, const char *location,
7832                                           const std::string &msgCode) const {
7833     bool skip = false;
7834     if (image_state->createInfo.samples != sample_count) {
7835         skip =
7836             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
7837                     HandleToUint64(image_state->image), msgCode, "%s for %s was created with a sample count of %s but must be %s.",
7838                     location, report_data->FormatHandle(image_state->image).c_str(),
7839                     string_VkSampleCountFlagBits(image_state->createInfo.samples), string_VkSampleCountFlagBits(sample_count));
7840     }
7841     return skip;
7842 }
7843 
PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize dataSize,const void * pData)7844 bool CoreChecks::PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
7845                                                 VkDeviceSize dataSize, const void *pData) {
7846     const auto cb_state = GetCBState(commandBuffer);
7847     assert(cb_state);
7848     const auto dst_buffer_state = GetBufferState(dstBuffer);
7849     assert(dst_buffer_state);
7850 
7851     bool skip = false;
7852     skip |= ValidateMemoryIsBoundToBuffer(dst_buffer_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-dstBuffer-00035");
7853     // Validate that DST buffer has correct usage flags set
7854     skip |=
7855         ValidateBufferUsageFlags(dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdUpdateBuffer-dstBuffer-00034",
7856                                  "vkCmdUpdateBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
7857     skip |=
7858         ValidateCmdQueueFlags(cb_state, "vkCmdUpdateBuffer()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
7859                               "VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool");
7860     skip |= ValidateCmd(cb_state, CMD_UPDATEBUFFER, "vkCmdUpdateBuffer()");
7861     skip |= InsideRenderPass(cb_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-renderpass");
7862     return skip;
7863 }
7864 
PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize dataSize,const void * pData)7865 void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
7866                                                            VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
7867     auto cb_state = GetCBState(commandBuffer);
7868     auto dst_buffer_state = GetBufferState(dstBuffer);
7869 
7870     // Update bindings between buffer and cmd buffer
7871     AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
7872 }
7873 
SetEventStageMask(VkQueue queue,VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask)7874 bool CoreChecks::SetEventStageMask(VkQueue queue, VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
7875     CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
7876     if (pCB) {
7877         pCB->eventToStageMap[event] = stageMask;
7878     }
7879     auto queue_data = queueMap.find(queue);
7880     if (queue_data != queueMap.end()) {
7881         queue_data->second.eventToStageMap[event] = stageMask;
7882     }
7883     return false;
7884 }
7885 
PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask)7886 bool CoreChecks::PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
7887     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7888     assert(cb_state);
7889     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
7890                                       "VUID-vkCmdSetEvent-commandBuffer-cmdpool");
7891     skip |= ValidateCmd(cb_state, CMD_SETEVENT, "vkCmdSetEvent()");
7892     skip |= InsideRenderPass(cb_state, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-renderpass");
7893     skip |= ValidateStageMaskGsTsEnables(stageMask, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-stageMask-01150",
7894                                          "VUID-vkCmdSetEvent-stageMask-01151", "VUID-vkCmdSetEvent-stageMask-02107",
7895                                          "VUID-vkCmdSetEvent-stageMask-02108");
7896     return skip;
7897 }
7898 
PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask)7899 void CoreChecks::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
7900     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7901     auto event_state = GetEventState(event);
7902     if (event_state) {
7903         AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
7904         event_state->cb_bindings.insert(cb_state);
7905     }
7906     cb_state->events.push_back(event);
7907     if (!cb_state->waitedEvents.count(event)) {
7908         cb_state->writeEventsBeforeWait.push_back(event);
7909     }
7910     cb_state->eventUpdates.emplace_back([=](VkQueue q) { return SetEventStageMask(q, commandBuffer, event, stageMask); });
7911 }
7912 
PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask)7913 bool CoreChecks::PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
7914     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7915     assert(cb_state);
7916 
7917     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdResetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
7918                                       "VUID-vkCmdResetEvent-commandBuffer-cmdpool");
7919     skip |= ValidateCmd(cb_state, CMD_RESETEVENT, "vkCmdResetEvent()");
7920     skip |= InsideRenderPass(cb_state, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-renderpass");
7921     skip |= ValidateStageMaskGsTsEnables(stageMask, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-stageMask-01154",
7922                                          "VUID-vkCmdResetEvent-stageMask-01155", "VUID-vkCmdResetEvent-stageMask-02109",
7923                                          "VUID-vkCmdResetEvent-stageMask-02110");
7924     return skip;
7925 }
7926 
PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask)7927 void CoreChecks::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
7928     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7929     auto event_state = GetEventState(event);
7930     if (event_state) {
7931         AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
7932         event_state->cb_bindings.insert(cb_state);
7933     }
7934     cb_state->events.push_back(event);
7935     if (!cb_state->waitedEvents.count(event)) {
7936         cb_state->writeEventsBeforeWait.push_back(event);
7937     }
7938     // TODO : Add check for "VUID-vkResetEvent-event-01148"
7939     cb_state->eventUpdates.emplace_back(
7940         [=](VkQueue q) { return SetEventStageMask(q, commandBuffer, event, VkPipelineStageFlags(0)); });
7941 }
7942 
7943 // Return input pipeline stage flags, expanded for individual bits if VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT is set
ExpandPipelineStageFlags(const DeviceExtensions & extensions,VkPipelineStageFlags inflags)7944 static VkPipelineStageFlags ExpandPipelineStageFlags(const DeviceExtensions &extensions, VkPipelineStageFlags inflags) {
7945     if (~inflags & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) return inflags;
7946 
7947     return (inflags & ~VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) |
7948            (VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT |
7949             (extensions.vk_nv_mesh_shader ? (VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV | VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV) : 0) |
7950             VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
7951             VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT |
7952             VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
7953             VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT |
7954             VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT |
7955             (extensions.vk_ext_conditional_rendering ? VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT : 0) |
7956             (extensions.vk_ext_transform_feedback ? VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT : 0) |
7957             (extensions.vk_nv_shading_rate_image ? VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV : 0) |
7958             (extensions.vk_ext_fragment_density_map ? VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT : 0));
7959 }
7960 
HasNonFramebufferStagePipelineStageFlags(VkPipelineStageFlags inflags)7961 static bool HasNonFramebufferStagePipelineStageFlags(VkPipelineStageFlags inflags) {
7962     return (inflags & ~(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
7963                         VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT)) != 0;
7964 }
7965 
GetGraphicsPipelineStageLogicalOrdinal(VkPipelineStageFlagBits flag)7966 static int GetGraphicsPipelineStageLogicalOrdinal(VkPipelineStageFlagBits flag) {
7967     // Note that the list (and lookup) ignore invalid-for-enabled-extension condition.  This should be checked elsewhere
7968     // and would greatly complicate this intentionally simple implementation
7969     // clang-format off
7970     const VkPipelineStageFlagBits ordered_array[] = {
7971         VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
7972         VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
7973         VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
7974         VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
7975         VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
7976         VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
7977         VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
7978         VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
7979 
7980         // Including the task/mesh shaders here is not technically correct, as they are in a
7981         // separate logical pipeline - but it works for the case this is currently used, and
7982         // fixing it would require significant rework and end up with the code being far more
7983         // verbose for no practical gain.
7984         // However, worth paying attention to this if using this function in a new way.
7985         VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
7986         VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
7987 
7988         VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
7989         VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
7990         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
7991         VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
7992         VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
7993         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
7994     };
7995     // clang-format on
7996 
7997     const int ordered_array_length = sizeof(ordered_array) / sizeof(VkPipelineStageFlagBits);
7998 
7999     for (int i = 0; i < ordered_array_length; ++i) {
8000         if (ordered_array[i] == flag) {
8001             return i;
8002         }
8003     }
8004 
8005     return -1;
8006 }
8007 
8008 // The following two functions technically have O(N^2) complexity, but it's for a value of O that's largely
8009 // stable and also rather tiny - this could definitely be rejigged to work more efficiently, but the impact
8010 // on runtime is currently negligible, so it wouldn't gain very much.
8011 // If we add a lot more graphics pipeline stages, this set of functions should be rewritten to accomodate.
GetLogicallyEarliestGraphicsPipelineStage(VkPipelineStageFlags inflags)8012 static VkPipelineStageFlagBits GetLogicallyEarliestGraphicsPipelineStage(VkPipelineStageFlags inflags) {
8013     VkPipelineStageFlagBits earliest_bit = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
8014     int earliest_bit_order = GetGraphicsPipelineStageLogicalOrdinal(earliest_bit);
8015 
8016     for (std::size_t i = 0; i < sizeof(VkPipelineStageFlagBits); ++i) {
8017         VkPipelineStageFlagBits current_flag = (VkPipelineStageFlagBits)((inflags & 0x1u) << i);
8018         if (current_flag) {
8019             int new_order = GetGraphicsPipelineStageLogicalOrdinal(current_flag);
8020             if (new_order != -1 && new_order < earliest_bit_order) {
8021                 earliest_bit_order = new_order;
8022                 earliest_bit = current_flag;
8023             }
8024         }
8025         inflags = inflags >> 1;
8026     }
8027     return earliest_bit;
8028 }
8029 
GetLogicallyLatestGraphicsPipelineStage(VkPipelineStageFlags inflags)8030 static VkPipelineStageFlagBits GetLogicallyLatestGraphicsPipelineStage(VkPipelineStageFlags inflags) {
8031     VkPipelineStageFlagBits latest_bit = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
8032     int latest_bit_order = GetGraphicsPipelineStageLogicalOrdinal(latest_bit);
8033 
8034     for (std::size_t i = 0; i < sizeof(VkPipelineStageFlagBits); ++i) {
8035         if (inflags & 0x1u) {
8036             int new_order = GetGraphicsPipelineStageLogicalOrdinal((VkPipelineStageFlagBits)((inflags & 0x1u) << i));
8037             if (new_order != -1 && new_order > latest_bit_order) {
8038                 latest_bit_order = new_order;
8039                 latest_bit = (VkPipelineStageFlagBits)((inflags & 0x1u) << i);
8040             }
8041         }
8042         inflags = inflags >> 1;
8043     }
8044     return latest_bit;
8045 }
8046 
8047 // Verify image barrier image state and that the image is consistent with FB image
ValidateImageBarrierImage(const char * funcName,CMD_BUFFER_STATE const * cb_state,VkFramebuffer framebuffer,uint32_t active_subpass,const safe_VkSubpassDescription2KHR & sub_desc,const VulkanTypedHandle & rp_handle,uint32_t img_index,const VkImageMemoryBarrier & img_barrier)8048 bool CoreChecks::ValidateImageBarrierImage(const char *funcName, CMD_BUFFER_STATE const *cb_state, VkFramebuffer framebuffer,
8049                                            uint32_t active_subpass, const safe_VkSubpassDescription2KHR &sub_desc,
8050                                            const VulkanTypedHandle &rp_handle, uint32_t img_index,
8051                                            const VkImageMemoryBarrier &img_barrier) {
8052     bool skip = false;
8053     const auto &fb_state = GetFramebufferState(framebuffer);
8054     assert(fb_state);
8055     const auto img_bar_image = img_barrier.image;
8056     bool image_match = false;
8057     bool sub_image_found = false;  // Do we find a corresponding subpass description
8058     VkImageLayout sub_image_layout = VK_IMAGE_LAYOUT_UNDEFINED;
8059     uint32_t attach_index = 0;
8060     // Verify that a framebuffer image matches barrier image
8061     const auto attachmentCount = fb_state->createInfo.attachmentCount;
8062     for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
8063         auto view_state = GetAttachmentImageViewState(fb_state, attachment);
8064         if (view_state && (img_bar_image == view_state->create_info.image)) {
8065             image_match = true;
8066             attach_index = attachment;
8067             break;
8068         }
8069     }
8070     if (image_match) {  // Make sure subpass is referring to matching attachment
8071         if (sub_desc.pDepthStencilAttachment && sub_desc.pDepthStencilAttachment->attachment == attach_index) {
8072             sub_image_layout = sub_desc.pDepthStencilAttachment->layout;
8073             sub_image_found = true;
8074         } else if (device_extensions.vk_khr_depth_stencil_resolve) {
8075             const auto *resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(sub_desc.pNext);
8076             if (resolve && resolve->pDepthStencilResolveAttachment &&
8077                 resolve->pDepthStencilResolveAttachment->attachment == attach_index) {
8078                 sub_image_layout = resolve->pDepthStencilResolveAttachment->layout;
8079                 sub_image_found = true;
8080             }
8081         } else {
8082             for (uint32_t j = 0; j < sub_desc.colorAttachmentCount; ++j) {
8083                 if (sub_desc.pColorAttachments && sub_desc.pColorAttachments[j].attachment == attach_index) {
8084                     sub_image_layout = sub_desc.pColorAttachments[j].layout;
8085                     sub_image_found = true;
8086                     break;
8087                 } else if (sub_desc.pResolveAttachments && sub_desc.pResolveAttachments[j].attachment == attach_index) {
8088                     sub_image_layout = sub_desc.pResolveAttachments[j].layout;
8089                     sub_image_found = true;
8090                     break;
8091                 }
8092             }
8093         }
8094         if (!sub_image_found) {
8095             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
8096                             rp_handle.handle, "VUID-vkCmdPipelineBarrier-image-02635",
8097                             "%s: Barrier pImageMemoryBarriers[%d].%s is not referenced by the VkSubpassDescription for "
8098                             "active subpass (%d) of current %s.",
8099                             funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
8100                             report_data->FormatHandle(rp_handle).c_str());
8101         }
8102     } else {  // !image_match
8103         auto const fb_handle = HandleToUint64(fb_state->framebuffer);
8104         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, fb_handle,
8105                         "VUID-vkCmdPipelineBarrier-image-02635",
8106                         "%s: Barrier pImageMemoryBarriers[%d].%s does not match an image from the current %s.", funcName, img_index,
8107                         report_data->FormatHandle(img_bar_image).c_str(), report_data->FormatHandle(fb_state->framebuffer).c_str());
8108     }
8109     if (img_barrier.oldLayout != img_barrier.newLayout) {
8110         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8111                         HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-oldLayout-01181",
8112                         "%s: As the Image Barrier for %s is being executed within a render pass instance, oldLayout must "
8113                         "equal newLayout yet they are %s and %s.",
8114                         funcName, report_data->FormatHandle(img_barrier.image).c_str(), string_VkImageLayout(img_barrier.oldLayout),
8115                         string_VkImageLayout(img_barrier.newLayout));
8116     } else {
8117         if (sub_image_found && sub_image_layout != img_barrier.oldLayout) {
8118             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
8119                             rp_handle.handle, "VUID-vkCmdPipelineBarrier-oldLayout-02636",
8120                             "%s: Barrier pImageMemoryBarriers[%d].%s is referenced by the VkSubpassDescription for active "
8121                             "subpass (%d) of current %s as having layout %s, but image barrier has layout %s.",
8122                             funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
8123                             report_data->FormatHandle(rp_handle).c_str(), string_VkImageLayout(sub_image_layout),
8124                             string_VkImageLayout(img_barrier.oldLayout));
8125         }
8126     }
8127     return skip;
8128 }
8129 
// Validate image barriers within a renderPass
//
// For each VkImageMemoryBarrier: (1) its src/dst access masks must each be a subset of the
// corresponding masks of at least one self-dependency of the active subpass, (2) both queue
// family indices must be VK_QUEUE_FAMILY_IGNORED, and (3) the barrier image must be consistent
// with the framebuffer (delegated to ValidateImageBarrierImage, possibly deferred for secondary
// command buffers recorded without a framebuffer).
bool CoreChecks::ValidateRenderPassImageBarriers(const char *funcName, CMD_BUFFER_STATE *cb_state, uint32_t active_subpass,
                                                 const safe_VkSubpassDescription2KHR &sub_desc, const VulkanTypedHandle &rp_handle,
                                                 const safe_VkSubpassDependency2KHR *dependencies,
                                                 const std::vector<uint32_t> &self_dependencies, uint32_t image_mem_barrier_count,
                                                 const VkImageMemoryBarrier *image_barriers) {
    bool skip = false;
    for (uint32_t i = 0; i < image_mem_barrier_count; ++i) {
        const auto &img_barrier = image_barriers[i];
        const auto &img_src_access_mask = img_barrier.srcAccessMask;
        const auto &img_dst_access_mask = img_barrier.dstAccessMask;
        // A match requires BOTH masks to be subsets of the SAME self-dependency entry
        bool access_mask_match = false;
        for (const auto self_dep_index : self_dependencies) {
            const auto &sub_dep = dependencies[self_dep_index];
            access_mask_match = (img_src_access_mask == (sub_dep.srcAccessMask & img_src_access_mask)) &&
                                (img_dst_access_mask == (sub_dep.dstAccessMask & img_dst_access_mask));
            if (access_mask_match) break;
        }
        if (!access_mask_match) {
            // Report both the src and dst mask violations, listing candidate dependency indices
            std::stringstream self_dep_ss;
            stream_join(self_dep_ss, ", ", self_dependencies);
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
                            "%s: Barrier pImageMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency "
                            "srcAccessMask of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
                            funcName, i, img_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
                            self_dep_ss.str().c_str());
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
                            "%s: Barrier pImageMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency "
                            "dstAccessMask of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
                            funcName, i, img_dst_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
                            self_dep_ss.str().c_str());
        }
        // Queue family ownership transfers are not allowed inside a render pass instance
        if (VK_QUEUE_FAMILY_IGNORED != img_barrier.srcQueueFamilyIndex ||
            VK_QUEUE_FAMILY_IGNORED != img_barrier.dstQueueFamilyIndex) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182",
                            "%s: Barrier pImageMemoryBarriers[%d].srcQueueFamilyIndex is %d and "
                            "pImageMemoryBarriers[%d].dstQueueFamilyIndex is %d but both must be VK_QUEUE_FAMILY_IGNORED.",
                            funcName, i, img_barrier.srcQueueFamilyIndex, i, img_barrier.dstQueueFamilyIndex);
        }
        // Secondary CBs can have null framebuffer so queue up validation in that case 'til FB is known
        if (VK_NULL_HANDLE == cb_state->activeFramebuffer) {
            assert(VK_COMMAND_BUFFER_LEVEL_SECONDARY == cb_state->createInfo.level);
            // Secondary CB case w/o FB specified delay validation
            // NOTE(review): the lambda captures funcName (const char*) and cb_state (raw pointer) by value;
            // it assumes both outlive deferred execution at vkCmdExecuteCommands time (funcName is presumably
            // always a string literal) — confirm at call sites.
            cb_state->cmd_execute_commands_functions.emplace_back([=](const CMD_BUFFER_STATE *primary_cb, VkFramebuffer fb) {
                return ValidateImageBarrierImage(funcName, cb_state, fb, active_subpass, sub_desc, rp_handle, i, img_barrier);
            });
        } else {
            skip |= ValidateImageBarrierImage(funcName, cb_state, cb_state->activeFramebuffer, active_subpass, sub_desc, rp_handle,
                                              i, img_barrier);
        }
    }
    return skip;
}
8186 
8187 // Validate VUs for Pipeline Barriers that are within a renderPass
8188 // Pre: cb_state->activeRenderPass must be a pointer to valid renderPass state
ValidateRenderPassPipelineBarriers(const char * funcName,CMD_BUFFER_STATE * cb_state,VkPipelineStageFlags src_stage_mask,VkPipelineStageFlags dst_stage_mask,VkDependencyFlags dependency_flags,uint32_t mem_barrier_count,const VkMemoryBarrier * mem_barriers,uint32_t buffer_mem_barrier_count,const VkBufferMemoryBarrier * buffer_mem_barriers,uint32_t image_mem_barrier_count,const VkImageMemoryBarrier * image_barriers)8189 bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, CMD_BUFFER_STATE *cb_state,
8190                                                     VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
8191                                                     VkDependencyFlags dependency_flags, uint32_t mem_barrier_count,
8192                                                     const VkMemoryBarrier *mem_barriers, uint32_t buffer_mem_barrier_count,
8193                                                     const VkBufferMemoryBarrier *buffer_mem_barriers,
8194                                                     uint32_t image_mem_barrier_count, const VkImageMemoryBarrier *image_barriers) {
8195     bool skip = false;
8196     const auto rp_state = cb_state->activeRenderPass;
8197     const auto active_subpass = cb_state->activeSubpass;
8198     const VulkanTypedHandle rp_handle(rp_state->renderPass, kVulkanObjectTypeRenderPass);
8199     const auto &self_dependencies = rp_state->self_dependencies[active_subpass];
8200     const auto &dependencies = rp_state->createInfo.pDependencies;
8201     if (self_dependencies.size() == 0) {
8202         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
8203                         "VUID-vkCmdPipelineBarrier-pDependencies-02285",
8204                         "%s: Barriers cannot be set during subpass %d of %s with no self-dependency specified.", funcName,
8205                         active_subpass, report_data->FormatHandle(rp_handle).c_str());
8206     } else {
8207         // Grab ref to current subpassDescription up-front for use below
8208         const auto &sub_desc = rp_state->createInfo.pSubpasses[active_subpass];
8209         // Look for matching mask in any self-dependency
8210         bool stage_mask_match = false;
8211         for (const auto self_dep_index : self_dependencies) {
8212             const auto &sub_dep = dependencies[self_dep_index];
8213             const auto &sub_src_stage_mask = ExpandPipelineStageFlags(device_extensions, sub_dep.srcStageMask);
8214             const auto &sub_dst_stage_mask = ExpandPipelineStageFlags(device_extensions, sub_dep.dstStageMask);
8215             stage_mask_match = ((sub_src_stage_mask == VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) ||
8216                                 (src_stage_mask == (sub_src_stage_mask & src_stage_mask))) &&
8217                                ((sub_dst_stage_mask == VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) ||
8218                                 (dst_stage_mask == (sub_dst_stage_mask & dst_stage_mask)));
8219             if (stage_mask_match) break;
8220         }
8221         if (!stage_mask_match) {
8222             std::stringstream self_dep_ss;
8223             stream_join(self_dep_ss, ", ", self_dependencies);
8224             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
8225                             rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
8226                             "%s: Barrier srcStageMask(0x%X) is not a subset of VkSubpassDependency srcStageMask of any "
8227                             "self-dependency of subpass %d of %s for which dstStageMask is also a subset. "
8228                             "Candidate VkSubpassDependency are pDependencies entries [%s].",
8229                             funcName, src_stage_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
8230                             self_dep_ss.str().c_str());
8231             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
8232                             rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
8233                             "%s: Barrier dstStageMask(0x%X) is not a subset of VkSubpassDependency dstStageMask of any "
8234                             "self-dependency of subpass %d of %s for which srcStageMask is also a subset. "
8235                             "Candidate VkSubpassDependency are pDependencies entries [%s].",
8236                             funcName, dst_stage_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
8237                             self_dep_ss.str().c_str());
8238         }
8239 
8240         if (0 != buffer_mem_barrier_count) {
8241             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
8242                             rp_handle.handle, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178",
8243                             "%s: bufferMemoryBarrierCount is non-zero (%d) for subpass %d of %s.", funcName,
8244                             buffer_mem_barrier_count, active_subpass, report_data->FormatHandle(rp_handle).c_str());
8245         }
8246         for (uint32_t i = 0; i < mem_barrier_count; ++i) {
8247             const auto &mb_src_access_mask = mem_barriers[i].srcAccessMask;
8248             const auto &mb_dst_access_mask = mem_barriers[i].dstAccessMask;
8249             bool access_mask_match = false;
8250             for (const auto self_dep_index : self_dependencies) {
8251                 const auto &sub_dep = dependencies[self_dep_index];
8252                 access_mask_match = (mb_src_access_mask == (sub_dep.srcAccessMask & mb_src_access_mask)) &&
8253                                     (mb_dst_access_mask == (sub_dep.dstAccessMask & mb_dst_access_mask));
8254                 if (access_mask_match) break;
8255             }
8256 
8257             if (!access_mask_match) {
8258                 std::stringstream self_dep_ss;
8259                 stream_join(self_dep_ss, ", ", self_dependencies);
8260                 skip |= log_msg(
8261                     report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
8262                     "VUID-vkCmdPipelineBarrier-pDependencies-02285",
8263                     "%s: Barrier pMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency srcAccessMask "
8264                     "for any self-dependency of subpass %d of %s for which dstAccessMask is also a subset. "
8265                     "Candidate VkSubpassDependency are pDependencies entries [%s].",
8266                     funcName, i, mb_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
8267                     self_dep_ss.str().c_str());
8268                 skip |= log_msg(
8269                     report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
8270                     "VUID-vkCmdPipelineBarrier-pDependencies-02285",
8271                     "%s: Barrier pMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency dstAccessMask "
8272                     "for any self-dependency of subpass %d of %s for which srcAccessMask is also a subset. "
8273                     "Candidate VkSubpassDependency are pDependencies entries [%s].",
8274                     funcName, i, mb_dst_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
8275                     self_dep_ss.str().c_str());
8276             }
8277         }
8278 
8279         skip |= ValidateRenderPassImageBarriers(funcName, cb_state, active_subpass, sub_desc, rp_handle, dependencies,
8280                                                 self_dependencies, image_mem_barrier_count, image_barriers);
8281 
8282         bool flag_match = false;
8283         for (const auto self_dep_index : self_dependencies) {
8284             const auto &sub_dep = dependencies[self_dep_index];
8285             flag_match = sub_dep.dependencyFlags == dependency_flags;
8286             if (flag_match) break;
8287         }
8288         if (!flag_match) {
8289             std::stringstream self_dep_ss;
8290             stream_join(self_dep_ss, ", ", self_dependencies);
8291             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
8292                             rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
8293                             "%s: dependencyFlags param (0x%X) does not equal VkSubpassDependency dependencyFlags value for any "
8294                             "self-dependency of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
8295                             funcName, dependency_flags, cb_state->activeSubpass, report_data->FormatHandle(rp_handle).c_str(),
8296                             self_dep_ss.str().c_str());
8297         }
8298     }
8299     return skip;
8300 }
8301 
// Array to mask individual accessMask to corresponding stageMask
//  accessMask active bit position (0-31) maps to index
// NOTE: This table must be kept in sync with the VkAccessFlagBits enum -- each entry is the set of
// pipeline stages that support the access type whose bit position equals the entry's index.
// Entries set to VK_ACCESS_FLAG_BITS_MAX_ENUM (all bits set) intersect with any stage mask, i.e.
// those access types are treated as supported by every stage.
const static VkPipelineStageFlags AccessMaskToPipeStage[28] = {
    // VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0
    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
    // VK_ACCESS_INDEX_READ_BIT = 1
    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
    // VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 2
    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
    // VK_ACCESS_UNIFORM_READ_BIT = 3
    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
    // VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 4
    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
    // VK_ACCESS_SHADER_READ_BIT = 5
    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
    // VK_ACCESS_SHADER_WRITE_BIT = 6
    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
    // VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 7
    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
    // VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 8
    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
    // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 9
    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
    // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 10
    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
    // VK_ACCESS_TRANSFER_READ_BIT = 11
    VK_PIPELINE_STAGE_TRANSFER_BIT,
    // VK_ACCESS_TRANSFER_WRITE_BIT = 12
    VK_PIPELINE_STAGE_TRANSFER_BIT,
    // VK_ACCESS_HOST_READ_BIT = 13
    VK_PIPELINE_STAGE_HOST_BIT,
    // VK_ACCESS_HOST_WRITE_BIT = 14
    VK_PIPELINE_STAGE_HOST_BIT,
    // VK_ACCESS_MEMORY_READ_BIT = 15
    VK_ACCESS_FLAG_BITS_MAX_ENUM,  // Always match
    // VK_ACCESS_MEMORY_WRITE_BIT = 16
    VK_ACCESS_FLAG_BITS_MAX_ENUM,  // Always match
    // VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 17
    VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
    // VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 18
    VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
    // VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 19
    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
    // VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 20
    VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
    // VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 21
    VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV | VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
    // VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 22
    VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
    // VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 23
    VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
    // VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 24
    VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
    // VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 25
    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
    // VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 26
    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
    // VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 27
    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
};
8371 
8372 // Verify that all bits of access_mask are supported by the src_stage_mask
ValidateAccessMaskPipelineStage(const DeviceExtensions & extensions,VkAccessFlags access_mask,VkPipelineStageFlags stage_mask)8373 static bool ValidateAccessMaskPipelineStage(const DeviceExtensions &extensions, VkAccessFlags access_mask,
8374                                             VkPipelineStageFlags stage_mask) {
8375     // Early out if all commands set, or access_mask NULL
8376     if ((stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) || (0 == access_mask)) return true;
8377 
8378     stage_mask = ExpandPipelineStageFlags(extensions, stage_mask);
8379     int index = 0;
8380     // for each of the set bits in access_mask, make sure that supporting stage mask bit(s) are set
8381     while (access_mask) {
8382         index = (u_ffs(access_mask) - 1);
8383         assert(index >= 0);
8384         // Must have "!= 0" compare to prevent warning from MSVC
8385         if ((AccessMaskToPipeStage[index] & stage_mask) == 0) return false;  // early out
8386         access_mask &= ~(1 << index);                                        // Mask off bit that's been checked
8387     }
8388     return true;
8389 }
8390 
8391 namespace barrier_queue_families {
// Identifies the queue-family ownership-transfer rule being checked. The enumerator values are
// used directly as indices into vu_summary, image_error_codes, and buffer_error_codes, so the
// three tables and this enum must stay in the same order.
enum VuIndex {
    kSrcOrDstMustBeIgnore,
    kSpecialOrIgnoreOnly,
    kSrcIgnoreRequiresDstIgnore,
    kDstValidOrSpecialIfNotIgnore,
    kSrcValidOrSpecialIfNotIgnore,
    kSrcAndDestMustBeIgnore,
    kBothIgnoreOrBothValid,
    kSubmitQueueMustMatchSrcOrDst
};
// Human-readable one-line summary for each rule, indexed by VuIndex; appended to the logged message.
static const char *vu_summary[] = {"Source or destination queue family must be ignored.",
                                   "Source or destination queue family must be special or ignored.",
                                   "Destination queue family must be ignored if source queue family is.",
                                   "Destination queue family must be valid, ignored, or special.",
                                   "Source queue family must be valid, ignored, or special.",
                                   "Source and destination queue family must both be ignored.",
                                   "Source and destination queue family must both be ignore or both valid.",
                                   "Source or destination queue family must match submit queue family, if not ignored."};
8410 
// VUID strings for VkImageMemoryBarrier queue-family violations, indexed by VuIndex.
static const std::string image_error_codes[] = {
    "VUID-VkImageMemoryBarrier-image-01381",  //   kSrcOrDstMustBeIgnore
    "VUID-VkImageMemoryBarrier-image-01766",  //   kSpecialOrIgnoreOnly
    "VUID-VkImageMemoryBarrier-image-01201",  //   kSrcIgnoreRequiresDstIgnore
    "VUID-VkImageMemoryBarrier-image-01768",  //   kDstValidOrSpecialIfNotIgnore
    "VUID-VkImageMemoryBarrier-image-01767",  //   kSrcValidOrSpecialIfNotIgnore
    "VUID-VkImageMemoryBarrier-image-01199",  //   kSrcAndDestMustBeIgnore
    "VUID-VkImageMemoryBarrier-image-01200",  //   kBothIgnoreOrBothValid
    "VUID-VkImageMemoryBarrier-image-01205",  //   kSubmitQueueMustMatchSrcOrDst
};
8421 
// VUID strings for VkBufferMemoryBarrier queue-family violations, indexed by VuIndex.
static const std::string buffer_error_codes[] = {
    "VUID-VkBufferMemoryBarrier-buffer-01191",  //  kSrcOrDstMustBeIgnore
    "VUID-VkBufferMemoryBarrier-buffer-01763",  //  kSpecialOrIgnoreOnly
    "VUID-VkBufferMemoryBarrier-buffer-01193",  //  kSrcIgnoreRequiresDstIgnore
    "VUID-VkBufferMemoryBarrier-buffer-01765",  //  kDstValidOrSpecialIfNotIgnore
    "VUID-VkBufferMemoryBarrier-buffer-01764",  //  kSrcValidOrSpecialIfNotIgnore
    "VUID-VkBufferMemoryBarrier-buffer-01190",  //  kSrcAndDestMustBeIgnore
    "VUID-VkBufferMemoryBarrier-buffer-01192",  //  kBothIgnoreOrBothValid
    "VUID-VkBufferMemoryBarrier-buffer-01196",  //  kSubmitQueueMustMatchSrcOrDst
};
8432 
// Bundles everything needed to validate the queue-family ownership-transfer rules for one
// buffer or image memory barrier, and centralizes the error-logging boilerplate. Instances
// are created per-barrier and may be copied into a submit-time closure (see Validate below),
// so all members are captured by value / simple pointer.
class ValidatorState {
   public:
    // Generic constructor: captures the reporting context, barrier handle, sharing mode, and the
    // VUID table (image_error_codes or buffer_error_codes) to use for this barrier type.
    ValidatorState(const CoreChecks *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state,
                   const VulkanTypedHandle &barrier_handle, const VkSharingMode sharing_mode, const std::string *val_codes)
        : report_data_(device_data->report_data),
          func_name_(func_name),
          cb_handle64_(HandleToUint64(cb_state->commandBuffer)),
          barrier_handle_(barrier_handle),
          sharing_mode_(sharing_mode),
          val_codes_(val_codes),
          // Queue family indices at or above the physical device's family count are invalid
          limit_(static_cast<uint32_t>(device_data->physical_device_state->queue_family_properties.size())),
          mem_ext_(device_data->device_extensions.vk_khr_external_memory) {}

    // Create a validator state from an image state... reducing the image specific to the generic version.
    ValidatorState(const CoreChecks *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state,
                   const VkImageMemoryBarrier *barrier, const IMAGE_STATE *state)
        : ValidatorState(device_data, func_name, cb_state, VulkanTypedHandle(barrier->image, kVulkanObjectTypeImage),
                         state->createInfo.sharingMode, image_error_codes) {}

    // Create a validator state from an buffer state... reducing the buffer specific to the generic version.
    ValidatorState(const CoreChecks *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state,
                   const VkBufferMemoryBarrier *barrier, const BUFFER_STATE *state)
        : ValidatorState(device_data, func_name, cb_state, VulkanTypedHandle(barrier->buffer, kVulkanObjectTypeBuffer),
                         state->createInfo.sharingMode, buffer_error_codes) {}

    // Log the messages using boilerplate from object state, and Vu specific information from the template arg
    // One and two family versions, in the single family version, Vu holds the name of the passed parameter
    // Single-family variant: param_name names the offending parameter (e.g. "dstQueueFamilyIndex").
    bool LogMsg(VuIndex vu_index, uint32_t family, const char *param_name) const {
        const std::string &val_code = val_codes_[vu_index];
        const char *annotation = GetFamilyAnnotation(family);
        return log_msg(report_data_, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_handle64_,
                       val_code, "%s: Barrier using %s %s created with sharingMode %s, has %s %u%s. %s", func_name_,
                       GetTypeString(), report_data_->FormatHandle(barrier_handle_).c_str(), GetModeString(), param_name, family,
                       annotation, vu_summary[vu_index]);
    }

    // Two-family variant: reports both srcQueueFamilyIndex and dstQueueFamilyIndex with annotations.
    bool LogMsg(VuIndex vu_index, uint32_t src_family, uint32_t dst_family) const {
        const std::string &val_code = val_codes_[vu_index];
        const char *src_annotation = GetFamilyAnnotation(src_family);
        const char *dst_annotation = GetFamilyAnnotation(dst_family);
        return log_msg(
            report_data_, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_handle64_, val_code,
            "%s: Barrier using %s %s created with sharingMode %s, has srcQueueFamilyIndex %u%s and dstQueueFamilyIndex %u%s. %s",
            func_name_, GetTypeString(), report_data_->FormatHandle(barrier_handle_).c_str(), GetModeString(), src_family,
            src_annotation, dst_family, dst_annotation, vu_summary[kSubmitQueueMustMatchSrcOrDst] == vu_summary[vu_index]
                                                            ? vu_summary[vu_index]
                                                            : vu_summary[vu_index]);
    }

    // This abstract Vu can only be tested at submit time, thus we need a callback from the closure containing the needed
    // data. Note that the mem_barrier is copied to the closure as the lambda lifespan exceed the guarantees of validity for
    // application input.
    // Checks that the submitting queue's family matches either side of an ownership transfer;
    // returns true (skip) if a violation was logged.
    static bool ValidateAtQueueSubmit(const VkQueue queue, const CoreChecks *device_data, uint32_t src_family, uint32_t dst_family,
                                      const ValidatorState &val) {
        auto queue_data_it = device_data->queueMap.find(queue);
        if (queue_data_it == device_data->queueMap.end()) return false;

        uint32_t queue_family = queue_data_it->second.queueFamilyIndex;
        if ((src_family != queue_family) && (dst_family != queue_family)) {
            const std::string &val_code = val.val_codes_[kSubmitQueueMustMatchSrcOrDst];
            const char *src_annotation = val.GetFamilyAnnotation(src_family);
            const char *dst_annotation = val.GetFamilyAnnotation(dst_family);
            return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                           HandleToUint64(queue), val_code,
                           "%s: Barrier submitted to queue with family index %u, using %s %s created with sharingMode %s, has "
                           "srcQueueFamilyIndex %u%s and dstQueueFamilyIndex %u%s. %s",
                           "vkQueueSubmit", queue_family, val.GetTypeString(),
                           device_data->report_data->FormatHandle(val.barrier_handle_).c_str(), val.GetModeString(), src_family,
                           src_annotation, dst_family, dst_annotation, vu_summary[kSubmitQueueMustMatchSrcOrDst]);
        }
        return false;
    }
    // Logical helpers for semantic clarity
    inline bool KhrExternalMem() const { return mem_ext_; }
    inline bool IsValid(uint32_t queue_family) const { return (queue_family < limit_); }
    // Valid device family, or (when VK_KHR_external_memory is enabled) a special family
    // such as VK_QUEUE_FAMILY_EXTERNAL_KHR / VK_QUEUE_FAMILY_FOREIGN_EXT (see IsSpecial).
    inline bool IsValidOrSpecial(uint32_t queue_family) const {
        return IsValid(queue_family) || (mem_ext_ && IsSpecial(queue_family));
    }
    inline bool IsIgnored(uint32_t queue_family) const { return queue_family == VK_QUEUE_FAMILY_IGNORED; }

    // Helpers for LogMsg (and log_msg)
    const char *GetModeString() const { return string_VkSharingMode(sharing_mode_); }

    // Descriptive text for the various types of queue family index
    const char *GetFamilyAnnotation(uint32_t family) const {
        const char *external = " (VK_QUEUE_FAMILY_EXTERNAL_KHR)";
        const char *foreign = " (VK_QUEUE_FAMILY_FOREIGN_EXT)";
        const char *ignored = " (VK_QUEUE_FAMILY_IGNORED)";
        const char *valid = " (VALID)";
        const char *invalid = " (INVALID)";
        switch (family) {
            case VK_QUEUE_FAMILY_EXTERNAL_KHR:
                return external;
            case VK_QUEUE_FAMILY_FOREIGN_EXT:
                return foreign;
            case VK_QUEUE_FAMILY_IGNORED:
                return ignored;
            default:
                if (IsValid(family)) {
                    return valid;
                }
                return invalid;
        };
    }
    const char *GetTypeString() const { return object_string[barrier_handle_.type]; }
    VkSharingMode GetSharingMode() const { return sharing_mode_; }

   protected:
    const debug_report_data *const report_data_;
    const char *const func_name_;         // Calling API name, used in log messages
    const uint64_t cb_handle64_;          // Command buffer handle for log_msg object binding
    const VulkanTypedHandle barrier_handle_;
    const VkSharingMode sharing_mode_;
    const std::string *val_codes_;        // Points at image_error_codes or buffer_error_codes
    const uint32_t limit_;                // Count of queue families on the physical device
    const bool mem_ext_;                  // VK_KHR_external_memory enabled
};
8548 
Validate(const CoreChecks * device_data,const char * func_name,CMD_BUFFER_STATE * cb_state,const ValidatorState & val,const uint32_t src_queue_family,const uint32_t dst_queue_family)8549 bool Validate(const CoreChecks *device_data, const char *func_name, CMD_BUFFER_STATE *cb_state, const ValidatorState &val,
8550               const uint32_t src_queue_family, const uint32_t dst_queue_family) {
8551     bool skip = false;
8552 
8553     const bool mode_concurrent = val.GetSharingMode() == VK_SHARING_MODE_CONCURRENT;
8554     const bool src_ignored = val.IsIgnored(src_queue_family);
8555     const bool dst_ignored = val.IsIgnored(dst_queue_family);
8556     if (val.KhrExternalMem()) {
8557         if (mode_concurrent) {
8558             if (!(src_ignored || dst_ignored)) {
8559                 skip |= val.LogMsg(kSrcOrDstMustBeIgnore, src_queue_family, dst_queue_family);
8560             }
8561             if ((src_ignored && !(dst_ignored || IsSpecial(dst_queue_family))) ||
8562                 (dst_ignored && !(src_ignored || IsSpecial(src_queue_family)))) {
8563                 skip |= val.LogMsg(kSpecialOrIgnoreOnly, src_queue_family, dst_queue_family);
8564             }
8565         } else {
8566             // VK_SHARING_MODE_EXCLUSIVE
8567             if (src_ignored && !dst_ignored) {
8568                 skip |= val.LogMsg(kSrcIgnoreRequiresDstIgnore, src_queue_family, dst_queue_family);
8569             }
8570             if (!dst_ignored && !val.IsValidOrSpecial(dst_queue_family)) {
8571                 skip |= val.LogMsg(kDstValidOrSpecialIfNotIgnore, dst_queue_family, "dstQueueFamilyIndex");
8572             }
8573             if (!src_ignored && !val.IsValidOrSpecial(src_queue_family)) {
8574                 skip |= val.LogMsg(kSrcValidOrSpecialIfNotIgnore, src_queue_family, "srcQueueFamilyIndex");
8575             }
8576         }
8577     } else {
8578         // No memory extension
8579         if (mode_concurrent) {
8580             if (!src_ignored || !dst_ignored) {
8581                 skip |= val.LogMsg(kSrcAndDestMustBeIgnore, src_queue_family, dst_queue_family);
8582             }
8583         } else {
8584             // VK_SHARING_MODE_EXCLUSIVE
8585             if (!((src_ignored && dst_ignored) || (val.IsValid(src_queue_family) && val.IsValid(dst_queue_family)))) {
8586                 skip |= val.LogMsg(kBothIgnoreOrBothValid, src_queue_family, dst_queue_family);
8587             }
8588         }
8589     }
8590     if (!mode_concurrent && !src_ignored && !dst_ignored) {
8591         // Only enqueue submit time check if it is needed. If more submit time checks are added, change the criteria
8592         // TODO create a better named list, or rename the submit time lists to something that matches the broader usage...
8593         // Note: if we want to create a semantic that separates state lookup, validation, and state update this should go
8594         // to a local queue of update_state_actions or something.
8595         cb_state->eventUpdates.emplace_back([device_data, src_queue_family, dst_queue_family, val](VkQueue queue) {
8596             return ValidatorState::ValidateAtQueueSubmit(queue, device_data, src_queue_family, dst_queue_family, val);
8597         });
8598     }
8599     return skip;
8600 }
8601 }  // namespace barrier_queue_families
8602 
8603 // Type specific wrapper for image barriers
ValidateBarrierQueueFamilies(const char * func_name,CMD_BUFFER_STATE * cb_state,const VkImageMemoryBarrier & barrier,const IMAGE_STATE * state_data)8604 bool CoreChecks::ValidateBarrierQueueFamilies(const char *func_name, CMD_BUFFER_STATE *cb_state,
8605                                               const VkImageMemoryBarrier &barrier, const IMAGE_STATE *state_data) {
8606     // State data is required
8607     if (!state_data) {
8608         return false;
8609     }
8610 
8611     // Create the validator state from the image state
8612     barrier_queue_families::ValidatorState val(this, func_name, cb_state, &barrier, state_data);
8613     const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
8614     const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
8615     return barrier_queue_families::Validate(this, func_name, cb_state, val, src_queue_family, dst_queue_family);
8616 }
8617 
8618 // Type specific wrapper for buffer barriers
ValidateBarrierQueueFamilies(const char * func_name,CMD_BUFFER_STATE * cb_state,const VkBufferMemoryBarrier & barrier,const BUFFER_STATE * state_data)8619 bool CoreChecks::ValidateBarrierQueueFamilies(const char *func_name, CMD_BUFFER_STATE *cb_state,
8620                                               const VkBufferMemoryBarrier &barrier, const BUFFER_STATE *state_data) {
8621     // State data is required
8622     if (!state_data) {
8623         return false;
8624     }
8625 
8626     // Create the validator state from the buffer state
8627     barrier_queue_families::ValidatorState val(this, func_name, cb_state, &barrier, state_data);
8628     const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
8629     const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
8630     return barrier_queue_families::Validate(this, func_name, cb_state, val, src_queue_family, dst_queue_family);
8631 }
8632 
ValidateBarriers(const char * funcName,CMD_BUFFER_STATE * cb_state,VkPipelineStageFlags src_stage_mask,VkPipelineStageFlags dst_stage_mask,uint32_t memBarrierCount,const VkMemoryBarrier * pMemBarriers,uint32_t bufferBarrierCount,const VkBufferMemoryBarrier * pBufferMemBarriers,uint32_t imageMemBarrierCount,const VkImageMemoryBarrier * pImageMemBarriers)8633 bool CoreChecks::ValidateBarriers(const char *funcName, CMD_BUFFER_STATE *cb_state, VkPipelineStageFlags src_stage_mask,
8634                                   VkPipelineStageFlags dst_stage_mask, uint32_t memBarrierCount,
8635                                   const VkMemoryBarrier *pMemBarriers, uint32_t bufferBarrierCount,
8636                                   const VkBufferMemoryBarrier *pBufferMemBarriers, uint32_t imageMemBarrierCount,
8637                                   const VkImageMemoryBarrier *pImageMemBarriers) {
8638     bool skip = false;
8639     for (uint32_t i = 0; i < memBarrierCount; ++i) {
8640         const auto &mem_barrier = pMemBarriers[i];
8641         if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
8642             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8643                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184",
8644                             "%s: pMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
8645                             mem_barrier.srcAccessMask, src_stage_mask);
8646         }
8647         if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
8648             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8649                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185",
8650                             "%s: pMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
8651                             mem_barrier.dstAccessMask, dst_stage_mask);
8652         }
8653     }
8654     for (uint32_t i = 0; i < imageMemBarrierCount; ++i) {
8655         const auto &mem_barrier = pImageMemBarriers[i];
8656         if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
8657             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8658                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184",
8659                             "%s: pImageMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
8660                             mem_barrier.srcAccessMask, src_stage_mask);
8661         }
8662         if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
8663             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8664                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185",
8665                             "%s: pImageMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
8666                             mem_barrier.dstAccessMask, dst_stage_mask);
8667         }
8668 
8669         auto image_data = GetImageState(mem_barrier.image);
8670         skip |= ValidateBarrierQueueFamilies(funcName, cb_state, mem_barrier, image_data);
8671 
8672         if (mem_barrier.newLayout == VK_IMAGE_LAYOUT_UNDEFINED || mem_barrier.newLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) {
8673             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8674                             HandleToUint64(cb_state->commandBuffer), "VUID-VkImageMemoryBarrier-newLayout-01198",
8675                             "%s: Image Layout cannot be transitioned to UNDEFINED or PREINITIALIZED.", funcName);
8676         }
8677 
8678         if (image_data) {
8679             // There is no VUID for this, but there is blanket text:
8680             //     "Non-sparse resources must be bound completely and contiguously to a single VkDeviceMemory object before
8681             //     recording commands in a command buffer."
8682             // TODO: Update this when VUID is defined
8683             skip |= ValidateMemoryIsBoundToImage(image_data, funcName, kVUIDUndefined);
8684 
8685             const auto aspect_mask = mem_barrier.subresourceRange.aspectMask;
8686             skip |= ValidateImageAspectMask(image_data->image, image_data->createInfo.format, aspect_mask, funcName);
8687 
8688             const std::string param_name = "pImageMemoryBarriers[" + std::to_string(i) + "].subresourceRange";
8689             skip |= ValidateImageBarrierSubresourceRange(image_data, mem_barrier.subresourceRange, funcName, param_name.c_str());
8690         }
8691     }
8692 
8693     for (uint32_t i = 0; i < bufferBarrierCount; ++i) {
8694         const auto &mem_barrier = pBufferMemBarriers[i];
8695 
8696         if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
8697             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8698                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184",
8699                             "%s: pBufferMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
8700                             mem_barrier.srcAccessMask, src_stage_mask);
8701         }
8702         if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
8703             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8704                             HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185",
8705                             "%s: pBufferMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
8706                             mem_barrier.dstAccessMask, dst_stage_mask);
8707         }
8708         // Validate buffer barrier queue family indices
8709         auto buffer_state = GetBufferState(mem_barrier.buffer);
8710         skip |= ValidateBarrierQueueFamilies(funcName, cb_state, mem_barrier, buffer_state);
8711 
8712         if (buffer_state) {
8713             // There is no VUID for this, but there is blanket text:
8714             //     "Non-sparse resources must be bound completely and contiguously to a single VkDeviceMemory object before
8715             //     recording commands in a command buffer"
8716             // TODO: Update this when VUID is defined
8717             skip |= ValidateMemoryIsBoundToBuffer(buffer_state, funcName, kVUIDUndefined);
8718 
8719             auto buffer_size = buffer_state->createInfo.size;
8720             if (mem_barrier.offset >= buffer_size) {
8721                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8722                                 HandleToUint64(cb_state->commandBuffer), "VUID-VkBufferMemoryBarrier-offset-01187",
8723                                 "%s: Buffer Barrier %s has offset 0x%" PRIx64 " which is not less than total size 0x%" PRIx64 ".",
8724                                 funcName, report_data->FormatHandle(mem_barrier.buffer).c_str(), HandleToUint64(mem_barrier.offset),
8725                                 HandleToUint64(buffer_size));
8726             } else if (mem_barrier.size != VK_WHOLE_SIZE && (mem_barrier.offset + mem_barrier.size > buffer_size)) {
8727                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8728                                 HandleToUint64(cb_state->commandBuffer), "VUID-VkBufferMemoryBarrier-size-01189",
8729                                 "%s: Buffer Barrier %s has offset 0x%" PRIx64 " and size 0x%" PRIx64
8730                                 " whose sum is greater than total size 0x%" PRIx64 ".",
8731                                 funcName, report_data->FormatHandle(mem_barrier.buffer).c_str(), HandleToUint64(mem_barrier.offset),
8732                                 HandleToUint64(mem_barrier.size), HandleToUint64(buffer_size));
8733             }
8734         }
8735     }
8736 
8737     skip |= ValidateBarriersQFOTransferUniqueness(funcName, cb_state, bufferBarrierCount, pBufferMemBarriers, imageMemBarrierCount,
8738                                                   pImageMemBarriers);
8739 
8740     return skip;
8741 }
8742 
ValidateEventStageMask(VkQueue queue,CMD_BUFFER_STATE * pCB,uint32_t eventCount,size_t firstEventIndex,VkPipelineStageFlags sourceStageMask)8743 bool CoreChecks::ValidateEventStageMask(VkQueue queue, CMD_BUFFER_STATE *pCB, uint32_t eventCount, size_t firstEventIndex,
8744                                         VkPipelineStageFlags sourceStageMask) {
8745     bool skip = false;
8746     VkPipelineStageFlags stageMask = 0;
8747     for (uint32_t i = 0; i < eventCount; ++i) {
8748         auto event = pCB->events[firstEventIndex + i];
8749         auto queue_data = queueMap.find(queue);
8750         if (queue_data == queueMap.end()) return false;
8751         auto event_data = queue_data->second.eventToStageMap.find(event);
8752         if (event_data != queue_data->second.eventToStageMap.end()) {
8753             stageMask |= event_data->second;
8754         } else {
8755             auto global_event_data = GetEventState(event);
8756             if (!global_event_data) {
8757                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
8758                                 HandleToUint64(event), kVUID_Core_DrawState_InvalidEvent,
8759                                 "%s cannot be waited on if it has never been set.", report_data->FormatHandle(event).c_str());
8760             } else {
8761                 stageMask |= global_event_data->stageMask;
8762             }
8763         }
8764     }
8765     // TODO: Need to validate that host_bit is only set if set event is called
8766     // but set event can be called at any time.
8767     if (sourceStageMask != stageMask && sourceStageMask != (stageMask | VK_PIPELINE_STAGE_HOST_BIT)) {
8768         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8769                         HandleToUint64(pCB->commandBuffer), "VUID-vkCmdWaitEvents-srcStageMask-parameter",
8770                         "Submitting cmdbuffer with call to VkCmdWaitEvents using srcStageMask 0x%X which must be the bitwise OR of "
8771                         "the stageMask parameters used in calls to vkCmdSetEvent and VK_PIPELINE_STAGE_HOST_BIT if used with "
8772                         "vkSetEvent but instead is 0x%X.",
8773                         sourceStageMask, stageMask);
8774     }
8775     return skip;
8776 }
8777 
// Note that we only check bits that HAVE required queueflags -- don't care entries are skipped
// Maps a single pipeline stage bit to the queue capability flags a command pool's queue family
// must advertise for that stage to be legal in a barrier/event stage mask. Stages absent from
// this table are valid on any queue family. Must be kept in sync with stage_flag_bit_array
// below, which enumerates this table's keys for iteration.
static std::unordered_map<VkPipelineStageFlags, VkQueueFlags> supported_pipeline_stages_table = {
    {VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT},
    {VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT},
    {VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_QUEUE_COMPUTE_BIT},
    {VK_PIPELINE_STAGE_TRANSFER_BIT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT},
    {VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_QUEUE_GRAPHICS_BIT}};
8794 
// Iterable list of the pipeline stage bits that carry queue-capability requirements.
// Each entry here must have a corresponding entry in supported_pipeline_stages_table above.
static const VkPipelineStageFlags stage_flag_bit_array[] = {VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
                                                            VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
                                                            VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                                                            VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                                            VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
                                                            VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                                            VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_TRANSFER_BIT,
                                                            VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT};
8809 
CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer,VkPipelineStageFlags stage_mask,VkQueueFlags queue_flags,const char * function,const char * src_or_dest,const char * error_code)8810 bool CoreChecks::CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer, VkPipelineStageFlags stage_mask,
8811                                                   VkQueueFlags queue_flags, const char *function, const char *src_or_dest,
8812                                                   const char *error_code) {
8813     bool skip = false;
8814     // Lookup each bit in the stagemask and check for overlap between its table bits and queue_flags
8815     for (const auto &item : stage_flag_bit_array) {
8816         if (stage_mask & item) {
8817             if ((supported_pipeline_stages_table[item] & queue_flags) == 0) {
8818                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
8819                                 HandleToUint64(command_buffer), error_code,
8820                                 "%s(): %s flag %s is not compatible with the queue family properties of this command buffer.",
8821                                 function, src_or_dest, string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(item)));
8822             }
8823         }
8824     }
8825     return skip;
8826 }
8827 
8828 // Check if all barriers are of a given operation type.
8829 template <typename Barrier, typename OpCheck>
AllTransferOp(const COMMAND_POOL_STATE * pool,OpCheck & op_check,uint32_t count,const Barrier * barriers)8830 bool AllTransferOp(const COMMAND_POOL_STATE *pool, OpCheck &op_check, uint32_t count, const Barrier *barriers) {
8831     if (!pool) return false;
8832 
8833     for (uint32_t b = 0; b < count; b++) {
8834         if (!op_check(pool, barriers + b)) return false;
8835     }
8836     return true;
8837 }
8838 
8839 // Look at the barriers to see if we they are all release or all acquire, the result impacts queue properties validation
ComputeBarrierOperationsType(CMD_BUFFER_STATE * cb_state,uint32_t buffer_barrier_count,const VkBufferMemoryBarrier * buffer_barriers,uint32_t image_barrier_count,const VkImageMemoryBarrier * image_barriers)8840 BarrierOperationsType CoreChecks::ComputeBarrierOperationsType(CMD_BUFFER_STATE *cb_state, uint32_t buffer_barrier_count,
8841                                                                const VkBufferMemoryBarrier *buffer_barriers,
8842                                                                uint32_t image_barrier_count,
8843                                                                const VkImageMemoryBarrier *image_barriers) {
8844     auto pool = GetCommandPoolState(cb_state->createInfo.commandPool);
8845     BarrierOperationsType op_type = kGeneral;
8846 
8847     // Look at the barrier details only if they exist
8848     // Note: AllTransferOp returns true for count == 0
8849     if ((buffer_barrier_count + image_barrier_count) != 0) {
8850         if (AllTransferOp(pool, TempIsReleaseOp<VkBufferMemoryBarrier>, buffer_barrier_count, buffer_barriers) &&
8851             AllTransferOp(pool, TempIsReleaseOp<VkImageMemoryBarrier>, image_barrier_count, image_barriers)) {
8852             op_type = kAllRelease;
8853         } else if (AllTransferOp(pool, IsAcquireOp<VkBufferMemoryBarrier>, buffer_barrier_count, buffer_barriers) &&
8854                    AllTransferOp(pool, IsAcquireOp<VkImageMemoryBarrier>, image_barrier_count, image_barriers)) {
8855             op_type = kAllAcquire;
8856         }
8857     }
8858 
8859     return op_type;
8860 }
8861 
ValidateStageMasksAgainstQueueCapabilities(CMD_BUFFER_STATE const * cb_state,VkPipelineStageFlags source_stage_mask,VkPipelineStageFlags dest_stage_mask,BarrierOperationsType barrier_op_type,const char * function,const char * error_code)8862 bool CoreChecks::ValidateStageMasksAgainstQueueCapabilities(CMD_BUFFER_STATE const *cb_state,
8863                                                             VkPipelineStageFlags source_stage_mask,
8864                                                             VkPipelineStageFlags dest_stage_mask,
8865                                                             BarrierOperationsType barrier_op_type, const char *function,
8866                                                             const char *error_code) {
8867     bool skip = false;
8868     uint32_t queue_family_index = commandPoolMap[cb_state->createInfo.commandPool].get()->queueFamilyIndex;
8869     auto physical_device_state = GetPhysicalDeviceState();
8870 
8871     // Any pipeline stage included in srcStageMask or dstStageMask must be supported by the capabilities of the queue family
8872     // specified by the queueFamilyIndex member of the VkCommandPoolCreateInfo structure that was used to create the VkCommandPool
8873     // that commandBuffer was allocated from, as specified in the table of supported pipeline stages.
8874 
8875     if (queue_family_index < physical_device_state->queue_family_properties.size()) {
8876         VkQueueFlags specified_queue_flags = physical_device_state->queue_family_properties[queue_family_index].queueFlags;
8877 
8878         // Only check the source stage mask if any barriers aren't "acquire ownership"
8879         if ((barrier_op_type != kAllAcquire) && (source_stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) == 0) {
8880             skip |= CheckStageMaskQueueCompatibility(cb_state->commandBuffer, source_stage_mask, specified_queue_flags, function,
8881                                                      "srcStageMask", error_code);
8882         }
8883         // Only check the dest stage mask if any barriers aren't "release ownership"
8884         if ((barrier_op_type != kAllRelease) && (dest_stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) == 0) {
8885             skip |= CheckStageMaskQueueCompatibility(cb_state->commandBuffer, dest_stage_mask, specified_queue_flags, function,
8886                                                      "dstStageMask", error_code);
8887         }
8888     }
8889     return skip;
8890 }
8891 
PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer,uint32_t eventCount,const VkEvent * pEvents,VkPipelineStageFlags sourceStageMask,VkPipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)8892 bool CoreChecks::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
8893                                               VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
8894                                               uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
8895                                               uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
8896                                               uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
8897     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
8898     assert(cb_state);
8899 
8900     auto barrier_op_type = ComputeBarrierOperationsType(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
8901                                                         imageMemoryBarrierCount, pImageMemoryBarriers);
8902     bool skip = ValidateStageMasksAgainstQueueCapabilities(cb_state, sourceStageMask, dstStageMask, barrier_op_type,
8903                                                            "vkCmdWaitEvents", "VUID-vkCmdWaitEvents-srcStageMask-01164");
8904     skip |= ValidateStageMaskGsTsEnables(sourceStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-srcStageMask-01159",
8905                                          "VUID-vkCmdWaitEvents-srcStageMask-01161", "VUID-vkCmdWaitEvents-srcStageMask-02111",
8906                                          "VUID-vkCmdWaitEvents-srcStageMask-02112");
8907     skip |= ValidateStageMaskGsTsEnables(dstStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-dstStageMask-01160",
8908                                          "VUID-vkCmdWaitEvents-dstStageMask-01162", "VUID-vkCmdWaitEvents-dstStageMask-02113",
8909                                          "VUID-vkCmdWaitEvents-dstStageMask-02114");
8910     skip |= ValidateCmdQueueFlags(cb_state, "vkCmdWaitEvents()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
8911                                   "VUID-vkCmdWaitEvents-commandBuffer-cmdpool");
8912     skip |= ValidateCmd(cb_state, CMD_WAITEVENTS, "vkCmdWaitEvents()");
8913     skip |= ValidateBarriersToImages(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdWaitEvents()");
8914     skip |= ValidateBarriers("vkCmdWaitEvents()", cb_state, sourceStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
8915                              bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
8916     return skip;
8917 }
8918 
// Record state for vkCmdWaitEvents: bind the waited events to this command buffer and queue a
// deferred submit-time check that sourceStageMask matches what the events were set with.
void CoreChecks::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                            VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                            uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                            uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                            uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // This command's events are appended to the CB's cumulative event list; remember where they
    // start so the deferred validator below can address exactly this command's slice.
    auto first_event_index = cb_state->events.size();
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent), cb_state);
            // NOTE(review): this insert looks redundant with AddCommandBufferBinding above,
            // which presumably already records cb_state in cb_bindings -- confirm before removing.
            event_state->cb_bindings.insert(cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
    // The stage-mask check needs the queue, which is unknown until submit time, so defer it.
    // The lambda captures cb_state/eventCount/first_event_index/sourceStageMask by value.
    cb_state->eventUpdates.emplace_back(
        [=](VkQueue q) { return ValidateEventStageMask(q, cb_state, eventCount, first_event_index, sourceStageMask); });
    // Apply the image layout transitions described by the image barriers to tracked state.
    TransitionImageLayouts(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
    if (enabled.gpu_validation) {
        GpuPreCallValidateCmdWaitEvents(sourceStageMask);
    }
}
8942 
PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer,uint32_t eventCount,const VkEvent * pEvents,VkPipelineStageFlags sourceStageMask,VkPipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)8943 void CoreChecks::PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
8944                                              VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
8945                                              uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
8946                                              uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
8947                                              uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
8948     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
8949     RecordBarriersQFOTransfers(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
8950                                pImageMemoryBarriers);
8951 }
8952 
PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkDependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)8953 bool CoreChecks::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
8954                                                    VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
8955                                                    uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
8956                                                    uint32_t bufferMemoryBarrierCount,
8957                                                    const VkBufferMemoryBarrier *pBufferMemoryBarriers,
8958                                                    uint32_t imageMemoryBarrierCount,
8959                                                    const VkImageMemoryBarrier *pImageMemoryBarriers) {
8960     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
8961     assert(cb_state);
8962 
8963     bool skip = false;
8964     auto barrier_op_type = ComputeBarrierOperationsType(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
8965                                                         imageMemoryBarrierCount, pImageMemoryBarriers);
8966     skip |= ValidateStageMasksAgainstQueueCapabilities(cb_state, srcStageMask, dstStageMask, barrier_op_type,
8967                                                        "vkCmdPipelineBarrier", "VUID-vkCmdPipelineBarrier-srcStageMask-01183");
8968     skip |= ValidateCmdQueueFlags(cb_state, "vkCmdPipelineBarrier()",
8969                                   VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
8970                                   "VUID-vkCmdPipelineBarrier-commandBuffer-cmdpool");
8971     skip |= ValidateCmd(cb_state, CMD_PIPELINEBARRIER, "vkCmdPipelineBarrier()");
8972     skip |=
8973         ValidateStageMaskGsTsEnables(srcStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-srcStageMask-01168",
8974                                      "VUID-vkCmdPipelineBarrier-srcStageMask-01170", "VUID-vkCmdPipelineBarrier-srcStageMask-02115",
8975                                      "VUID-vkCmdPipelineBarrier-srcStageMask-02116");
8976     skip |=
8977         ValidateStageMaskGsTsEnables(dstStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-dstStageMask-01169",
8978                                      "VUID-vkCmdPipelineBarrier-dstStageMask-01171", "VUID-vkCmdPipelineBarrier-dstStageMask-02117",
8979                                      "VUID-vkCmdPipelineBarrier-dstStageMask-02118");
8980     if (cb_state->activeRenderPass) {
8981         skip |= ValidateRenderPassPipelineBarriers("vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, dependencyFlags,
8982                                                    memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
8983                                                    pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
8984         if (skip) return true;  // Early return to avoid redundant errors from below calls
8985     }
8986     skip |= ValidateBarriersToImages(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdPipelineBarrier()");
8987     skip |= ValidateBarriers("vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
8988                              bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
8989     return skip;
8990 }
8991 
PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkDependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)8992 void CoreChecks::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
8993                                                  VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
8994                                                  uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
8995                                                  uint32_t bufferMemoryBarrierCount,
8996                                                  const VkBufferMemoryBarrier *pBufferMemoryBarriers,
8997                                                  uint32_t imageMemoryBarrierCount,
8998                                                  const VkImageMemoryBarrier *pImageMemoryBarriers) {
8999     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9000 
9001     RecordBarriersQFOTransfers(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
9002                                pImageMemoryBarriers);
9003     TransitionImageLayouts(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
9004 }
9005 
SetQueryState(VkQueue queue,VkCommandBuffer commandBuffer,QueryObject object,QueryState value)9006 bool ValidationStateTracker::SetQueryState(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject object, QueryState value) {
9007     CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
9008     if (pCB) {
9009         pCB->queryToStateMap[object] = value;
9010     }
9011     auto queue_data = queueMap.find(queue);
9012     if (queue_data != queueMap.end()) {
9013         queue_data->second.queryToStateMap[object] = value;
9014     }
9015     return false;
9016 }
9017 
SetQueryStateMulti(VkQueue queue,VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,QueryState value)9018 bool ValidationStateTracker::SetQueryStateMulti(VkQueue queue, VkCommandBuffer commandBuffer, VkQueryPool queryPool,
9019                                                 uint32_t firstQuery, uint32_t queryCount, QueryState value) {
9020     CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
9021     auto queue_data = queueMap.find(queue);
9022 
9023     for (uint32_t i = 0; i < queryCount; i++) {
9024         QueryObject object = {queryPool, firstQuery + i};
9025         if (pCB) {
9026             pCB->queryToStateMap[object] = value;
9027         }
9028         if (queue_data != queueMap.end()) {
9029             queue_data->second.queryToStateMap[object] = value;
9030         }
9031     }
9032     return false;
9033 }
9034 
ValidateBeginQuery(const CMD_BUFFER_STATE * cb_state,const QueryObject & query_obj,VkFlags flags,CMD_TYPE cmd,const char * cmd_name,const char * vuid_queue_flags,const char * vuid_queue_feedback,const char * vuid_queue_occlusion,const char * vuid_precise,const char * vuid_query_count) const9035 bool CoreChecks::ValidateBeginQuery(const CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj, VkFlags flags, CMD_TYPE cmd,
9036                                     const char *cmd_name, const char *vuid_queue_flags, const char *vuid_queue_feedback,
9037                                     const char *vuid_queue_occlusion, const char *vuid_precise,
9038                                     const char *vuid_query_count) const {
9039     bool skip = false;
9040     const auto &query_pool_ci = GetQueryPoolState(query_obj.pool)->createInfo;
9041 
9042     // There are tighter queue constraints to test for certain query pools
9043     if (query_pool_ci.queryType == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT) {
9044         skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT, vuid_queue_feedback);
9045     }
9046     if (query_pool_ci.queryType == VK_QUERY_TYPE_OCCLUSION) {
9047         skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT, vuid_queue_occlusion);
9048     }
9049 
9050     skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, vuid_queue_flags);
9051 
9052     if (flags & VK_QUERY_CONTROL_PRECISE_BIT) {
9053         if (!enabled_features.core.occlusionQueryPrecise) {
9054             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
9055                             HandleToUint64(cb_state->commandBuffer), vuid_precise,
9056                             "%s: VK_QUERY_CONTROL_PRECISE_BIT provided, but precise occlusion queries not enabled on the device.",
9057                             cmd_name);
9058         }
9059 
9060         if (query_pool_ci.queryType != VK_QUERY_TYPE_OCCLUSION) {
9061             skip |=
9062                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
9063                         HandleToUint64(cb_state->commandBuffer), vuid_precise,
9064                         "%s: VK_QUERY_CONTROL_PRECISE_BIT provided, but pool query type is not VK_QUERY_TYPE_OCCLUSION", cmd_name);
9065         }
9066     }
9067 
9068     if (query_obj.query >= query_pool_ci.queryCount) {
9069         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
9070                         HandleToUint64(cb_state->commandBuffer), vuid_query_count,
9071                         "%s: Query index %" PRIu32 " must be less than query count %" PRIu32 " of %s.", cmd_name, query_obj.query,
9072                         query_pool_ci.queryCount, report_data->FormatHandle(query_obj.pool).c_str());
9073     }
9074 
9075     skip |= ValidateCmd(cb_state, cmd, cmd_name);
9076     return skip;
9077 }
9078 
RecordCmdBeginQuery(CMD_BUFFER_STATE * cb_state,const QueryObject & query_obj)9079 void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
9080     cb_state->activeQueries.insert(query_obj);
9081     cb_state->startedQueries.insert(query_obj);
9082     cb_state->queryUpdates.emplace_back([this, cb_state, query_obj](VkQueue q) {
9083         SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_RUNNING);
9084         return false;
9085     });
9086     AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
9087                             VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
9088 }
9089 
PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot,VkFlags flags)9090 bool CoreChecks::PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
9091     if (disabled.query_validation) return false;
9092     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9093     assert(cb_state);
9094     QueryObject query_obj(queryPool, slot);
9095     return ValidateBeginQuery(cb_state, query_obj, flags, CMD_BEGINQUERY, "vkCmdBeginQuery()",
9096                               "VUID-vkCmdBeginQuery-commandBuffer-cmdpool", "VUID-vkCmdBeginQuery-queryType-02327",
9097                               "VUID-vkCmdBeginQuery-queryType-00803", "VUID-vkCmdBeginQuery-queryType-00800",
9098                               "VUID-vkCmdBeginQuery-query-00802");
9099 }
9100 
VerifyQueryIsReset(VkQueue queue,VkCommandBuffer commandBuffer,QueryObject query_obj) const9101 bool CoreChecks::VerifyQueryIsReset(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject query_obj) const {
9102     bool skip = false;
9103 
9104     auto queue_data = GetQueueState(queue);
9105     if (!queue_data) return false;
9106 
9107     QueryState state = GetQueryState(queue_data, query_obj.pool, query_obj.query);
9108     if (state != QUERYSTATE_RESET) {
9109         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
9110                         HandleToUint64(commandBuffer), kVUID_Core_DrawState_QueryNotReset,
9111                         "vkCmdBeginQuery(): %s and query %" PRIu32
9112                         ": query not reset. "
9113                         "After query pool creation, each query must be reset before it is used. "
9114                         "Queries must also be reset between uses.",
9115                         report_data->FormatHandle(query_obj.pool).c_str(), query_obj.query);
9116     }
9117 
9118     return skip;
9119 }
9120 
PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot,VkFlags flags)9121 void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
9122                                                          VkFlags flags) {
9123     QueryObject query = {queryPool, slot};
9124     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9125     RecordCmdBeginQuery(cb_state, query);
9126 }
9127 
EnqueueVerifyBeginQuery(VkCommandBuffer command_buffer,const QueryObject & query_obj)9128 void CoreChecks::EnqueueVerifyBeginQuery(VkCommandBuffer command_buffer, const QueryObject &query_obj) {
9129     CMD_BUFFER_STATE *cb_state = GetCBState(command_buffer);
9130 
9131     // Enqueue the submit time validation here, ahead of the submit time state update in the StateTracker's PostCallRecord
9132     cb_state->queryUpdates.emplace_back(
9133         [this, cb_state, query_obj](VkQueue q) { return VerifyQueryIsReset(q, cb_state->commandBuffer, query_obj); });
9134 }
9135 
PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot,VkFlags flags)9136 void CoreChecks::PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
9137     QueryObject query_obj = {queryPool, slot};
9138     EnqueueVerifyBeginQuery(commandBuffer, query_obj);
9139 }
9140 
ValidateCmdEndQuery(const CMD_BUFFER_STATE * cb_state,const QueryObject & query_obj,CMD_TYPE cmd,const char * cmd_name,const char * vuid_queue_flags,const char * vuid_active_queries) const9141 bool CoreChecks::ValidateCmdEndQuery(const CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj, CMD_TYPE cmd,
9142                                      const char *cmd_name, const char *vuid_queue_flags, const char *vuid_active_queries) const {
9143     bool skip = false;
9144     if (!cb_state->activeQueries.count(query_obj)) {
9145         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
9146                         HandleToUint64(cb_state->commandBuffer), vuid_active_queries,
9147                         "%s: Ending a query before it was started: %s, index %d.", cmd_name,
9148                         report_data->FormatHandle(query_obj.pool).c_str(), query_obj.query);
9149     }
9150     skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, vuid_queue_flags);
9151     skip |= ValidateCmd(cb_state, cmd, cmd_name);
9152     return skip;
9153 }
9154 
PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot)9155 bool CoreChecks::PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
9156     if (disabled.query_validation) return false;
9157     QueryObject query_obj = {queryPool, slot};
9158     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9159     assert(cb_state);
9160     return ValidateCmdEndQuery(cb_state, query_obj, CMD_ENDQUERY, "vkCmdEndQuery()", "VUID-vkCmdEndQuery-commandBuffer-cmdpool",
9161                                "VUID-vkCmdEndQuery-None-01923");
9162 }
9163 
RecordCmdEndQuery(CMD_BUFFER_STATE * cb_state,const QueryObject & query_obj)9164 void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
9165     cb_state->activeQueries.erase(query_obj);
9166     cb_state->queryUpdates.emplace_back(
9167         [this, cb_state, query_obj](VkQueue q) { return SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_ENDED); });
9168     AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
9169                             VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
9170 }
9171 
PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot)9172 void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
9173     QueryObject query_obj = {queryPool, slot};
9174     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9175     RecordCmdEndQuery(cb_state, query_obj);
9176 }
9177 
PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount)9178 bool CoreChecks::PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
9179                                                   uint32_t queryCount) {
9180     if (disabled.query_validation) return false;
9181     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9182 
9183     bool skip = InsideRenderPass(cb_state, "vkCmdResetQueryPool()", "VUID-vkCmdResetQueryPool-renderpass");
9184     skip |= ValidateCmd(cb_state, CMD_RESETQUERYPOOL, "VkCmdResetQueryPool()");
9185     skip |= ValidateCmdQueueFlags(cb_state, "VkCmdResetQueryPool()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
9186                                   "VUID-vkCmdResetQueryPool-commandBuffer-cmdpool");
9187     return skip;
9188 }
9189 
PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount)9190 void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
9191                                                              uint32_t firstQuery, uint32_t queryCount) {
9192     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9193 
9194     cb_state->queryUpdates.emplace_back([this, commandBuffer, queryPool, firstQuery, queryCount](VkQueue q) {
9195         return SetQueryStateMulti(q, commandBuffer, queryPool, firstQuery, queryCount, QUERYSTATE_RESET);
9196     });
9197     AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
9198                             cb_state);
9199 }
9200 
GetQueryState(const QUEUE_STATE * queue_data,VkQueryPool queryPool,uint32_t queryIndex) const9201 QueryState CoreChecks::GetQueryState(const QUEUE_STATE *queue_data, VkQueryPool queryPool, uint32_t queryIndex) const {
9202     QueryObject query = {queryPool, queryIndex};
9203 
9204     const std::array<const decltype(queryToStateMap) *, 2> map_list = {{&queue_data->queryToStateMap, &queryToStateMap}};
9205 
9206     for (const auto map : map_list) {
9207         auto query_data = map->find(query);
9208         if (query_data != map->end()) {
9209             return query_data->second;
9210         }
9211     }
9212     return QUERYSTATE_UNKNOWN;
9213 }
9214 
GetQueryResultType(QueryState state,VkQueryResultFlags flags)9215 static QueryResultType GetQueryResultType(QueryState state, VkQueryResultFlags flags) {
9216     switch (state) {
9217         case QUERYSTATE_UNKNOWN:
9218             return QUERYRESULT_UNKNOWN;
9219         case QUERYSTATE_RESET:
9220         case QUERYSTATE_RUNNING:
9221             if (flags & VK_QUERY_RESULT_WAIT_BIT) {
9222                 return ((state == QUERYSTATE_RESET) ? QUERYRESULT_WAIT_ON_RESET : QUERYRESULT_WAIT_ON_RUNNING);
9223             } else if ((flags & VK_QUERY_RESULT_PARTIAL_BIT) || (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)) {
9224                 return QUERYRESULT_SOME_DATA;
9225             } else {
9226                 return QUERYRESULT_NO_DATA;
9227             }
9228         case QUERYSTATE_ENDED:
9229             if ((flags & VK_QUERY_RESULT_WAIT_BIT) || (flags & VK_QUERY_RESULT_PARTIAL_BIT) ||
9230                 (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)) {
9231                 return QUERYRESULT_SOME_DATA;
9232             } else {
9233                 return QUERYRESULT_MAYBE_NO_DATA;
9234             }
9235         case QUERYSTATE_AVAILABLE:
9236             return QUERYRESULT_SOME_DATA;
9237     }
9238     assert(false);
9239     return QUERYRESULT_UNKNOWN;
9240 }
9241 
ValidateQuery(VkQueue queue,CMD_BUFFER_STATE * pCB,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkQueryResultFlags flags) const9242 bool CoreChecks::ValidateQuery(VkQueue queue, CMD_BUFFER_STATE *pCB, VkQueryPool queryPool, uint32_t firstQuery,
9243                                uint32_t queryCount, VkQueryResultFlags flags) const {
9244     bool skip = false;
9245     auto queue_data = GetQueueState(queue);
9246     if (!queue_data) return false;
9247     for (uint32_t i = 0; i < queryCount; i++) {
9248         QueryState state = GetQueryState(queue_data, queryPool, firstQuery + i);
9249         QueryResultType result_type = GetQueryResultType(state, flags);
9250         if (result_type != QUERYRESULT_SOME_DATA) {
9251             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
9252                             HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidQuery,
9253                             "Requesting a copy from query to buffer on %s query %" PRIu32 ": %s",
9254                             report_data->FormatHandle(queryPool).c_str(), firstQuery + i, string_QueryResultType(result_type));
9255         }
9256     }
9257     return skip;
9258 }
9259 
PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)9260 bool CoreChecks::PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
9261                                                         uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
9262                                                         VkDeviceSize stride, VkQueryResultFlags flags) {
9263     if (disabled.query_validation) return false;
9264     const auto cb_state = GetCBState(commandBuffer);
9265     const auto dst_buff_state = GetBufferState(dstBuffer);
9266     assert(cb_state);
9267     assert(dst_buff_state);
9268     bool skip = ValidateMemoryIsBoundToBuffer(dst_buff_state, "vkCmdCopyQueryPoolResults()",
9269                                               "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00826");
9270     skip |= ValidateQueryPoolStride("VUID-vkCmdCopyQueryPoolResults-flags-00822", "VUID-vkCmdCopyQueryPoolResults-flags-00823",
9271                                     stride, "dstOffset", dstOffset, flags);
9272     // Validate that DST buffer has correct usage flags set
9273     skip |= ValidateBufferUsageFlags(dst_buff_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
9274                                      "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00825", "vkCmdCopyQueryPoolResults()",
9275                                      "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
9276     skip |= ValidateCmdQueueFlags(cb_state, "vkCmdCopyQueryPoolResults()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
9277                                   "VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool");
9278     skip |= ValidateCmd(cb_state, CMD_COPYQUERYPOOLRESULTS, "vkCmdCopyQueryPoolResults()");
9279     skip |= InsideRenderPass(cb_state, "vkCmdCopyQueryPoolResults()", "VUID-vkCmdCopyQueryPoolResults-renderpass");
9280     return skip;
9281 }
9282 
PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)9283 void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
9284                                                                    uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
9285                                                                    VkDeviceSize dstOffset, VkDeviceSize stride,
9286                                                                    VkQueryResultFlags flags) {
9287     auto cb_state = GetCBState(commandBuffer);
9288     auto dst_buff_state = GetBufferState(dstBuffer);
9289     AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
9290     AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
9291                             cb_state);
9292 }
9293 
PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)9294 void CoreChecks::PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
9295                                                       uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
9296                                                       VkDeviceSize stride, VkQueryResultFlags flags) {
9297     auto cb_state = GetCBState(commandBuffer);
9298     cb_state->queryUpdates.emplace_back([this, cb_state, queryPool, firstQuery, queryCount, flags](VkQueue q) {
9299         return ValidateQuery(q, cb_state, queryPool, firstQuery, queryCount, flags);
9300     });
9301 }
9302 
// Validate vkCmdPushConstants(): queue-family support, command-buffer state, the (offset, size)
// range (via ValidatePushConstantRange), non-empty stageFlags, and that stageFlags agrees exactly
// with the stage flags of the pipeline layout's push-constant ranges overlapping [offset, offset+size).
// Returns true if any error was logged.
bool CoreChecks::PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                 const void *pValues) {
    bool skip = false;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdPushConstants()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
                                  "VUID-vkCmdPushConstants-commandBuffer-cmdpool");
    skip |= ValidateCmd(cb_state, CMD_PUSHCONSTANTS, "vkCmdPushConstants()");
    skip |= ValidatePushConstantRange(offset, size, "vkCmdPushConstants()");
    // stageFlags must name at least one shader stage.
    if (0 == stageFlags) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-stageFlags-requiredbitmask",
                        "vkCmdPushConstants() call has no stageFlags set.");
    }

    // Check if pipeline_layout VkPushConstantRange(s) overlapping offset, size have stageFlags set for each stage in the command
    // stageFlags argument, *and* that the command stageFlags argument has bits set for the stageFlags in each overlapping range.
    // Only performed once the basic checks above all passed, so the layout lookup is meaningful.
    if (!skip) {
        const auto &ranges = *GetPipelineLayout(layout)->push_constant_ranges;
        VkShaderStageFlags found_stages = 0;
        for (const auto &range : ranges) {
            // A range "overlaps" when it fully contains [offset, offset + size).
            if ((offset >= range.offset) && (offset + size <= range.offset + range.size)) {
                VkShaderStageFlags matching_stages = range.stageFlags & stageFlags;
                // Every stage declared by the overlapping range must appear in the command's stageFlags.
                if (matching_stages != range.stageFlags) {
                    // "VUID-vkCmdPushConstants-offset-01796" VUID-vkCmdPushConstants-offset-01796
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-offset-01796",
                                    "vkCmdPushConstants(): stageFlags (0x%" PRIx32 ", offset (%" PRIu32 "), and size (%" PRIu32
                                    "),  must contain all stages in overlapping VkPushConstantRange stageFlags (0x%" PRIx32
                                    "), offset (%" PRIu32 "), and size (%" PRIu32 ") in %s.",
                                    (uint32_t)stageFlags, offset, size, (uint32_t)range.stageFlags, range.offset, range.size,
                                    report_data->FormatHandle(layout).c_str());
                }

                // Accumulate all stages we've found
                found_stages = matching_stages | found_stages;
            }
        }
        // Conversely, every stage in the command's stageFlags must be covered by some overlapping range.
        if (found_stages != stageFlags) {
            // "VUID-vkCmdPushConstants-offset-01795" VUID-vkCmdPushConstants-offset-01795
            uint32_t missing_stages = ~found_stages & stageFlags;
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-offset-01795",
                            "vkCmdPushConstants(): stageFlags = 0x%" PRIx32
                            ", VkPushConstantRange in %s overlapping offset = %d and size = %d, do not contain "
                            "stageFlags 0x%" PRIx32 ".",
                            (uint32_t)stageFlags, report_data->FormatHandle(layout).c_str(), offset, size, missing_stages);
        }
    }
    return skip;
}
9355 
PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer,VkPipelineStageFlagBits pipelineStage,VkQueryPool queryPool,uint32_t slot)9356 bool CoreChecks::PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
9357                                                   VkQueryPool queryPool, uint32_t slot) {
9358     if (disabled.query_validation) return false;
9359     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9360     assert(cb_state);
9361     bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdWriteTimestamp()",
9362                                       VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
9363                                       "VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool");
9364     skip |= ValidateCmd(cb_state, CMD_WRITETIMESTAMP, "vkCmdWriteTimestamp()");
9365     return skip;
9366 }
9367 
PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer,VkPipelineStageFlagBits pipelineStage,VkQueryPool queryPool,uint32_t slot)9368 void CoreChecks::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
9369                                                  VkQueryPool queryPool, uint32_t slot) {
9370     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
9371     QueryObject query = {queryPool, slot};
9372     cb_state->queryUpdates.emplace_back([this, commandBuffer, query](VkQueue q) {
9373         bool skip = false;
9374         skip |= VerifyQueryIsReset(q, commandBuffer, query);
9375         skip |= SetQueryState(q, commandBuffer, query, QUERYSTATE_ENDED);
9376         return skip;
9377     });
9378     AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
9379                             cb_state);
9380 }
9381 
// During vkCreateFramebuffer validation: for each attachment referenced by `attachments`, check
// that the backing image (or, for imageless framebuffers, the declared attachment-image info)
// was created with `usage_flag`. Logs `error_code` and returns true on any mismatch.
bool CoreChecks::MatchUsage(uint32_t count, const VkAttachmentReference2KHR *attachments, const VkFramebufferCreateInfo *fbci,
                            VkImageUsageFlagBits usage_flag, const char *error_code) const {
    bool skip = false;

    if (attachments) {
        for (uint32_t attach = 0; attach < count; attach++) {
            if (attachments[attach].attachment != VK_ATTACHMENT_UNUSED) {
                // Attachment counts are verified elsewhere, but prevent an invalid access
                if (attachments[attach].attachment < fbci->attachmentCount) {
                    if ((fbci->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
                        // Conventional framebuffer: usage comes from the image behind the attachment's view.
                        const VkImageView *image_view = &fbci->pAttachments[attachments[attach].attachment];
                        auto view_state = GetImageViewState(*image_view);
                        if (view_state) {
                            const VkImageCreateInfo *ici = &GetImageState(view_state->create_info.image)->createInfo;
                            // NOTE(review): ici is the address of a member, so this null check cannot fail;
                            // if GetImageState() can return null, the dereference above is the real hazard — confirm.
                            if (ici != nullptr) {
                                if ((ici->usage & usage_flag) == 0) {
                                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
                                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, error_code,
                                                    "vkCreateFramebuffer:  Framebuffer Attachment (%d) conflicts with the image's "
                                                    "IMAGE_USAGE flags (%s).",
                                                    attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
                                }
                            }
                        }
                    } else {
                        // Imageless framebuffer: usage is declared in VkFramebufferAttachmentsCreateInfoKHR.
                        const VkFramebufferAttachmentsCreateInfoKHR *fbaci =
                            lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(fbci->pNext);
                        if (fbaci != nullptr && fbaci->pAttachmentImageInfos != nullptr &&
                            fbaci->attachmentImageInfoCount > attachments[attach].attachment) {
                            uint32_t image_usage = fbaci->pAttachmentImageInfos[attachments[attach].attachment].usage;
                            if ((image_usage & usage_flag) == 0) {
                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
                                                0, error_code,
                                                "vkCreateFramebuffer:  Framebuffer attachment info (%d) conflicts with the image's "
                                                "IMAGE_USAGE flags (%s).",
                                                attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
                            }
                        }
                    }
                }
            }
        }
    }
    return skip;
}
9427 
9428 // Validate VkFramebufferCreateInfo which includes:
9429 // 1. attachmentCount equals renderPass attachmentCount
9430 // 2. corresponding framebuffer and renderpass attachments have matching formats
9431 // 3. corresponding framebuffer and renderpass attachments have matching sample counts
9432 // 4. fb attachments only have a single mip level
9433 // 5. fb attachment dimensions are each at least as large as the fb
// 6. fb attachments use identity swizzle
9435 // 7. fb attachments used by renderPass for color/input/ds have correct usage bit set
9436 // 8. fb dimensions are within physical device limits
ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo * pCreateInfo) const9437 bool CoreChecks::ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo *pCreateInfo) const {
9438     bool skip = false;
9439 
9440     const VkFramebufferAttachmentsCreateInfoKHR *pFramebufferAttachmentsCreateInfo =
9441         lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(pCreateInfo->pNext);
9442     if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) != 0) {
9443         if (!enabled_features.imageless_framebuffer_features.imagelessFramebuffer) {
9444             skip |=
9445                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9446                         "VUID-VkFramebufferCreateInfo-flags-03189",
9447                         "vkCreateFramebuffer(): VkFramebufferCreateInfo flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, "
9448                         "but the imagelessFramebuffer feature is not enabled.");
9449         }
9450 
9451         if (pFramebufferAttachmentsCreateInfo == nullptr) {
9452             skip |=
9453                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9454                         "VUID-VkFramebufferCreateInfo-flags-03190",
9455                         "vkCreateFramebuffer(): VkFramebufferCreateInfo flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, "
9456                         "but no instance of VkFramebufferAttachmentsCreateInfoKHR is present in the pNext chain.");
9457         } else {
9458             if (pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != 0 &&
9459                 pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != pCreateInfo->attachmentCount) {
9460                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9461                                 "VUID-VkFramebufferCreateInfo-flags-03191",
9462                                 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachmentCount is %u, but "
9463                                 "VkFramebufferAttachmentsCreateInfoKHR attachmentImageInfoCount is %u.",
9464                                 pCreateInfo->attachmentCount, pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount);
9465             }
9466         }
9467     }
9468 
9469     auto rp_state = GetRenderPassState(pCreateInfo->renderPass);
9470     if (rp_state) {
9471         const VkRenderPassCreateInfo2KHR *rpci = rp_state->createInfo.ptr();
9472         if (rpci->attachmentCount != pCreateInfo->attachmentCount) {
9473             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9474                             HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-attachmentCount-00876",
9475                             "vkCreateFramebuffer(): VkFramebufferCreateInfo attachmentCount of %u does not match attachmentCount "
9476                             "of %u of %s being used to create Framebuffer.",
9477                             pCreateInfo->attachmentCount, rpci->attachmentCount,
9478                             report_data->FormatHandle(pCreateInfo->renderPass).c_str());
9479         } else {
9480             // attachmentCounts match, so make sure corresponding attachment details line up
9481             if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
9482                 const VkImageView *image_views = pCreateInfo->pAttachments;
9483                 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
9484                     auto view_state = GetImageViewState(image_views[i]);
9485                     if (view_state == nullptr) {
9486                         skip |=
9487                             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
9488                                     HandleToUint64(image_views[i]), "VUID-VkFramebufferCreateInfo-flags-03188",
9489                                     "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u is not a valid VkImageView.", i);
9490                     } else {
9491                         auto &ivci = view_state->create_info;
9492                         if (ivci.format != rpci->pAttachments[i].format) {
9493                             skip |= log_msg(
9494                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9495                                 HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-pAttachments-00880",
9496                                 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has format of %s that does not "
9497                                 "match the format of %s used by the corresponding attachment for %s.",
9498                                 i, string_VkFormat(ivci.format), string_VkFormat(rpci->pAttachments[i].format),
9499                                 report_data->FormatHandle(pCreateInfo->renderPass).c_str());
9500                         }
9501                         const VkImageCreateInfo *ici = &GetImageState(ivci.image)->createInfo;
9502                         if (ici->samples != rpci->pAttachments[i].samples) {
9503                             skip |=
9504                                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9505                                         HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-pAttachments-00881",
9506                                         "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has %s samples that do not "
9507                                         "match the %s "
9508                                         "samples used by the corresponding attachment for %s.",
9509                                         i, string_VkSampleCountFlagBits(ici->samples),
9510                                         string_VkSampleCountFlagBits(rpci->pAttachments[i].samples),
9511                                         report_data->FormatHandle(pCreateInfo->renderPass).c_str());
9512                         }
9513                         // Verify that view only has a single mip level
9514                         if (ivci.subresourceRange.levelCount != 1) {
9515                             skip |= log_msg(
9516                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9517                                 "VUID-VkFramebufferCreateInfo-pAttachments-00883",
9518                                 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has mip levelCount of %u but "
9519                                 "only a single mip level (levelCount ==  1) is allowed when creating a Framebuffer.",
9520                                 i, ivci.subresourceRange.levelCount);
9521                         }
9522                         const uint32_t mip_level = ivci.subresourceRange.baseMipLevel;
9523                         uint32_t mip_width = max(1u, ici->extent.width >> mip_level);
9524                         uint32_t mip_height = max(1u, ici->extent.height >> mip_level);
9525                         if (!(rpci->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT ||
9526                               rpci->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT)) {
9527                             if ((ivci.subresourceRange.layerCount < pCreateInfo->layers) || (mip_width < pCreateInfo->width) ||
9528                                 (mip_height < pCreateInfo->height)) {
9529                                 skip |= log_msg(
9530                                     report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9531                                     "VUID-VkFramebufferCreateInfo-pAttachments-00882",
9532                                     "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has dimensions "
9533                                     "smaller than the corresponding framebuffer dimensions. Here are the respective dimensions for "
9534                                     "attachment #%u, framebuffer:\n"
9535                                     "width: %u, %u\n"
9536                                     "height: %u, %u\n"
9537                                     "layerCount: %u, %u\n",
9538                                     i, ivci.subresourceRange.baseMipLevel, i, mip_width, pCreateInfo->width, mip_height,
9539                                     pCreateInfo->height, ivci.subresourceRange.layerCount, pCreateInfo->layers);
9540                             }
9541                         } else {
9542                             if (device_extensions.vk_ext_fragment_density_map) {
9543                                 uint32_t ceiling_width = (uint32_t)ceil(
9544                                     (float)pCreateInfo->width /
9545                                     std::max((float)phys_dev_ext_props.fragment_density_map_props.maxFragmentDensityTexelSize.width,
9546                                              1.0f));
9547                                 if (mip_width < ceiling_width) {
9548                                     skip |= log_msg(
9549                                         report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9550                                         "VUID-VkFramebufferCreateInfo-pAttachments-02555",
9551                                         "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has width "
9552                                         "smaller than the corresponding the ceiling of framebuffer width / "
9553                                         "maxFragmentDensityTexelSize.width "
9554                                         "Here are the respective dimensions for attachment #%u, the ceiling value:\n "
9555                                         "attachment #%u, framebuffer:\n"
9556                                         "width: %u, the ceiling value: %u\n",
9557                                         i, ivci.subresourceRange.baseMipLevel, i, i, mip_width, ceiling_width);
9558                                 }
9559                                 uint32_t ceiling_height = (uint32_t)ceil(
9560                                     (float)pCreateInfo->height /
9561                                     std::max(
9562                                         (float)phys_dev_ext_props.fragment_density_map_props.maxFragmentDensityTexelSize.height,
9563                                         1.0f));
9564                                 if (mip_height < ceiling_height) {
9565                                     skip |= log_msg(
9566                                         report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9567                                         "VUID-VkFramebufferCreateInfo-pAttachments-02556",
9568                                         "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has height "
9569                                         "smaller than the corresponding the ceiling of framebuffer height / "
9570                                         "maxFragmentDensityTexelSize.height "
9571                                         "Here are the respective dimensions for attachment #%u, the ceiling value:\n "
9572                                         "attachment #%u, framebuffer:\n"
9573                                         "height: %u, the ceiling value: %u\n",
9574                                         i, ivci.subresourceRange.baseMipLevel, i, i, mip_height, ceiling_height);
9575                                 }
9576                             }
9577                         }
9578                         if (((ivci.components.r != VK_COMPONENT_SWIZZLE_IDENTITY) &&
9579                              (ivci.components.r != VK_COMPONENT_SWIZZLE_R)) ||
9580                             ((ivci.components.g != VK_COMPONENT_SWIZZLE_IDENTITY) &&
9581                              (ivci.components.g != VK_COMPONENT_SWIZZLE_G)) ||
9582                             ((ivci.components.b != VK_COMPONENT_SWIZZLE_IDENTITY) &&
9583                              (ivci.components.b != VK_COMPONENT_SWIZZLE_B)) ||
9584                             ((ivci.components.a != VK_COMPONENT_SWIZZLE_IDENTITY) &&
9585                              (ivci.components.a != VK_COMPONENT_SWIZZLE_A))) {
9586                             skip |= log_msg(
9587                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9588                                 "VUID-VkFramebufferCreateInfo-pAttachments-00884",
9589                                 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has non-identy swizzle. All "
9590                                 "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
9591                                 "swizzle values:\n"
9592                                 "r swizzle = %s\n"
9593                                 "g swizzle = %s\n"
9594                                 "b swizzle = %s\n"
9595                                 "a swizzle = %s\n",
9596                                 i, string_VkComponentSwizzle(ivci.components.r), string_VkComponentSwizzle(ivci.components.g),
9597                                 string_VkComponentSwizzle(ivci.components.b), string_VkComponentSwizzle(ivci.components.a));
9598                         }
9599                     }
9600                 }
9601             } else if (pFramebufferAttachmentsCreateInfo) {
9602                 // VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR is set
9603                 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
9604                     auto &aii = pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[i];
9605                     bool formatFound = false;
9606                     for (uint32_t j = 0; j < aii.viewFormatCount; ++j) {
9607                         if (aii.pViewFormats[j] == rpci->pAttachments[i].format) {
9608                             formatFound = true;
9609                         }
9610                     }
9611                     if (!formatFound) {
9612                         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9613                                         HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-flags-03205",
9614                                         "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u does not include "
9615                                         "format %s used "
9616                                         "by the corresponding attachment for renderPass (%s).",
9617                                         i, string_VkFormat(rpci->pAttachments[i].format),
9618                                         report_data->FormatHandle(pCreateInfo->renderPass).c_str());
9619                     }
9620 
9621                     const char *mismatchedLayersNoMultiviewVuid = device_extensions.vk_khr_multiview
9622                                                                       ? "VUID-VkFramebufferCreateInfo-renderPass-03199"
9623                                                                       : "VUID-VkFramebufferCreateInfo-flags-03200";
9624                     if ((rpci->subpassCount == 0) || (rpci->pSubpasses[0].viewMask == 0)) {
9625                         if (aii.layerCount < pCreateInfo->layers) {
9626                             skip |=
9627                                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9628                                         mismatchedLayersNoMultiviewVuid,
9629                                         "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has only #%u layers, "
9630                                         "but framebuffer has #%u layers.",
9631                                         i, aii.layerCount, pCreateInfo->layers);
9632                         }
9633                     }
9634 
9635                     if (!device_extensions.vk_ext_fragment_density_map) {
9636                         if (aii.width < pCreateInfo->width) {
9637                             skip |= log_msg(
9638                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9639                                 "VUID-VkFramebufferCreateInfo-flags-03192",
9640                                 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has a width of only #%u, "
9641                                 "but framebuffer has a width of #%u.",
9642                                 i, aii.width, pCreateInfo->width);
9643                         }
9644 
9645                         if (aii.height < pCreateInfo->height) {
9646                             skip |= log_msg(
9647                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9648                                 "VUID-VkFramebufferCreateInfo-flags-03193",
9649                                 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has a height of only #%u, "
9650                                 "but framebuffer has a height of #%u.",
9651                                 i, aii.height, pCreateInfo->height);
9652                         }
9653                     }
9654                 }
9655 
9656                 // Validate image usage
9657                 uint32_t attachment_index = VK_ATTACHMENT_UNUSED;
9658                 for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
9659                     skip |= MatchUsage(rpci->pSubpasses[i].colorAttachmentCount, rpci->pSubpasses[i].pColorAttachments, pCreateInfo,
9660                                        VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03201");
9661                     skip |=
9662                         MatchUsage(rpci->pSubpasses[i].colorAttachmentCount, rpci->pSubpasses[i].pResolveAttachments, pCreateInfo,
9663                                    VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03201");
9664                     skip |= MatchUsage(1, rpci->pSubpasses[i].pDepthStencilAttachment, pCreateInfo,
9665                                        VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03202");
9666                     skip |= MatchUsage(rpci->pSubpasses[i].inputAttachmentCount, rpci->pSubpasses[i].pInputAttachments, pCreateInfo,
9667                                        VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03204");
9668 
9669                     const VkSubpassDescriptionDepthStencilResolveKHR *pDepthStencilResolve =
9670                         lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(rpci->pSubpasses[i].pNext);
9671                     if (device_extensions.vk_khr_depth_stencil_resolve && pDepthStencilResolve != nullptr) {
9672                         skip |= MatchUsage(1, pDepthStencilResolve->pDepthStencilResolveAttachment, pCreateInfo,
9673                                            VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03203");
9674                     }
9675                 }
9676 
9677                 if (device_extensions.vk_khr_multiview) {
9678                     if ((rpci->subpassCount > 0) && (rpci->pSubpasses[0].viewMask != 0)) {
9679                         for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
9680                             const VkSubpassDescriptionDepthStencilResolveKHR *pDepthStencilResolve =
9681                                 lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(rpci->pSubpasses[i].pNext);
9682                             uint32_t view_bits = rpci->pSubpasses[i].viewMask;
9683                             uint32_t highest_view_bit = 0;
9684 
9685                             for (int j = 0; j < 32; ++j) {
9686                                 if (((view_bits >> j) & 1) != 0) {
9687                                     highest_view_bit = j;
9688                                 }
9689                             }
9690 
9691                             for (uint32_t j = 0; j < rpci->pSubpasses[i].colorAttachmentCount; ++j) {
9692                                 attachment_index = rpci->pSubpasses[i].pColorAttachments[j].attachment;
9693                                 if (attachment_index != VK_ATTACHMENT_UNUSED) {
9694                                     uint32_t layer_count =
9695                                         pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
9696                                     if (layer_count <= highest_view_bit) {
9697                                         skip |= log_msg(
9698                                             report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9699                                             HandleToUint64(pCreateInfo->renderPass),
9700                                             "VUID-VkFramebufferCreateInfo-renderPass-03198",
9701                                             "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
9702                                             "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
9703                                             "includes layer %u, with that attachment specified as a color attachment %u.",
9704                                             attachment_index, layer_count, i,
9705                                             report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
9706                                     }
9707                                 }
9708                                 if (rpci->pSubpasses[i].pResolveAttachments) {
9709                                     attachment_index = rpci->pSubpasses[i].pResolveAttachments[j].attachment;
9710                                     if (attachment_index != VK_ATTACHMENT_UNUSED) {
9711                                         uint32_t layer_count =
9712                                             pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
9713                                         if (layer_count <= highest_view_bit) {
9714                                             skip |= log_msg(
9715                                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
9716                                                 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9717                                                 HandleToUint64(pCreateInfo->renderPass),
9718                                                 "VUID-VkFramebufferCreateInfo-renderPass-03198",
9719                                                 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
9720                                                 "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
9721                                                 "includes layer %u, with that attachment specified as a resolve attachment %u.",
9722                                                 attachment_index, layer_count, i,
9723                                                 report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
9724                                         }
9725                                     }
9726                                 }
9727                             }
9728 
9729                             for (uint32_t j = 0; j < rpci->pSubpasses[i].inputAttachmentCount; ++j) {
9730                                 attachment_index = rpci->pSubpasses[i].pInputAttachments[j].attachment;
9731                                 if (attachment_index != VK_ATTACHMENT_UNUSED) {
9732                                     uint32_t layer_count =
9733                                         pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
9734                                     if (layer_count <= highest_view_bit) {
9735                                         skip |= log_msg(
9736                                             report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9737                                             HandleToUint64(pCreateInfo->renderPass),
9738                                             "VUID-VkFramebufferCreateInfo-renderPass-03198",
9739                                             "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
9740                                             "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
9741                                             "includes layer %u, with that attachment specified as an input attachment %u.",
9742                                             attachment_index, layer_count, i,
9743                                             report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
9744                                     }
9745                                 }
9746                             }
9747 
9748                             if (rpci->pSubpasses[i].pDepthStencilAttachment != nullptr) {
9749                                 attachment_index = rpci->pSubpasses[i].pDepthStencilAttachment->attachment;
9750                                 if (attachment_index != VK_ATTACHMENT_UNUSED) {
9751                                     uint32_t layer_count =
9752                                         pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
9753                                     if (layer_count <= highest_view_bit) {
9754                                         skip |= log_msg(
9755                                             report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9756                                             HandleToUint64(pCreateInfo->renderPass),
9757                                             "VUID-VkFramebufferCreateInfo-renderPass-03198",
9758                                             "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
9759                                             "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
9760                                             "includes layer %u, with that attachment specified as a depth/stencil attachment.",
9761                                             attachment_index, layer_count, i,
9762                                             report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit);
9763                                     }
9764                                 }
9765 
9766                                 if (device_extensions.vk_khr_depth_stencil_resolve && pDepthStencilResolve != nullptr &&
9767                                     pDepthStencilResolve->pDepthStencilResolveAttachment != nullptr) {
9768                                     attachment_index = pDepthStencilResolve->pDepthStencilResolveAttachment->attachment;
9769                                     if (attachment_index != VK_ATTACHMENT_UNUSED) {
9770                                         uint32_t layer_count =
9771                                             pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
9772                                         if (layer_count <= highest_view_bit) {
9773                                             skip |= log_msg(
9774                                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
9775                                                 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
9776                                                 HandleToUint64(pCreateInfo->renderPass),
9777                                                 "VUID-VkFramebufferCreateInfo-renderPass-03198",
9778                                                 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
9779                                                 "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
9780                                                 "includes layer %u, with that attachment specified as a depth/stencil resolve "
9781                                                 "attachment.",
9782                                                 attachment_index, layer_count, i,
9783                                                 report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit);
9784                                         }
9785                                     }
9786                                 }
9787                             }
9788                         }
9789                     }
9790                 }
9791             }
9792 
9793             if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
9794                 // Verify correct attachment usage flags
9795                 for (uint32_t subpass = 0; subpass < rpci->subpassCount; subpass++) {
9796                     // Verify input attachments:
9797                     skip |= MatchUsage(rpci->pSubpasses[subpass].inputAttachmentCount, rpci->pSubpasses[subpass].pInputAttachments,
9798                                        pCreateInfo, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
9799                                        "VUID-VkFramebufferCreateInfo-pAttachments-00879");
9800                     // Verify color attachments:
9801                     skip |= MatchUsage(rpci->pSubpasses[subpass].colorAttachmentCount, rpci->pSubpasses[subpass].pColorAttachments,
9802                                        pCreateInfo, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
9803                                        "VUID-VkFramebufferCreateInfo-pAttachments-00877");
9804                     // Verify depth/stencil attachments:
9805                     skip |=
9806                         MatchUsage(1, rpci->pSubpasses[subpass].pDepthStencilAttachment, pCreateInfo,
9807                                    VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-pAttachments-02633");
9808                 }
9809             }
9810         }
9811     }
9812     // Verify FB dimensions are within physical device limits
9813     if (pCreateInfo->width > phys_dev_props.limits.maxFramebufferWidth) {
9814         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9815                         "VUID-VkFramebufferCreateInfo-width-00886",
9816                         "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo width exceeds physical device limits. Requested "
9817                         "width: %u, device max: %u\n",
9818                         pCreateInfo->width, phys_dev_props.limits.maxFramebufferWidth);
9819     }
9820     if (pCreateInfo->height > phys_dev_props.limits.maxFramebufferHeight) {
9821         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9822                         "VUID-VkFramebufferCreateInfo-height-00888",
9823                         "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo height exceeds physical device limits. Requested "
9824                         "height: %u, device max: %u\n",
9825                         pCreateInfo->height, phys_dev_props.limits.maxFramebufferHeight);
9826     }
9827     if (pCreateInfo->layers > phys_dev_props.limits.maxFramebufferLayers) {
9828         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9829                         "VUID-VkFramebufferCreateInfo-layers-00890",
9830                         "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo layers exceeds physical device limits. Requested "
9831                         "layers: %u, device max: %u\n",
9832                         pCreateInfo->layers, phys_dev_props.limits.maxFramebufferLayers);
9833     }
9834     // Verify FB dimensions are greater than zero
9835     if (pCreateInfo->width <= 0) {
9836         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9837                         "VUID-VkFramebufferCreateInfo-width-00885",
9838                         "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo width must be greater than zero.");
9839     }
9840     if (pCreateInfo->height <= 0) {
9841         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9842                         "VUID-VkFramebufferCreateInfo-height-00887",
9843                         "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo height must be greater than zero.");
9844     }
9845     if (pCreateInfo->layers <= 0) {
9846         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9847                         "VUID-VkFramebufferCreateInfo-layers-00889",
9848                         "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo layers must be greater than zero.");
9849     }
9850     return skip;
9851 }
9852 
PreCallValidateCreateFramebuffer(VkDevice device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)9853 bool CoreChecks::PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
9854                                                   const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
9855     // TODO : Verify that renderPass FB is created with is compatible with FB
9856     bool skip = false;
9857     skip |= ValidateFramebufferCreateInfo(pCreateInfo);
9858     return skip;
9859 }
9860 
PostCallRecordCreateFramebuffer(VkDevice device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer,VkResult result)9861 void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
9862                                                              const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
9863                                                              VkResult result) {
9864     if (VK_SUCCESS != result) return;
9865     // Shadow create info and store in map
9866     std::unique_ptr<FRAMEBUFFER_STATE> fb_state(
9867         new FRAMEBUFFER_STATE(*pFramebuffer, pCreateInfo, GetRenderPassStateSharedPtr(pCreateInfo->renderPass)));
9868 
9869     if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
9870         for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
9871             VkImageView view = pCreateInfo->pAttachments[i];
9872             auto view_state = GetImageViewState(view);
9873             if (!view_state) {
9874                 continue;
9875             }
9876         }
9877     }
9878     frameBufferMap[*pFramebuffer] = std::move(fb_state);
9879 }
9880 
FindDependency(const uint32_t index,const uint32_t dependent,const std::vector<DAGNode> & subpass_to_node,std::unordered_set<uint32_t> & processed_nodes)9881 static bool FindDependency(const uint32_t index, const uint32_t dependent, const std::vector<DAGNode> &subpass_to_node,
9882                            std::unordered_set<uint32_t> &processed_nodes) {
9883     // If we have already checked this node we have not found a dependency path so return false.
9884     if (processed_nodes.count(index)) return false;
9885     processed_nodes.insert(index);
9886     const DAGNode &node = subpass_to_node[index];
9887     // Look for a dependency path. If one exists return true else recurse on the previous nodes.
9888     if (std::find(node.prev.begin(), node.prev.end(), dependent) == node.prev.end()) {
9889         for (auto elem : node.prev) {
9890             if (FindDependency(elem, dependent, subpass_to_node, processed_nodes)) return true;
9891         }
9892     } else {
9893         return true;
9894     }
9895     return false;
9896 }
9897 
IsImageLayoutReadOnly(VkImageLayout layout) const9898 bool CoreChecks::IsImageLayoutReadOnly(VkImageLayout layout) const {
9899     if ((layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) || (layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) ||
9900         (layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL) ||
9901         (layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL)) {
9902         return true;
9903     }
9904     return false;
9905 }
9906 
CheckDependencyExists(const uint32_t subpass,const VkImageLayout layout,const std::vector<SubpassLayout> & dependent_subpasses,const std::vector<DAGNode> & subpass_to_node,bool & skip) const9907 bool CoreChecks::CheckDependencyExists(const uint32_t subpass, const VkImageLayout layout,
9908                                        const std::vector<SubpassLayout> &dependent_subpasses,
9909                                        const std::vector<DAGNode> &subpass_to_node, bool &skip) const {
9910     bool result = true;
9911     bool bImageLayoutReadOnly = IsImageLayoutReadOnly(layout);
9912     // Loop through all subpasses that share the same attachment and make sure a dependency exists
9913     for (uint32_t k = 0; k < dependent_subpasses.size(); ++k) {
9914         const SubpassLayout &sp = dependent_subpasses[k];
9915         if (subpass == sp.index) continue;
9916         if (bImageLayoutReadOnly && IsImageLayoutReadOnly(sp.layout)) continue;
9917 
9918         const DAGNode &node = subpass_to_node[subpass];
9919         // Check for a specified dependency between the two nodes. If one exists we are done.
9920         auto prev_elem = std::find(node.prev.begin(), node.prev.end(), sp.index);
9921         auto next_elem = std::find(node.next.begin(), node.next.end(), sp.index);
9922         if (prev_elem == node.prev.end() && next_elem == node.next.end()) {
9923             // If no dependency exits an implicit dependency still might. If not, throw an error.
9924             std::unordered_set<uint32_t> processed_nodes;
9925             if (!(FindDependency(subpass, sp.index, subpass_to_node, processed_nodes) ||
9926                   FindDependency(sp.index, subpass, subpass_to_node, processed_nodes))) {
9927                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
9928                                 kVUID_Core_DrawState_InvalidRenderpass,
9929                                 "A dependency between subpasses %d and %d must exist but one is not specified.", subpass, sp.index);
9930                 result = false;
9931             }
9932         }
9933     }
9934     return result;
9935 }
9936 
// Walk up the render pass DAG from subpass 'index' to decide whether 'attachment'
// is used by this subpass or any of its predecessors. If a predecessor used the
// attachment and an intermediate subpass (depth > 0, i.e. not the original reader)
// neither references it nor lists it in pPreserveAttachments, an error is logged
// into 'skip'. Returns true when the attachment is used at this node or above it.
bool CoreChecks::CheckPreserved(const VkRenderPassCreateInfo2KHR *pCreateInfo, const int index, const uint32_t attachment,
                                const std::vector<DAGNode> &subpass_to_node, int depth, bool &skip) const {
    const DAGNode &node = subpass_to_node[index];
    // If this node writes to the attachment return true as next nodes need to preserve the attachment.
    // NOTE(review): input references are also treated as terminating the search here,
    // not just writes — presumably any use ends the preservation requirement; confirm.
    const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[index];
    for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
        if (attachment == subpass.pColorAttachments[j].attachment) return true;
    }
    for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
        if (attachment == subpass.pInputAttachments[j].attachment) return true;
    }
    if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
        if (attachment == subpass.pDepthStencilAttachment->attachment) return true;
    }
    bool result = false;
    // Loop through previous nodes and see if any of them write to the attachment.
    for (auto elem : node.prev) {
        result |= CheckPreserved(pCreateInfo, elem, attachment, subpass_to_node, depth + 1, skip);
    }
    // If the attachment was written to by a previous node then this node needs to preserve it.
    // (depth == 0 is the subpass that actually reads the attachment, so it is exempt.)
    if (result && depth > 0) {
        bool has_preserved = false;
        for (uint32_t j = 0; j < subpass.preserveAttachmentCount; ++j) {
            if (subpass.pPreserveAttachments[j] == attachment) {
                has_preserved = true;
                break;
            }
        }
        if (!has_preserved) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_DrawState_InvalidRenderpass,
                            "Attachment %d is used by a later subpass and must be preserved in subpass %d.", attachment, index);
        }
    }
    return result;
}
9973 
// Returns true when the half-open ranges [offset1, offset1 + size1) and
// [offset2, offset2 + size2) share at least one element. An empty range
// (size == 0) overlaps nothing.
//
// Bug fix: the previous form — (end1 > o2 && end1 < end2) || (o1 > o2 && o1 < end2) —
// missed the case where one range fully contains or exactly equals the other
// (e.g. [0,10) vs [2,5), or two identical ranges), reporting them as non-overlapping.
// Two non-empty ranges intersect iff each starts before the other ends.
//
// NOTE(review): offsets/sizes are unsigned at the call sites; sentinel values large
// enough to wrap (e.g. VK_REMAINING_MIP_LEVELS) are not handled specially here,
// matching the original behavior — confirm callers pre-resolve them.
template <class T>
bool IsRangeOverlapping(T offset1, T size1, T offset2, T size2) {
    return (size1 != 0) && (size2 != 0) && (offset1 < (offset2 + size2)) && (offset2 < (offset1 + size1));
}
9979 
IsRegionOverlapping(VkImageSubresourceRange range1,VkImageSubresourceRange range2)9980 bool IsRegionOverlapping(VkImageSubresourceRange range1, VkImageSubresourceRange range2) {
9981     return (IsRangeOverlapping(range1.baseMipLevel, range1.levelCount, range2.baseMipLevel, range2.levelCount) &&
9982             IsRangeOverlapping(range1.baseArrayLayer, range1.layerCount, range2.baseArrayLayer, range2.layerCount));
9983 }
9984 
// Validate that the subpasses of 'renderPass', as instantiated by 'framebuffer',
// are correctly ordered with respect to each shared (or memory-overlapping)
// attachment, and that attachments written by earlier subpasses are preserved
// where required. Returns true if any error was logged.
bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, RENDER_PASS_STATE const *renderPass) const {
    bool skip = false;
    auto const pFramebufferInfo = framebuffer->createInfo.ptr();
    auto const pCreateInfo = renderPass->createInfo.ptr();
    auto const &subpass_to_node = renderPass->subpassToNode;

    // Per-attachment record of which subpasses read it, which write it, and which
    // other attachment indices alias the same memory.
    struct Attachment {
        std::vector<SubpassLayout> outputs;
        std::vector<SubpassLayout> inputs;
        std::vector<uint32_t> overlapping;
    };

    std::vector<Attachment> attachments(pCreateInfo->attachmentCount);

    // Phase 1: find overlapping attachments. Two attachments alias when they are
    // the same view, views of overlapping subresource ranges of the same image,
    // or images bound to overlapping ranges of the same memory object.
    for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
        for (uint32_t j = i + 1; j < pCreateInfo->attachmentCount; ++j) {
            VkImageView viewi = pFramebufferInfo->pAttachments[i];
            VkImageView viewj = pFramebufferInfo->pAttachments[j];
            if (viewi == viewj) {
                attachments[i].overlapping.emplace_back(j);
                attachments[j].overlapping.emplace_back(i);
                continue;
            }
            auto view_state_i = GetImageViewState(viewi);
            auto view_state_j = GetImageViewState(viewj);
            // Views with no tracked state cannot be compared further.
            if (!view_state_i || !view_state_j) {
                continue;
            }
            auto view_ci_i = view_state_i->create_info;
            auto view_ci_j = view_state_j->create_info;
            if (view_ci_i.image == view_ci_j.image && IsRegionOverlapping(view_ci_i.subresourceRange, view_ci_j.subresourceRange)) {
                attachments[i].overlapping.emplace_back(j);
                attachments[j].overlapping.emplace_back(i);
                continue;
            }
            auto image_data_i = GetImageState(view_ci_i.image);
            auto image_data_j = GetImageState(view_ci_j.image);
            if (!image_data_i || !image_data_j) {
                continue;
            }
            // Distinct images may still alias through a shared memory binding.
            if (image_data_i->binding.mem == image_data_j->binding.mem &&
                IsRangeOverlapping(image_data_i->binding.offset, image_data_i->binding.size, image_data_j->binding.offset,
                                   image_data_j->binding.size)) {
                attachments[i].overlapping.emplace_back(j);
                attachments[j].overlapping.emplace_back(i);
            }
        }
    }
    // Phase 2: for each attachment record the subpasses that use it. Uses are
    // propagated to every overlapping (aliasing) attachment as well.
    // NOTE(review): attachment indices here index 'attachments' directly — assumes
    // they were already validated against attachmentCount elsewhere; confirm.
    unordered_set<uint32_t> attachmentIndices;
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
        attachmentIndices.clear();
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            uint32_t attachment = subpass.pInputAttachments[j].attachment;
            if (attachment == VK_ATTACHMENT_UNUSED) continue;
            SubpassLayout sp = {i, subpass.pInputAttachments[j].layout};
            attachments[attachment].inputs.emplace_back(sp);
            for (auto overlapping_attachment : attachments[attachment].overlapping) {
                attachments[overlapping_attachment].inputs.emplace_back(sp);
            }
        }
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            uint32_t attachment = subpass.pColorAttachments[j].attachment;
            if (attachment == VK_ATTACHMENT_UNUSED) continue;
            SubpassLayout sp = {i, subpass.pColorAttachments[j].layout};
            attachments[attachment].outputs.emplace_back(sp);
            for (auto overlapping_attachment : attachments[attachment].overlapping) {
                attachments[overlapping_attachment].outputs.emplace_back(sp);
            }
            // Remember the color attachments so the depth/stencil check below can
            // detect the same index used for both purposes in one subpass.
            attachmentIndices.insert(attachment);
        }
        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
            uint32_t attachment = subpass.pDepthStencilAttachment->attachment;
            attachments[attachment].outputs.emplace_back(sp);
            for (auto overlapping_attachment : attachments[attachment].overlapping) {
                attachments[overlapping_attachment].outputs.emplace_back(sp);
            }

            if (attachmentIndices.count(attachment)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_DrawState_InvalidRenderpass,
                            "Cannot use same attachment (%u) as both color and depth output in same subpass (%u).", attachment, i);
            }
        }
    }
    // Phase 3: if there is a dependency needed make sure one exists.
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
        // If the attachment is an input then all subpasses that output must have a dependency relationship
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            uint32_t attachment = subpass.pInputAttachments[j].attachment;
            if (attachment == VK_ATTACHMENT_UNUSED) continue;
            CheckDependencyExists(i, subpass.pInputAttachments[j].layout, attachments[attachment].outputs, subpass_to_node, skip);
        }
        // If the attachment is an output then all subpasses that use the attachment must have a dependency relationship
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            uint32_t attachment = subpass.pColorAttachments[j].attachment;
            if (attachment == VK_ATTACHMENT_UNUSED) continue;
            CheckDependencyExists(i, subpass.pColorAttachments[j].layout, attachments[attachment].outputs, subpass_to_node, skip);
            CheckDependencyExists(i, subpass.pColorAttachments[j].layout, attachments[attachment].inputs, subpass_to_node, skip);
        }
        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
            const uint32_t &attachment = subpass.pDepthStencilAttachment->attachment;
            CheckDependencyExists(i, subpass.pDepthStencilAttachment->layout, attachments[attachment].outputs, subpass_to_node,
                                  skip);
            CheckDependencyExists(i, subpass.pDepthStencilAttachment->layout, attachments[attachment].inputs, subpass_to_node,
                                  skip);
        }
    }
    // Phase 4: loop through implicit dependencies — if this pass reads, make sure
    // the attachment is preserved for all passes after it was written.
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            CheckPreserved(pCreateInfo, i, subpass.pInputAttachments[j].attachment, subpass_to_node, 0, skip);
        }
    }
    return skip;
}
10108 
RecordRenderPassDAG(RenderPassCreateVersion rp_version,const VkRenderPassCreateInfo2KHR * pCreateInfo,RENDER_PASS_STATE * render_pass)10109 void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
10110                                                  RENDER_PASS_STATE *render_pass) {
10111     auto &subpass_to_node = render_pass->subpassToNode;
10112     subpass_to_node.resize(pCreateInfo->subpassCount);
10113     auto &self_dependencies = render_pass->self_dependencies;
10114     self_dependencies.resize(pCreateInfo->subpassCount);
10115 
10116     for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
10117         subpass_to_node[i].pass = i;
10118         self_dependencies[i].clear();
10119     }
10120     for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
10121         const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
10122         if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
10123             if (dependency.srcSubpass == dependency.dstSubpass) {
10124                 self_dependencies[dependency.srcSubpass].push_back(i);
10125             } else {
10126                 subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
10127                 subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
10128             }
10129         }
10130     }
10131 }
10132 
ValidateRenderPassDAG(RenderPassCreateVersion rp_version,const VkRenderPassCreateInfo2KHR * pCreateInfo) const10133 bool CoreChecks::ValidateRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
10134     bool skip = false;
10135     const char *vuid;
10136     const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
10137 
10138     for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
10139         const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
10140         VkPipelineStageFlagBits latest_src_stage = GetLogicallyLatestGraphicsPipelineStage(dependency.srcStageMask);
10141         VkPipelineStageFlagBits earliest_dst_stage = GetLogicallyEarliestGraphicsPipelineStage(dependency.dstStageMask);
10142 
10143         // The first subpass here serves as a good proxy for "is multiview enabled" - since all view masks need to be non-zero if
10144         // any are, which enables multiview.
10145         if (use_rp2 && (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) && (pCreateInfo->pSubpasses[0].viewMask == 0)) {
10146             skip |= log_msg(
10147                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10148                 "VUID-VkRenderPassCreateInfo2KHR-viewMask-03059",
10149                 "Dependency %u specifies the VK_DEPENDENCY_VIEW_LOCAL_BIT, but multiview is not enabled for this render pass.", i);
10150         } else if (use_rp2 && !(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) && dependency.viewOffset != 0) {
10151             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10152                             "VUID-VkSubpassDependency2KHR-dependencyFlags-03092",
10153                             "Dependency %u specifies the VK_DEPENDENCY_VIEW_LOCAL_BIT, but also specifies a view offset of %u.", i,
10154                             dependency.viewOffset);
10155         } else if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL || dependency.dstSubpass == VK_SUBPASS_EXTERNAL) {
10156             if (dependency.srcSubpass == dependency.dstSubpass) {
10157                 vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03085" : "VUID-VkSubpassDependency-srcSubpass-00865";
10158                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10159                                 "The src and dst subpasses in dependency %u are both external.", i);
10160             } else if (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) {
10161                 if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL) {
10162                     vuid = "VUID-VkSubpassDependency-dependencyFlags-02520";
10163                 } else {  // dependency.dstSubpass == VK_SUBPASS_EXTERNAL
10164                     vuid = "VUID-VkSubpassDependency-dependencyFlags-02521";
10165                 }
10166                 if (use_rp2) {
10167                     // Create render pass 2 distinguishes between source and destination external dependencies.
10168                     if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL) {
10169                         vuid = "VUID-VkSubpassDependency2KHR-dependencyFlags-03090";
10170                     } else {
10171                         vuid = "VUID-VkSubpassDependency2KHR-dependencyFlags-03091";
10172                     }
10173                 }
10174                 skip |=
10175                     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10176                             "Dependency %u specifies an external dependency but also specifies VK_DEPENDENCY_VIEW_LOCAL_BIT.", i);
10177             }
10178         } else if (dependency.srcSubpass > dependency.dstSubpass) {
10179             vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03084" : "VUID-VkSubpassDependency-srcSubpass-00864";
10180             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10181                             "Dependency %u specifies a dependency from a later subpass (%u) to an earlier subpass (%u), which is "
10182                             "disallowed to prevent cyclic dependencies.",
10183                             i, dependency.srcSubpass, dependency.dstSubpass);
10184         } else if (dependency.srcSubpass == dependency.dstSubpass) {
10185             if (dependency.viewOffset != 0) {
10186                 vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pNext-01930";
10187                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10188                                 "Dependency %u specifies a self-dependency but has a non-zero view offset of %u", i,
10189                                 dependency.viewOffset);
10190             } else if ((dependency.dependencyFlags | VK_DEPENDENCY_VIEW_LOCAL_BIT) != dependency.dependencyFlags &&
10191                        pCreateInfo->pSubpasses[dependency.srcSubpass].viewMask > 1) {
10192                 vuid =
10193                     use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060" : "VUID-VkSubpassDependency-srcSubpass-00872";
10194                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10195                                 "Dependency %u specifies a self-dependency for subpass %u with a non-zero view mask, but does not "
10196                                 "specify VK_DEPENDENCY_VIEW_LOCAL_BIT.",
10197                                 i, dependency.srcSubpass);
10198             } else if ((HasNonFramebufferStagePipelineStageFlags(dependency.srcStageMask) ||
10199                         HasNonFramebufferStagePipelineStageFlags(dependency.dstStageMask)) &&
10200                        (GetGraphicsPipelineStageLogicalOrdinal(latest_src_stage) >
10201                         GetGraphicsPipelineStageLogicalOrdinal(earliest_dst_stage))) {
10202                 vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03087" : "VUID-VkSubpassDependency-srcSubpass-00867";
10203                 skip |= log_msg(
10204                     report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10205                     "Dependency %u specifies a self-dependency from logically-later stage (%s) to a logically-earlier stage (%s).",
10206                     i, string_VkPipelineStageFlagBits(latest_src_stage), string_VkPipelineStageFlagBits(earliest_dst_stage));
10207             }
10208         }
10209     }
10210     return skip;
10211 }
10212 
ValidateAttachmentIndex(RenderPassCreateVersion rp_version,uint32_t attachment,uint32_t attachment_count,const char * type) const10213 bool CoreChecks::ValidateAttachmentIndex(RenderPassCreateVersion rp_version, uint32_t attachment, uint32_t attachment_count,
10214                                          const char *type) const {
10215     bool skip = false;
10216     const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
10217     const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
10218 
10219     if (attachment >= attachment_count && attachment != VK_ATTACHMENT_UNUSED) {
10220         const char *vuid =
10221             use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-attachment-03051" : "VUID-VkRenderPassCreateInfo-attachment-00834";
10222         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10223                         "%s: %s attachment %d must be less than the total number of attachments %d.", type, function_name,
10224                         attachment, attachment_count);
10225     }
10226     return skip;
10227 }
10228 
// Bitmask of the ways a single attachment can be referenced within one subpass.
// Values are power-of-two flags so they can be OR-ed together (see AddAttachmentUse,
// which accumulates them per attachment to detect conflicting uses).
enum AttachmentType {
    ATTACHMENT_COLOR = 1,
    ATTACHMENT_DEPTH = 2,
    ATTACHMENT_INPUT = 4,
    ATTACHMENT_PRESERVE = 8,
    ATTACHMENT_RESOLVE = 16,
};
10236 
StringAttachmentType(uint8_t type)10237 char const *StringAttachmentType(uint8_t type) {
10238     switch (type) {
10239         case ATTACHMENT_COLOR:
10240             return "color";
10241         case ATTACHMENT_DEPTH:
10242             return "depth";
10243         case ATTACHMENT_INPUT:
10244             return "input";
10245         case ATTACHMENT_PRESERVE:
10246             return "preserve";
10247         case ATTACHMENT_RESOLVE:
10248             return "resolve";
10249         default:
10250             return "(multiple)";
10251     }
10252 }
10253 
AddAttachmentUse(RenderPassCreateVersion rp_version,uint32_t subpass,std::vector<uint8_t> & attachment_uses,std::vector<VkImageLayout> & attachment_layouts,uint32_t attachment,uint8_t new_use,VkImageLayout new_layout) const10254 bool CoreChecks::AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t subpass, std::vector<uint8_t> &attachment_uses,
10255                                   std::vector<VkImageLayout> &attachment_layouts, uint32_t attachment, uint8_t new_use,
10256                                   VkImageLayout new_layout) const {
10257     if (attachment >= attachment_uses.size()) return false; /* out of range, but already reported */
10258 
10259     bool skip = false;
10260     auto &uses = attachment_uses[attachment];
10261     const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
10262     const char *vuid;
10263     const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
10264 
10265     if (uses & new_use) {
10266         if (attachment_layouts[attachment] != new_layout) {
10267             vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-layout-02528" : "VUID-VkSubpassDescription-layout-02519";
10268             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10269                     "%s: subpass %u already uses attachment %u with a different image layout (%s vs %s).", function_name, subpass,
10270                     attachment, string_VkImageLayout(attachment_layouts[attachment]), string_VkImageLayout(new_layout));
10271         }
10272     } else if (uses & ~ATTACHMENT_INPUT || (uses && (new_use == ATTACHMENT_RESOLVE || new_use == ATTACHMENT_PRESERVE))) {
10273         /* Note: input attachments are assumed to be done first. */
10274         vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074"
10275                        : "VUID-VkSubpassDescription-pPreserveAttachments-00854";
10276         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10277                         "%s: subpass %u uses attachment %u as both %s and %s attachment.", function_name, subpass, attachment,
10278                         StringAttachmentType(uses), StringAttachmentType(new_use));
10279     } else {
10280         attachment_layouts[attachment] = new_layout;
10281         uses |= new_use;
10282     }
10283 
10284     return skip;
10285 }
10286 
ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_version,const VkRenderPassCreateInfo2KHR * pCreateInfo) const10287 bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_version,
10288                                                    const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
10289     bool skip = false;
10290     const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
10291     const char *vuid;
10292     const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
10293 
10294     for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
10295         const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
10296         std::vector<uint8_t> attachment_uses(pCreateInfo->attachmentCount);
10297         std::vector<VkImageLayout> attachment_layouts(pCreateInfo->attachmentCount);
10298 
10299         if (subpass.pipelineBindPoint != VK_PIPELINE_BIND_POINT_GRAPHICS) {
10300             vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062"
10301                            : "VUID-VkSubpassDescription-pipelineBindPoint-00844";
10302             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10303                             "%s: Pipeline bind point for subpass %d must be VK_PIPELINE_BIND_POINT_GRAPHICS.", function_name, i);
10304         }
10305 
10306         for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
10307             auto const &attachment_ref = subpass.pInputAttachments[j];
10308             if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
10309                 skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Input");
10310 
10311                 if (attachment_ref.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
10312                     vuid =
10313                         use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkInputAttachmentAspectReference-aspectMask-01964";
10314                     skip |= log_msg(
10315                         report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10316                         "%s: Aspect mask for input attachment reference %d in subpass %d includes VK_IMAGE_ASPECT_METADATA_BIT.",
10317                         function_name, i, j);
10318                 }
10319 
10320                 if (attachment_ref.attachment < pCreateInfo->attachmentCount) {
10321                     skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
10322                                              ATTACHMENT_INPUT, attachment_ref.layout);
10323 
10324                     vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pNext-01963";
10325                     skip |= ValidateImageAspectMask(VK_NULL_HANDLE, pCreateInfo->pAttachments[attachment_ref.attachment].format,
10326                                                     attachment_ref.aspectMask, function_name, vuid);
10327                 }
10328             }
10329 
10330             if (rp_version == RENDER_PASS_VERSION_2) {
10331                 // These are validated automatically as part of parameter validation for create renderpass 1
10332                 // as they are in a struct that only applies to input attachments - not so for v2.
10333 
10334                 // Check for 0
10335                 if (attachment_ref.aspectMask == 0) {
10336                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10337                                     "VUID-VkSubpassDescription2KHR-aspectMask-03176",
10338                                     "%s:  Input attachment (%d) aspect mask must not be 0.", function_name, j);
10339                 } else {
10340                     const VkImageAspectFlags valid_bits =
10341                         (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT |
10342                          VK_IMAGE_ASPECT_METADATA_BIT | VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT |
10343                          VK_IMAGE_ASPECT_PLANE_2_BIT);
10344 
10345                     // Check for valid aspect mask bits
10346                     if (attachment_ref.aspectMask & ~valid_bits) {
10347                         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10348                                         "VUID-VkSubpassDescription2KHR-aspectMask-03175",
10349                                         "%s:  Input attachment (%d) aspect mask (0x%" PRIx32 ")is invalid.", function_name, j,
10350                                         attachment_ref.aspectMask);
10351                     }
10352                 }
10353             }
10354         }
10355 
10356         for (uint32_t j = 0; j < subpass.preserveAttachmentCount; ++j) {
10357             uint32_t attachment = subpass.pPreserveAttachments[j];
10358             if (attachment == VK_ATTACHMENT_UNUSED) {
10359                 vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-attachment-03073" : "VUID-VkSubpassDescription-attachment-00853";
10360                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10361                                 "%s:  Preserve attachment (%d) must not be VK_ATTACHMENT_UNUSED.", function_name, j);
10362             } else {
10363                 skip |= ValidateAttachmentIndex(rp_version, attachment, pCreateInfo->attachmentCount, "Preserve");
10364                 if (attachment < pCreateInfo->attachmentCount) {
10365                     skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment, ATTACHMENT_PRESERVE,
10366                                              VkImageLayout(0) /* preserve doesn't have any layout */);
10367                 }
10368             }
10369         }
10370 
10371         bool subpass_performs_resolve = false;
10372 
10373         for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
10374             if (subpass.pResolveAttachments) {
10375                 auto const &attachment_ref = subpass.pResolveAttachments[j];
10376                 if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
10377                     skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Resolve");
10378 
10379                     if (attachment_ref.attachment < pCreateInfo->attachmentCount) {
10380                         skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
10381                                                  ATTACHMENT_RESOLVE, attachment_ref.layout);
10382 
10383                         subpass_performs_resolve = true;
10384 
10385                         if (pCreateInfo->pAttachments[attachment_ref.attachment].samples != VK_SAMPLE_COUNT_1_BIT) {
10386                             vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03067"
10387                                            : "VUID-VkSubpassDescription-pResolveAttachments-00849";
10388                             skip |= log_msg(
10389                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10390                                 "%s:  Subpass %u requests multisample resolve into attachment %u, which must "
10391                                 "have VK_SAMPLE_COUNT_1_BIT but has %s.",
10392                                 function_name, i, attachment_ref.attachment,
10393                                 string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples));
10394                         }
10395                     }
10396                 }
10397             }
10398         }
10399 
10400         if (subpass.pDepthStencilAttachment) {
10401             if (subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
10402                 skip |= ValidateAttachmentIndex(rp_version, subpass.pDepthStencilAttachment->attachment,
10403                                                 pCreateInfo->attachmentCount, "Depth");
10404                 if (subpass.pDepthStencilAttachment->attachment < pCreateInfo->attachmentCount) {
10405                     skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts,
10406                                              subpass.pDepthStencilAttachment->attachment, ATTACHMENT_DEPTH,
10407                                              subpass.pDepthStencilAttachment->layout);
10408                 }
10409             }
10410         }
10411 
10412         uint32_t last_sample_count_attachment = VK_ATTACHMENT_UNUSED;
10413         for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
10414             auto const &attachment_ref = subpass.pColorAttachments[j];
10415             skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Color");
10416             if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED && attachment_ref.attachment < pCreateInfo->attachmentCount) {
10417                 skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
10418                                          ATTACHMENT_COLOR, attachment_ref.layout);
10419 
10420                 VkSampleCountFlagBits current_sample_count = pCreateInfo->pAttachments[attachment_ref.attachment].samples;
10421                 if (last_sample_count_attachment != VK_ATTACHMENT_UNUSED) {
10422                     VkSampleCountFlagBits last_sample_count =
10423                         pCreateInfo->pAttachments[subpass.pColorAttachments[last_sample_count_attachment].attachment].samples;
10424                     if (current_sample_count != last_sample_count) {
10425                         vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pColorAttachments-03069"
10426                                        : "VUID-VkSubpassDescription-pColorAttachments-01417";
10427                         skip |=
10428                             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10429                                     "%s:  Subpass %u attempts to render to color attachments with inconsistent sample counts."
10430                                     "Color attachment ref %u has sample count %s, whereas previous color attachment ref %u has "
10431                                     "sample count %s.",
10432                                     function_name, i, j, string_VkSampleCountFlagBits(current_sample_count),
10433                                     last_sample_count_attachment, string_VkSampleCountFlagBits(last_sample_count));
10434                     }
10435                 }
10436                 last_sample_count_attachment = j;
10437 
10438                 if (subpass_performs_resolve && current_sample_count == VK_SAMPLE_COUNT_1_BIT) {
10439                     vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03066"
10440                                    : "VUID-VkSubpassDescription-pResolveAttachments-00848";
10441                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10442                                     "%s:  Subpass %u requests multisample resolve from attachment %u which has "
10443                                     "VK_SAMPLE_COUNT_1_BIT.",
10444                                     function_name, i, attachment_ref.attachment);
10445                 }
10446 
10447                 if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED &&
10448                     subpass.pDepthStencilAttachment->attachment < pCreateInfo->attachmentCount) {
10449                     const auto depth_stencil_sample_count =
10450                         pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].samples;
10451 
10452                     if (device_extensions.vk_amd_mixed_attachment_samples) {
10453                         if (pCreateInfo->pAttachments[attachment_ref.attachment].samples > depth_stencil_sample_count) {
10454                             vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pColorAttachments-03070"
10455                                            : "VUID-VkSubpassDescription-pColorAttachments-01506";
10456                             skip |= log_msg(
10457                                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10458                                 "%s:  Subpass %u pColorAttachments[%u] has %s which is larger than "
10459                                 "depth/stencil attachment %s.",
10460                                 function_name, i, j,
10461                                 string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples),
10462                                 string_VkSampleCountFlagBits(depth_stencil_sample_count));
10463                             break;
10464                         }
10465                     }
10466 
10467                     if (!device_extensions.vk_amd_mixed_attachment_samples && !device_extensions.vk_nv_framebuffer_mixed_samples &&
10468                         current_sample_count != depth_stencil_sample_count) {
10469                         vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071"
10470                                        : "VUID-VkSubpassDescription-pDepthStencilAttachment-01418";
10471                         skip |= log_msg(
10472                             report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10473                             "%s:  Subpass %u attempts to render to use a depth/stencil attachment with sample count that differs "
10474                             "from color attachment %u."
10475                             "The depth attachment ref has sample count %s, whereas color attachment ref %u has sample count %s.",
10476                             function_name, i, j, string_VkSampleCountFlagBits(depth_stencil_sample_count), j,
10477                             string_VkSampleCountFlagBits(current_sample_count));
10478                         break;
10479                     }
10480                 }
10481             }
10482 
10483             if (subpass_performs_resolve && subpass.pResolveAttachments[j].attachment != VK_ATTACHMENT_UNUSED &&
10484                 subpass.pResolveAttachments[j].attachment < pCreateInfo->attachmentCount) {
10485                 if (attachment_ref.attachment == VK_ATTACHMENT_UNUSED) {
10486                     vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03065"
10487                                    : "VUID-VkSubpassDescription-pResolveAttachments-00847";
10488                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10489                                     "%s:  Subpass %u requests multisample resolve from attachment %u which has "
10490                                     "attachment=VK_ATTACHMENT_UNUSED.",
10491                                     function_name, i, attachment_ref.attachment);
10492                 } else {
10493                     const auto &color_desc = pCreateInfo->pAttachments[attachment_ref.attachment];
10494                     const auto &resolve_desc = pCreateInfo->pAttachments[subpass.pResolveAttachments[j].attachment];
10495                     if (color_desc.format != resolve_desc.format) {
10496                         vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03068"
10497                                        : "VUID-VkSubpassDescription-pResolveAttachments-00850";
10498                         skip |=
10499                             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
10500                                     "%s:  Subpass %u pColorAttachments[%u] resolves to an attachment with a "
10501                                     "different format. color format: %u, resolve format: %u.",
10502                                     function_name, i, j, color_desc.format, resolve_desc.format);
10503                     }
10504                 }
10505             }
10506         }
10507     }
10508     return skip;
10509 }
10510 
MarkAttachmentFirstUse(RENDER_PASS_STATE * render_pass,uint32_t index,bool is_read)10511 static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
10512     if (index == VK_ATTACHMENT_UNUSED) return;
10513 
10514     if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
10515 }
10516 
// Validate a render pass create info (normalized to the V2 structure) against rules common to
// vkCreateRenderPass() and vkCreateRenderPass2KHR(): attachment usage, the subpass-dependency DAG,
// multiview view/correlation masks, per-dependency stage/access mask compatibility, and finally
// image layouts. Returns true if any validation error was logged.
bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersion rp_version,
                                          const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
    bool skip = false;
    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
    const char *vuid;
    // Report errors under whichever API entry point the application actually called.
    const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";

    // TODO: As part of wrapping up the mem_tracker/core_validation merge the following routine should be consolidated with
    //       ValidateLayouts.
    skip |= ValidateRenderpassAttachmentUsage(rp_version, pCreateInfo);

    skip |= ValidateRenderPassDAG(rp_version, pCreateInfo);

    // Validate multiview correlation and view masks
    bool viewMaskZero = false;
    bool viewMaskNonZero = false;

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
        // Track whether view masks are mixed (some zero, some non-zero) across subpasses.
        if (subpass.viewMask != 0) {
            viewMaskNonZero = true;
        } else {
            viewMaskZero = true;
        }

        // PER_VIEW_POSITION_X_ONLY is only valid in combination with PER_VIEW_ATTRIBUTES.
        if ((subpass.flags & VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX) != 0 &&
            (subpass.flags & VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX) == 0) {
            vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-flags-03076" : "VUID-VkSubpassDescription-flags-00856";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
                            "%s: The flags parameter of subpass description %u includes "
                            "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX but does not also include "
                            "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX.",
                            function_name, i);
        }
    }

    // These whole-create-info view-mask rules only have VUIDs on the V2 path; the V1 path expresses
    // multiview through VkRenderPassMultiviewCreateInfo, which is validated separately.
    if (rp_version == RENDER_PASS_VERSION_2) {
        if (viewMaskNonZero && viewMaskZero) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkRenderPassCreateInfo2KHR-viewMask-03058",
                            "%s: Some view masks are non-zero whilst others are zero.", function_name);
        }

        if (viewMaskZero && pCreateInfo->correlatedViewMaskCount != 0) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            "VUID-VkRenderPassCreateInfo2KHR-viewMask-03057",
                            "%s: Multiview is not enabled but correlation masks are still provided", function_name);
        }
    }
    // Each view bit may appear in at most one correlated view mask; accumulate seen bits and flag overlap.
    uint32_t aggregated_cvms = 0;
    for (uint32_t i = 0; i < pCreateInfo->correlatedViewMaskCount; ++i) {
        if (aggregated_cvms & pCreateInfo->pCorrelatedViewMasks[i]) {
            vuid = use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056"
                           : "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
                            "%s: pCorrelatedViewMasks[%u] contains a previously appearing view bit.", function_name, i);
        }
        aggregated_cvms |= pCreateInfo->pCorrelatedViewMasks[i];
    }

    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        auto const &dependency = pCreateInfo->pDependencies[i];
        // Stage masks referencing geometry/tessellation/mesh stages require the matching features to be
        // enabled; the applicable VUIDs differ between the V1 and V2 structures.
        if (rp_version == RENDER_PASS_VERSION_2) {
            skip |= ValidateStageMaskGsTsEnables(
                dependency.srcStageMask, function_name, "VUID-VkSubpassDependency2KHR-srcStageMask-03080",
                "VUID-VkSubpassDependency2KHR-srcStageMask-03082", "VUID-VkSubpassDependency2KHR-srcStageMask-02103",
                "VUID-VkSubpassDependency2KHR-srcStageMask-02104");
            skip |= ValidateStageMaskGsTsEnables(
                dependency.dstStageMask, function_name, "VUID-VkSubpassDependency2KHR-dstStageMask-03081",
                "VUID-VkSubpassDependency2KHR-dstStageMask-03083", "VUID-VkSubpassDependency2KHR-dstStageMask-02105",
                "VUID-VkSubpassDependency2KHR-dstStageMask-02106");
        } else {
            skip |= ValidateStageMaskGsTsEnables(
                dependency.srcStageMask, function_name, "VUID-VkSubpassDependency-srcStageMask-00860",
                "VUID-VkSubpassDependency-srcStageMask-00862", "VUID-VkSubpassDependency-srcStageMask-02099",
                "VUID-VkSubpassDependency-srcStageMask-02100");
            skip |= ValidateStageMaskGsTsEnables(
                dependency.dstStageMask, function_name, "VUID-VkSubpassDependency-dstStageMask-00861",
                "VUID-VkSubpassDependency-dstStageMask-00863", "VUID-VkSubpassDependency-dstStageMask-02101",
                "VUID-VkSubpassDependency-dstStageMask-02102");
        }

        // Every access-mask bit must be supported by some stage present in the corresponding stage mask.
        if (!ValidateAccessMaskPipelineStage(device_extensions, dependency.srcAccessMask, dependency.srcStageMask)) {
            vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcAccessMask-03088" : "VUID-VkSubpassDependency-srcAccessMask-00868";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
                            "%s: pDependencies[%u].srcAccessMask (0x%" PRIx32 ") is not supported by srcStageMask (0x%" PRIx32 ").",
                            function_name, i, dependency.srcAccessMask, dependency.srcStageMask);
        }

        if (!ValidateAccessMaskPipelineStage(device_extensions, dependency.dstAccessMask, dependency.dstStageMask)) {
            vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-dstAccessMask-03089" : "VUID-VkSubpassDependency-dstAccessMask-00869";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
                            "%s: pDependencies[%u].dstAccessMask (0x%" PRIx32 ") is not supported by dstStageMask (0x%" PRIx32 ").",
                            function_name, i, dependency.dstAccessMask, dependency.dstStageMask);
        }
    }
    // Layout validation is only meaningful on an otherwise-valid create info; skip it once any error
    // has been found to avoid cascading/derived error messages.
    if (!skip) {
        skip |= ValidateLayouts(rp_version, device, pCreateInfo);
    }
    return skip;
}
10618 
PreCallValidateCreateRenderPass(VkDevice device,const VkRenderPassCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)10619 bool CoreChecks::PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
10620                                                  const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
10621     bool skip = false;
10622     // Handle extension structs from KHR_multiview and KHR_maintenance2 that can only be validated for RP1 (indices out of bounds)
10623     const VkRenderPassMultiviewCreateInfo *pMultiviewInfo = lvl_find_in_chain<VkRenderPassMultiviewCreateInfo>(pCreateInfo->pNext);
10624     if (pMultiviewInfo) {
10625         if (pMultiviewInfo->subpassCount && pMultiviewInfo->subpassCount != pCreateInfo->subpassCount) {
10626             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10627                             "VUID-VkRenderPassCreateInfo-pNext-01928",
10628                             "Subpass count is %u but multiview info has a subpass count of %u.", pCreateInfo->subpassCount,
10629                             pMultiviewInfo->subpassCount);
10630         } else if (pMultiviewInfo->dependencyCount && pMultiviewInfo->dependencyCount != pCreateInfo->dependencyCount) {
10631             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10632                             "VUID-VkRenderPassCreateInfo-pNext-01929",
10633                             "Dependency count is %u but multiview info has a dependency count of %u.", pCreateInfo->dependencyCount,
10634                             pMultiviewInfo->dependencyCount);
10635         }
10636     }
10637     const VkRenderPassInputAttachmentAspectCreateInfo *pInputAttachmentAspectInfo =
10638         lvl_find_in_chain<VkRenderPassInputAttachmentAspectCreateInfo>(pCreateInfo->pNext);
10639     if (pInputAttachmentAspectInfo) {
10640         for (uint32_t i = 0; i < pInputAttachmentAspectInfo->aspectReferenceCount; ++i) {
10641             uint32_t subpass = pInputAttachmentAspectInfo->pAspectReferences[i].subpass;
10642             uint32_t attachment = pInputAttachmentAspectInfo->pAspectReferences[i].inputAttachmentIndex;
10643             if (subpass >= pCreateInfo->subpassCount) {
10644                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10645                                 "VUID-VkRenderPassCreateInfo-pNext-01926",
10646                                 "Subpass index %u specified by input attachment aspect info %u is greater than the subpass "
10647                                 "count of %u for this render pass.",
10648                                 subpass, i, pCreateInfo->subpassCount);
10649             } else if (pCreateInfo->pSubpasses && attachment >= pCreateInfo->pSubpasses[subpass].inputAttachmentCount) {
10650                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10651                                 "VUID-VkRenderPassCreateInfo-pNext-01927",
10652                                 "Input attachment index %u specified by input attachment aspect info %u is greater than the "
10653                                 "input attachment count of %u for this subpass.",
10654                                 attachment, i, pCreateInfo->pSubpasses[subpass].inputAttachmentCount);
10655             }
10656         }
10657     }
10658     const VkRenderPassFragmentDensityMapCreateInfoEXT *pFragmentDensityMapInfo =
10659         lvl_find_in_chain<VkRenderPassFragmentDensityMapCreateInfoEXT>(pCreateInfo->pNext);
10660     if (pFragmentDensityMapInfo) {
10661         if (pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment != VK_ATTACHMENT_UNUSED) {
10662             if (pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment >= pCreateInfo->attachmentCount) {
10663                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10664                                 "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547",
10665                                 "fragmentDensityMapAttachment %u must be less than attachmentCount %u of for this render pass.",
10666                                 pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment, pCreateInfo->attachmentCount);
10667             } else {
10668                 if (!(pFragmentDensityMapInfo->fragmentDensityMapAttachment.layout ==
10669                           VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT ||
10670                       pFragmentDensityMapInfo->fragmentDensityMapAttachment.layout == VK_IMAGE_LAYOUT_GENERAL)) {
10671                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10672                                     "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549",
10673                                     "Layout of fragmentDensityMapAttachment %u' must be equal to "
10674                                     "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, or VK_IMAGE_LAYOUT_GENERAL.",
10675                                     pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
10676                 }
10677                 if (!(pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].loadOp ==
10678                           VK_ATTACHMENT_LOAD_OP_LOAD ||
10679                       pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].loadOp ==
10680                           VK_ATTACHMENT_LOAD_OP_DONT_CARE)) {
10681                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10682                                     "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550",
10683                                     "FragmentDensityMapAttachment %u' must reference an attachment with a loadOp "
10684                                     "equal to VK_ATTACHMENT_LOAD_OP_LOAD or VK_ATTACHMENT_LOAD_OP_DONT_CARE.",
10685                                     pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
10686                 }
10687                 if (pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].storeOp !=
10688                     VK_ATTACHMENT_STORE_OP_DONT_CARE) {
10689                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10690                                     "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551",
10691                                     "FragmentDensityMapAttachment %u' must reference an attachment with a storeOp "
10692                                     "equal to VK_ATTACHMENT_STORE_OP_DONT_CARE.",
10693                                     pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
10694                 }
10695             }
10696         }
10697     }
10698 
10699     if (!skip) {
10700         safe_VkRenderPassCreateInfo2KHR create_info_2;
10701         ConvertVkRenderPassCreateInfoToV2KHR(pCreateInfo, &create_info_2);
10702         skip |= ValidateCreateRenderPass(device, RENDER_PASS_VERSION_1, create_info_2.ptr());
10703     }
10704 
10705     return skip;
10706 }
10707 
RecordCreateRenderPassState(RenderPassCreateVersion rp_version,std::shared_ptr<RENDER_PASS_STATE> & render_pass,VkRenderPass * pRenderPass)10708 void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
10709                                                          std::shared_ptr<RENDER_PASS_STATE> &render_pass,
10710                                                          VkRenderPass *pRenderPass) {
10711     render_pass->renderPass = *pRenderPass;
10712     auto create_info = render_pass->createInfo.ptr();
10713 
10714     RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
10715 
10716     for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
10717         const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
10718         for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
10719             MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);
10720 
10721             // resolve attachments are considered to be written
10722             if (subpass.pResolveAttachments) {
10723                 MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
10724             }
10725         }
10726         if (subpass.pDepthStencilAttachment) {
10727             MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
10728         }
10729         for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
10730             MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
10731         }
10732     }
10733 
10734     // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
10735     renderPassMap[*pRenderPass] = std::move(render_pass);
10736 }
10737 
// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in google style guide, but intentionally forces caller to move
// or copy.  This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
// construction or assignment.
PostCallRecordCreateRenderPass(VkDevice device,const VkRenderPassCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass,VkResult result)10742 void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
10743                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
10744                                                             VkResult result) {
10745     if (VK_SUCCESS != result) return;
10746     auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
10747     RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
10748 }
10749 
PostCallRecordCreateRenderPass2KHR(VkDevice device,const VkRenderPassCreateInfo2KHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass,VkResult result)10750 void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
10751                                                                 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
10752                                                                 VkResult result) {
10753     if (VK_SUCCESS != result) return;
10754     auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
10755     RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
10756 }
10757 
ValidateDepthStencilResolve(const debug_report_data * report_data,const VkPhysicalDeviceDepthStencilResolvePropertiesKHR & depth_stencil_resolve_props,const VkRenderPassCreateInfo2KHR * pCreateInfo)10758 static bool ValidateDepthStencilResolve(const debug_report_data *report_data,
10759                                         const VkPhysicalDeviceDepthStencilResolvePropertiesKHR &depth_stencil_resolve_props,
10760                                         const VkRenderPassCreateInfo2KHR *pCreateInfo) {
10761     bool skip = false;
10762 
10763     // If the pNext list of VkSubpassDescription2KHR includes a VkSubpassDescriptionDepthStencilResolveKHR structure,
10764     // then that structure describes depth/stencil resolve operations for the subpass.
10765     for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++) {
10766         VkSubpassDescription2KHR subpass = pCreateInfo->pSubpasses[i];
10767         const auto *resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(subpass.pNext);
10768 
10769         if (resolve == nullptr) {
10770             continue;
10771         }
10772 
10773         if (resolve->pDepthStencilResolveAttachment != nullptr &&
10774             resolve->pDepthStencilResolveAttachment->attachment != VK_ATTACHMENT_UNUSED) {
10775             if (subpass.pDepthStencilAttachment->attachment == VK_ATTACHMENT_UNUSED) {
10776                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10777                                 "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03177",
10778                                 "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10779                                 "structure with resolve attachment %u, but pDepthStencilAttachment=VK_ATTACHMENT_UNUSED.",
10780                                 i, resolve->pDepthStencilResolveAttachment->attachment);
10781             }
10782             if (resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR && resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR) {
10783                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10784                                 "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03178",
10785                                 "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10786                                 "structure with resolve attachment %u, but both depth and stencil resolve modes are "
10787                                 "VK_RESOLVE_MODE_NONE_KHR.",
10788                                 i, resolve->pDepthStencilResolveAttachment->attachment);
10789             }
10790         }
10791 
10792         if (resolve->pDepthStencilResolveAttachment != nullptr &&
10793             pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].samples == VK_SAMPLE_COUNT_1_BIT) {
10794             skip |= log_msg(
10795                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10796                 "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03179",
10797                 "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10798                 "structure with resolve attachment %u. However pDepthStencilAttachment has sample count=VK_SAMPLE_COUNT_1_BIT.",
10799                 i, resolve->pDepthStencilResolveAttachment->attachment);
10800         }
10801 
10802         if (pCreateInfo->pAttachments[resolve->pDepthStencilResolveAttachment->attachment].samples != VK_SAMPLE_COUNT_1_BIT) {
10803             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10804                             "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03180",
10805                             "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10806                             "structure with resolve attachment %u which has sample count=VK_SAMPLE_COUNT_1_BIT.",
10807                             i, resolve->pDepthStencilResolveAttachment->attachment);
10808         }
10809 
10810         VkFormat pDepthStencilAttachmentFormat = pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].format;
10811         VkFormat pDepthStencilResolveAttachmentFormat =
10812             pCreateInfo->pAttachments[resolve->pDepthStencilResolveAttachment->attachment].format;
10813 
10814         if ((FormatDepthSize(pDepthStencilAttachmentFormat) != FormatDepthSize(pDepthStencilResolveAttachmentFormat)) ||
10815             (FormatDepthNumericalType(pDepthStencilAttachmentFormat) !=
10816              FormatDepthNumericalType(pDepthStencilResolveAttachmentFormat))) {
10817             skip |=
10818                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10819                         "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03181",
10820                         "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10821                         "structure with resolve attachment %u which has a depth component (size %u). The depth component "
10822                         "of pDepthStencilAttachment must have the same number of bits (currently %u) and the same numerical type.",
10823                         i, resolve->pDepthStencilResolveAttachment->attachment,
10824                         FormatDepthSize(pDepthStencilResolveAttachmentFormat), FormatDepthSize(pDepthStencilAttachmentFormat));
10825         }
10826 
10827         if ((FormatStencilSize(pDepthStencilAttachmentFormat) != FormatStencilSize(pDepthStencilResolveAttachmentFormat)) ||
10828             (FormatStencilNumericalType(pDepthStencilAttachmentFormat) !=
10829              FormatStencilNumericalType(pDepthStencilResolveAttachmentFormat))) {
10830             skip |=
10831                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10832                         "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03182",
10833                         "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10834                         "structure with resolve attachment %u which has a stencil component (size %u). The stencil component "
10835                         "of pDepthStencilAttachment must have the same number of bits (currently %u) and the same numerical type.",
10836                         i, resolve->pDepthStencilResolveAttachment->attachment,
10837                         FormatStencilSize(pDepthStencilResolveAttachmentFormat), FormatStencilSize(pDepthStencilAttachmentFormat));
10838         }
10839 
10840         if (!(resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
10841               resolve->depthResolveMode & depth_stencil_resolve_props.supportedDepthResolveModes)) {
10842             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10843                             "VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-03183",
10844                             "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10845                             "structure with invalid depthResolveMode=%u.",
10846                             i, resolve->depthResolveMode);
10847         }
10848 
10849         if (!(resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
10850               resolve->stencilResolveMode & depth_stencil_resolve_props.supportedStencilResolveModes)) {
10851             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10852                             "VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-03184",
10853                             "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10854                             "structure with invalid stencilResolveMode=%u.",
10855                             i, resolve->stencilResolveMode);
10856         }
10857 
10858         if (FormatIsDepthAndStencil(pDepthStencilResolveAttachmentFormat) &&
10859             depth_stencil_resolve_props.independentResolve == VK_FALSE &&
10860             depth_stencil_resolve_props.independentResolveNone == VK_FALSE &&
10861             !(resolve->depthResolveMode == resolve->stencilResolveMode)) {
10862             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10863                             "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03185",
10864                             "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10865                             "structure. The values of depthResolveMode (%u) and stencilResolveMode (%u) must be identical.",
10866                             i, resolve->depthResolveMode, resolve->stencilResolveMode);
10867         }
10868 
10869         if (FormatIsDepthAndStencil(pDepthStencilResolveAttachmentFormat) &&
10870             depth_stencil_resolve_props.independentResolve == VK_FALSE &&
10871             depth_stencil_resolve_props.independentResolveNone == VK_TRUE &&
10872             !(resolve->depthResolveMode == resolve->stencilResolveMode || resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
10873               resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR)) {
10874             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10875                             "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03186",
10876                             "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
10877                             "structure. The values of depthResolveMode (%u) and stencilResolveMode (%u) must be identical, or "
10878                             "one of them must be %u.",
10879                             i, resolve->depthResolveMode, resolve->stencilResolveMode, VK_RESOLVE_MODE_NONE_KHR);
10880         }
10881     }
10882 
10883     return skip;
10884 }
10885 
PreCallValidateCreateRenderPass2KHR(VkDevice device,const VkRenderPassCreateInfo2KHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)10886 bool CoreChecks::PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
10887                                                      const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
10888     bool skip = false;
10889 
10890     if (device_extensions.vk_khr_depth_stencil_resolve) {
10891         skip |= ValidateDepthStencilResolve(report_data, phys_dev_ext_props.depth_stencil_resolve_props, pCreateInfo);
10892     }
10893 
10894     safe_VkRenderPassCreateInfo2KHR create_info_2(pCreateInfo);
10895     skip |= ValidateCreateRenderPass(device, RENDER_PASS_VERSION_2, create_info_2.ptr());
10896 
10897     return skip;
10898 }
10899 
ValidatePrimaryCommandBuffer(const CMD_BUFFER_STATE * pCB,char const * cmd_name,const char * error_code) const10900 bool CoreChecks::ValidatePrimaryCommandBuffer(const CMD_BUFFER_STATE *pCB, char const *cmd_name, const char *error_code) const {
10901     bool skip = false;
10902     if (pCB->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
10903         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
10904                         HandleToUint64(pCB->commandBuffer), error_code, "Cannot execute command %s on a secondary command buffer.",
10905                         cmd_name);
10906     }
10907     return skip;
10908 }
10909 
VerifyRenderAreaBounds(const VkRenderPassBeginInfo * pRenderPassBegin) const10910 bool CoreChecks::VerifyRenderAreaBounds(const VkRenderPassBeginInfo *pRenderPassBegin) const {
10911     bool skip = false;
10912     const safe_VkFramebufferCreateInfo *pFramebufferInfo = &GetFramebufferState(pRenderPassBegin->framebuffer)->createInfo;
10913     if (pRenderPassBegin->renderArea.offset.x < 0 ||
10914         (pRenderPassBegin->renderArea.offset.x + pRenderPassBegin->renderArea.extent.width) > pFramebufferInfo->width ||
10915         pRenderPassBegin->renderArea.offset.y < 0 ||
10916         (pRenderPassBegin->renderArea.offset.y + pRenderPassBegin->renderArea.extent.height) > pFramebufferInfo->height) {
10917         skip |= static_cast<bool>(log_msg(
10918             report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
10919             kVUID_Core_DrawState_InvalidRenderArea,
10920             "Cannot execute a render pass with renderArea not within the bound of the framebuffer. RenderArea: x %d, y %d, width "
10921             "%d, height %d. Framebuffer: width %d, height %d.",
10922             pRenderPassBegin->renderArea.offset.x, pRenderPassBegin->renderArea.offset.y, pRenderPassBegin->renderArea.extent.width,
10923             pRenderPassBegin->renderArea.extent.height, pFramebufferInfo->width, pFramebufferInfo->height));
10924     }
10925     return skip;
10926 }
10927 
// Validates the image views supplied at render pass begin time through
// VkRenderPassAttachmentBeginInfoKHR (VK_KHR_imageless_framebuffer) against both the
// framebuffer's VkFramebufferAttachmentsCreateInfoKHR and the render pass attachment
// descriptions (VUIDs 03207-03219). Returns true if any mismatch was logged.
bool CoreChecks::VerifyFramebufferAndRenderPassImageViews(const VkRenderPassBeginInfo *pRenderPassBeginInfo) const {
    bool skip = false;
    const VkRenderPassAttachmentBeginInfoKHR *pRenderPassAttachmentBeginInfo =
        lvl_find_in_chain<VkRenderPassAttachmentBeginInfoKHR>(pRenderPassBeginInfo->pNext);

    // Nothing to validate unless the application actually supplied attachments at begin time.
    if (pRenderPassAttachmentBeginInfo && pRenderPassAttachmentBeginInfo->attachmentCount != 0) {
        const safe_VkFramebufferCreateInfo *pFramebufferCreateInfo =
            &GetFramebufferState(pRenderPassBeginInfo->framebuffer)->createInfo;
        const VkFramebufferAttachmentsCreateInfoKHR *pFramebufferAttachmentsCreateInfo =
            lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(pFramebufferCreateInfo->pNext);
        // Begin-time attachments are only legal with an imageless framebuffer (03207).
        if ((pFramebufferCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                            HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03207",
                            "VkRenderPassBeginInfo: Image views specified at render pass begin, but framebuffer not created with "
                            "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR");
        } else if (pFramebufferAttachmentsCreateInfo) {
            // The supplied view count must match what the framebuffer was created to expect (03208)
            // before any element-wise comparison is meaningful.
            if (pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != pRenderPassAttachmentBeginInfo->attachmentCount) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03208",
                                "VkRenderPassBeginInfo: %u image views specified at render pass begin, but framebuffer "
                                "created expecting %u attachments",
                                pRenderPassAttachmentBeginInfo->attachmentCount,
                                pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount);
            } else {
                const safe_VkRenderPassCreateInfo2KHR *pRenderPassCreateInfo =
                    &GetRenderPassState(pRenderPassBeginInfo->renderPass)->createInfo;
                // Per-attachment: compare each supplied image view (and its backing image) against the
                // corresponding VkFramebufferAttachmentImageInfoKHR and render pass attachment description.
                for (uint32_t i = 0; i < pRenderPassAttachmentBeginInfo->attachmentCount; ++i) {
                    // NOTE(review): GetImageViewState/GetImageState results are dereferenced without null
                    // checks — presumably earlier object-lifetime validation guarantees valid handles here;
                    // confirm before relying on this with invalid input.
                    const VkImageViewCreateInfo *pImageViewCreateInfo =
                        &GetImageViewState(pRenderPassAttachmentBeginInfo->pAttachments[i])->create_info;
                    const VkFramebufferAttachmentImageInfoKHR *pFramebufferAttachmentImageInfo =
                        &pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[i];
                    const VkImageCreateInfo *pImageCreateInfo = &GetImageState(pImageViewCreateInfo->image)->createInfo;

                    // Image creation flags must match the framebuffer's expectation (03209).
                    if (pFramebufferAttachmentImageInfo->flags != pImageCreateInfo->flags) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
                                        "VUID-VkRenderPassBeginInfo-framebuffer-03209",
                                        "VkRenderPassBeginInfo: Image view #%u created from an image with flags set as 0x%X, "
                                        "but image info #%u used to create the framebuffer had flags set as 0x%X",
                                        i, pImageCreateInfo->flags, i, pFramebufferAttachmentImageInfo->flags);
                    }

                    // Image usage must match (03210).
                    if (pFramebufferAttachmentImageInfo->usage != pImageCreateInfo->usage) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
                                        "VUID-VkRenderPassBeginInfo-framebuffer-03210",
                                        "VkRenderPassBeginInfo: Image view #%u created from an image with usage set as 0x%X, "
                                        "but image info #%u used to create the framebuffer had usage set as 0x%X",
                                        i, pImageCreateInfo->usage, i, pFramebufferAttachmentImageInfo->usage);
                    }

                    // Image width must match (03211).
                    if (pFramebufferAttachmentImageInfo->width != pImageCreateInfo->extent.width) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
                                        "VUID-VkRenderPassBeginInfo-framebuffer-03211",
                                        "VkRenderPassBeginInfo: Image view #%u created from an image with width set as %u, "
                                        "but image info #%u used to create the framebuffer had width set as %u",
                                        i, pImageCreateInfo->extent.width, i, pFramebufferAttachmentImageInfo->width);
                    }

                    // Image height must match (03212).
                    if (pFramebufferAttachmentImageInfo->height != pImageCreateInfo->extent.height) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
                                        "VUID-VkRenderPassBeginInfo-framebuffer-03212",
                                        "VkRenderPassBeginInfo: Image view #%u created from an image with height set as %u, "
                                        "but image info #%u used to create the framebuffer had height set as %u",
                                        i, pImageCreateInfo->extent.height, i, pFramebufferAttachmentImageInfo->height);
                    }

                    // The view's layer count (not the image's) must match the framebuffer info (03213).
                    if (pFramebufferAttachmentImageInfo->layerCount != pImageViewCreateInfo->subresourceRange.layerCount) {
                        skip |= log_msg(
                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                            HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03213",
                            "VkRenderPassBeginInfo: Image view #%u created with a subresource range with a layerCount of %u, "
                            "but image info #%u used to create the framebuffer had layerCount set as %u",
                            i, pImageViewCreateInfo->subresourceRange.layerCount, i, pFramebufferAttachmentImageInfo->layerCount);
                    }

                    // If the image carried a format list, it must agree with the framebuffer's format list:
                    // same count (03214) and every listed format present in the framebuffer's list (03215).
                    const VkImageFormatListCreateInfoKHR *pImageFormatListCreateInfo =
                        lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(pImageCreateInfo->pNext);
                    if (pImageFormatListCreateInfo) {
                        if (pImageFormatListCreateInfo->viewFormatCount != pFramebufferAttachmentImageInfo->viewFormatCount) {
                            skip |= log_msg(
                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03214",
                                "VkRenderPassBeginInfo: Image view #%u created with an image with a viewFormatCount of %u, "
                                "but image info #%u used to create the framebuffer had viewFormatCount set as %u",
                                i, pImageFormatListCreateInfo->viewFormatCount, i,
                                pFramebufferAttachmentImageInfo->viewFormatCount);
                        }

                        // O(n*m) membership scan; format lists are expected to be tiny.
                        for (uint32_t j = 0; j < pImageFormatListCreateInfo->viewFormatCount; ++j) {
                            bool formatFound = false;
                            for (uint32_t k = 0; k < pFramebufferAttachmentImageInfo->viewFormatCount; ++k) {
                                if (pImageFormatListCreateInfo->pViewFormats[j] ==
                                    pFramebufferAttachmentImageInfo->pViewFormats[k]) {
                                    formatFound = true;
                                }
                            }
                            if (!formatFound) {
                                skip |=
                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                            HandleToUint64(pRenderPassBeginInfo->renderPass),
                                            "VUID-VkRenderPassBeginInfo-framebuffer-03215",
                                            "VkRenderPassBeginInfo: Image view #%u created with an image including the format "
                                            "%s in its view format list, "
                                            "but image info #%u used to create the framebuffer does not include this format",
                                            i, string_VkFormat(pImageFormatListCreateInfo->pViewFormats[j]), i);
                            }
                        }
                    }

                    // View format must match the render pass attachment description (03216).
                    if (pRenderPassCreateInfo->pAttachments[i].format != pImageViewCreateInfo->format) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
                                        "VUID-VkRenderPassBeginInfo-framebuffer-03216",
                                        "VkRenderPassBeginInfo: Image view #%u created with a format of %s, "
                                        "but render pass attachment description #%u created with a format of %s",
                                        i, string_VkFormat(pImageViewCreateInfo->format), i,
                                        string_VkFormat(pRenderPassCreateInfo->pAttachments[i].format));
                    }

                    // Image sample count must match the render pass attachment description (03217).
                    if (pRenderPassCreateInfo->pAttachments[i].samples != pImageCreateInfo->samples) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
                                        "VUID-VkRenderPassBeginInfo-framebuffer-03217",
                                        "VkRenderPassBeginInfo: Image view #%u created with an image with %s samples, "
                                        "but render pass attachment description #%u created with %s samples",
                                        i, string_VkSampleCountFlagBits(pImageCreateInfo->samples), i,
                                        string_VkSampleCountFlagBits(pRenderPassCreateInfo->pAttachments[i].samples));
                    }

                    // Begin-time views must reference exactly one mip level (03218).
                    if (pImageViewCreateInfo->subresourceRange.levelCount != 1) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
                                        HandleToUint64(pRenderPassAttachmentBeginInfo->pAttachments[i]),
                                        "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218",
                                        "VkRenderPassAttachmentBeginInfo: Image view #%u created with multiple (%u) mip levels.", i,
                                        pImageViewCreateInfo->subresourceRange.levelCount);
                    }

                    // Each component swizzle must be identity, either explicitly or via the
                    // matching per-channel value (e.g. r == SWIZZLE_R) (03219).
                    if (((pImageViewCreateInfo->components.r != VK_COMPONENT_SWIZZLE_IDENTITY) &&
                         (pImageViewCreateInfo->components.r != VK_COMPONENT_SWIZZLE_R)) ||
                        ((pImageViewCreateInfo->components.g != VK_COMPONENT_SWIZZLE_IDENTITY) &&
                         (pImageViewCreateInfo->components.g != VK_COMPONENT_SWIZZLE_G)) ||
                        ((pImageViewCreateInfo->components.b != VK_COMPONENT_SWIZZLE_IDENTITY) &&
                         (pImageViewCreateInfo->components.b != VK_COMPONENT_SWIZZLE_B)) ||
                        ((pImageViewCreateInfo->components.a != VK_COMPONENT_SWIZZLE_IDENTITY) &&
                         (pImageViewCreateInfo->components.a != VK_COMPONENT_SWIZZLE_A))) {
                        skip |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
                                    HandleToUint64(pRenderPassAttachmentBeginInfo->pAttachments[i]),
                                    "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219",
                                    "VkRenderPassAttachmentBeginInfo: Image view #%u created with non-identity swizzle. All "
                                    "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
                                    "swizzle values:\n"
                                    "r swizzle = %s\n"
                                    "g swizzle = %s\n"
                                    "b swizzle = %s\n"
                                    "a swizzle = %s\n",
                                    i, string_VkComponentSwizzle(pImageViewCreateInfo->components.r),
                                    string_VkComponentSwizzle(pImageViewCreateInfo->components.g),
                                    string_VkComponentSwizzle(pImageViewCreateInfo->components.b),
                                    string_VkComponentSwizzle(pImageViewCreateInfo->components.a));
                    }
                }
            }
        }
    }

    return skip;
}
11099 
11100 // If this is a stencil format, make sure the stencil[Load|Store]Op flag is checked, while if it is a depth/color attachment the
11101 // [load|store]Op flag must be checked
11102 // TODO: The memory valid flag in DEVICE_MEMORY_STATE should probably be split to track the validity of stencil memory separately.
11103 template <typename T>
FormatSpecificLoadAndStoreOpSettings(VkFormat format,T color_depth_op,T stencil_op,T op)11104 static bool FormatSpecificLoadAndStoreOpSettings(VkFormat format, T color_depth_op, T stencil_op, T op) {
11105     if (color_depth_op != op && stencil_op != op) {
11106         return false;
11107     }
11108     bool check_color_depth_load_op = !FormatIsStencilOnly(format);
11109     bool check_stencil_load_op = FormatIsDepthAndStencil(format) || !check_color_depth_load_op;
11110 
11111     return ((check_color_depth_load_op && (color_depth_op == op)) || (check_stencil_load_op && (stencil_op == op)));
11112 }
11113 
ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer,RenderPassCreateVersion rp_version,const VkRenderPassBeginInfo * pRenderPassBegin) const11114 bool CoreChecks::ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, RenderPassCreateVersion rp_version,
11115                                             const VkRenderPassBeginInfo *pRenderPassBegin) const {
11116     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11117     assert(cb_state);
11118     auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
11119     auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
11120 
11121     bool skip = false;
11122     const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
11123     const char *vuid;
11124     const char *const function_name = use_rp2 ? "vkCmdBeginRenderPass2KHR()" : "vkCmdBeginRenderPass()";
11125 
11126     if (render_pass_state) {
11127         uint32_t clear_op_size = 0;  // Make sure pClearValues is at least as large as last LOAD_OP_CLEAR
11128 
11129         // Handle extension struct from EXT_sample_locations
11130         const VkRenderPassSampleLocationsBeginInfoEXT *pSampleLocationsBeginInfo =
11131             lvl_find_in_chain<VkRenderPassSampleLocationsBeginInfoEXT>(pRenderPassBegin->pNext);
11132         if (pSampleLocationsBeginInfo) {
11133             for (uint32_t i = 0; i < pSampleLocationsBeginInfo->attachmentInitialSampleLocationsCount; ++i) {
11134                 if (pSampleLocationsBeginInfo->pAttachmentInitialSampleLocations[i].attachmentIndex >=
11135                     render_pass_state->createInfo.attachmentCount) {
11136                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
11137                                     "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531",
11138                                     "Attachment index %u specified by attachment sample locations %u is greater than the "
11139                                     "attachment count of %u for the render pass being begun.",
11140                                     pSampleLocationsBeginInfo->pAttachmentInitialSampleLocations[i].attachmentIndex, i,
11141                                     render_pass_state->createInfo.attachmentCount);
11142                 }
11143             }
11144 
11145             for (uint32_t i = 0; i < pSampleLocationsBeginInfo->postSubpassSampleLocationsCount; ++i) {
11146                 if (pSampleLocationsBeginInfo->pPostSubpassSampleLocations[i].subpassIndex >=
11147                     render_pass_state->createInfo.subpassCount) {
11148                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
11149                                     "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532",
11150                                     "Subpass index %u specified by subpass sample locations %u is greater than the subpass count "
11151                                     "of %u for the render pass being begun.",
11152                                     pSampleLocationsBeginInfo->pPostSubpassSampleLocations[i].subpassIndex, i,
11153                                     render_pass_state->createInfo.subpassCount);
11154                 }
11155             }
11156         }
11157 
11158         for (uint32_t i = 0; i < render_pass_state->createInfo.attachmentCount; ++i) {
11159             auto pAttachment = &render_pass_state->createInfo.pAttachments[i];
11160             if (FormatSpecificLoadAndStoreOpSettings(pAttachment->format, pAttachment->loadOp, pAttachment->stencilLoadOp,
11161                                                      VK_ATTACHMENT_LOAD_OP_CLEAR)) {
11162                 clear_op_size = static_cast<uint32_t>(i) + 1;
11163             }
11164         }
11165 
11166         if (clear_op_size > pRenderPassBegin->clearValueCount) {
11167             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
11168                             HandleToUint64(render_pass_state->renderPass), "VUID-VkRenderPassBeginInfo-clearValueCount-00902",
11169                             "In %s the VkRenderPassBeginInfo struct has a clearValueCount of %u but there "
11170                             "must be at least %u entries in pClearValues array to account for the highest index attachment in "
11171                             "%s that uses VK_ATTACHMENT_LOAD_OP_CLEAR is %u. Note that the pClearValues array is indexed by "
11172                             "attachment number so even if some pClearValues entries between 0 and %u correspond to attachments "
11173                             "that aren't cleared they will be ignored.",
11174                             function_name, pRenderPassBegin->clearValueCount, clear_op_size,
11175                             report_data->FormatHandle(render_pass_state->renderPass).c_str(), clear_op_size, clear_op_size - 1);
11176         }
11177         skip |= VerifyFramebufferAndRenderPassImageViews(pRenderPassBegin);
11178         skip |= VerifyRenderAreaBounds(pRenderPassBegin);
11179         skip |= VerifyFramebufferAndRenderPassLayouts(rp_version, cb_state, pRenderPassBegin,
11180                                                       GetFramebufferState(pRenderPassBegin->framebuffer));
11181         if (framebuffer->rp_state->renderPass != render_pass_state->renderPass) {
11182             skip |= ValidateRenderPassCompatibility("render pass", render_pass_state, "framebuffer", framebuffer->rp_state.get(),
11183                                                     function_name, "VUID-VkRenderPassBeginInfo-renderPass-00904");
11184         }
11185 
11186         vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-renderpass" : "VUID-vkCmdBeginRenderPass-renderpass";
11187         skip |= InsideRenderPass(cb_state, function_name, vuid);
11188         skip |= ValidateDependencies(framebuffer, render_pass_state);
11189 
11190         vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-bufferlevel" : "VUID-vkCmdBeginRenderPass-bufferlevel";
11191         skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
11192 
11193         vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdBeginRenderPass-commandBuffer-cmdpool";
11194         skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
11195 
11196         const CMD_TYPE cmd_type = use_rp2 ? CMD_BEGINRENDERPASS2KHR : CMD_BEGINRENDERPASS;
11197         skip |= ValidateCmd(cb_state, cmd_type, function_name);
11198     }
11199 
11200     auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
11201     if (chained_device_group_struct) {
11202         skip |= ValidateDeviceMaskToPhysicalDeviceCount(
11203             chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
11204             HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905");
11205         skip |= ValidateDeviceMaskToZero(chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
11206                                          HandleToUint64(pRenderPassBegin->renderPass),
11207                                          "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906");
11208         skip |= ValidateDeviceMaskToCommandBuffer(
11209             cb_state, chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
11210             HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907");
11211 
11212         if (chained_device_group_struct->deviceRenderAreaCount != 0 &&
11213             chained_device_group_struct->deviceRenderAreaCount != physical_device_count) {
11214             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
11215                             HandleToUint64(pRenderPassBegin->renderPass),
11216                             "VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908",
11217                             "deviceRenderAreaCount[%" PRIu32 "] is invaild. Physical device count is %" PRIu32 ".",
11218                             chained_device_group_struct->deviceRenderAreaCount, physical_device_count);
11219         }
11220     }
11221     return skip;
11222 }
11223 
PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)11224 bool CoreChecks::PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
11225                                                    VkSubpassContents contents) {
11226     bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_1, pRenderPassBegin);
11227     return skip;
11228 }
11229 
PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfoKHR * pSubpassBeginInfo)11230 bool CoreChecks::PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
11231                                                        const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
11232     bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_2, pRenderPassBegin);
11233     return skip;
11234 }
11235 
RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassContents contents)11236 void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
11237                                                            const VkRenderPassBeginInfo *pRenderPassBegin,
11238                                                            const VkSubpassContents contents) {
11239     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11240     auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
11241     auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
11242 
11243     if (render_pass_state) {
11244         cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
11245         cb_state->activeRenderPass = render_pass_state;
11246         // This is a shallow copy as that is all that is needed for now
11247         cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
11248         cb_state->activeSubpass = 0;
11249         cb_state->activeSubpassContents = contents;
11250         cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
11251         // Connect this framebuffer and its children to this cmdBuffer
11252         AddFramebufferBinding(cb_state, framebuffer);
11253         // Connect this RP to cmdBuffer
11254         AddCommandBufferBinding(&render_pass_state->cb_bindings,
11255                                 VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass), cb_state);
11256 
11257         auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
11258         if (chained_device_group_struct) {
11259             cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
11260         } else {
11261             cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
11262         }
11263     }
11264 }
11265 
RecordCmdBeginRenderPassLayouts(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassContents contents)11266 void CoreChecks::RecordCmdBeginRenderPassLayouts(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
11267                                                  const VkSubpassContents contents) {
11268     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11269     auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
11270     auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
11271     if (render_pass_state) {
11272         // transition attachments to the correct layouts for beginning of renderPass and first subpass
11273         TransitionBeginRenderPassLayouts(cb_state, render_pass_state, framebuffer);
11274     }
11275 }
11276 
PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)11277 void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
11278                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
11279                                                              VkSubpassContents contents) {
11280     RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
11281 }
11282 
PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)11283 void CoreChecks::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
11284                                                  VkSubpassContents contents) {
11285     StateTracker::PreCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
11286     RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, contents);
11287 }
11288 
PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfoKHR * pSubpassBeginInfo)11289 void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
11290                                                                  const VkRenderPassBeginInfo *pRenderPassBegin,
11291                                                                  const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
11292     RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
11293 }
11294 
PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfoKHR * pSubpassBeginInfo)11295 void CoreChecks::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
11296                                                      const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
11297     StateTracker::PreCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
11298     RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
11299 }
11300 
ValidateCmdNextSubpass(RenderPassCreateVersion rp_version,VkCommandBuffer commandBuffer) const11301 bool CoreChecks::ValidateCmdNextSubpass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const {
11302     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11303     assert(cb_state);
11304     bool skip = false;
11305     const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
11306     const char *vuid;
11307     const char *const function_name = use_rp2 ? "vkCmdNextSubpass2KHR()" : "vkCmdNextSubpass()";
11308 
11309     vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-bufferlevel" : "VUID-vkCmdNextSubpass-bufferlevel";
11310     skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
11311 
11312     vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdNextSubpass-commandBuffer-cmdpool";
11313     skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
11314     const CMD_TYPE cmd_type = use_rp2 ? CMD_NEXTSUBPASS2KHR : CMD_NEXTSUBPASS;
11315     skip |= ValidateCmd(cb_state, cmd_type, function_name);
11316 
11317     vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-renderpass" : "VUID-vkCmdNextSubpass-renderpass";
11318     skip |= OutsideRenderPass(cb_state, function_name, vuid);
11319 
11320     auto subpassCount = cb_state->activeRenderPass->createInfo.subpassCount;
11321     if (cb_state->activeSubpass == subpassCount - 1) {
11322         vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-None-03102" : "VUID-vkCmdNextSubpass-None-00909";
11323         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11324                         HandleToUint64(commandBuffer), vuid, "%s: Attempted to advance beyond final subpass.", function_name);
11325     }
11326     return skip;
11327 }
11328 
PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer,VkSubpassContents contents)11329 bool CoreChecks::PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
11330     return ValidateCmdNextSubpass(RENDER_PASS_VERSION_1, commandBuffer);
11331 }
11332 
PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,const VkSubpassBeginInfoKHR * pSubpassBeginInfo,const VkSubpassEndInfoKHR * pSubpassEndInfo)11333 bool CoreChecks::PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
11334                                                    const VkSubpassEndInfoKHR *pSubpassEndInfo) {
11335     return ValidateCmdNextSubpass(RENDER_PASS_VERSION_2, commandBuffer);
11336 }
11337 
RecordCmdNextSubpass(VkCommandBuffer commandBuffer,VkSubpassContents contents)11338 void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
11339     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11340     cb_state->activeSubpass++;
11341     cb_state->activeSubpassContents = contents;
11342 }
11343 
RecordCmdNextSubpassLayouts(VkCommandBuffer commandBuffer,VkSubpassContents contents)11344 void CoreChecks::RecordCmdNextSubpassLayouts(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
11345     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11346     TransitionSubpassLayouts(cb_state, cb_state->activeRenderPass, cb_state->activeSubpass,
11347                              GetFramebufferState(cb_state->activeRenderPassBeginInfo.framebuffer));
11348 }
11349 
PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer,VkSubpassContents contents)11350 void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
11351     RecordCmdNextSubpass(commandBuffer, contents);
11352 }
11353 
PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer,VkSubpassContents contents)11354 void CoreChecks::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
11355     StateTracker::PostCallRecordCmdNextSubpass(commandBuffer, contents);
11356     RecordCmdNextSubpassLayouts(commandBuffer, contents);
11357 }
11358 
PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,const VkSubpassBeginInfoKHR * pSubpassBeginInfo,const VkSubpassEndInfoKHR * pSubpassEndInfo)11359 void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
11360                                                               const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
11361                                                               const VkSubpassEndInfoKHR *pSubpassEndInfo) {
11362     RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
11363 }
11364 
PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,const VkSubpassBeginInfoKHR * pSubpassBeginInfo,const VkSubpassEndInfoKHR * pSubpassEndInfo)11365 void CoreChecks::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
11366                                                   const VkSubpassEndInfoKHR *pSubpassEndInfo) {
11367     StateTracker::PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
11368     RecordCmdNextSubpassLayouts(commandBuffer, pSubpassBeginInfo->contents);
11369 }
11370 
ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version,VkCommandBuffer commandBuffer) const11371 bool CoreChecks::ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const {
11372     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11373     assert(cb_state);
11374     bool skip = false;
11375     const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
11376     const char *vuid;
11377     const char *const function_name = use_rp2 ? "vkCmdEndRenderPass2KHR()" : "vkCmdEndRenderPass()";
11378 
11379     RENDER_PASS_STATE *rp_state = cb_state->activeRenderPass;
11380     if (rp_state) {
11381         if (cb_state->activeSubpass != rp_state->createInfo.subpassCount - 1) {
11382             vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-None-03103" : "VUID-vkCmdEndRenderPass-None-00910";
11383             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11384                             HandleToUint64(commandBuffer), vuid, "%s: Called before reaching final subpass.", function_name);
11385         }
11386     }
11387 
11388     vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-renderpass" : "VUID-vkCmdEndRenderPass-renderpass";
11389     skip |= OutsideRenderPass(cb_state, function_name, vuid);
11390 
11391     vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-bufferlevel" : "VUID-vkCmdEndRenderPass-bufferlevel";
11392     skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
11393 
11394     vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdEndRenderPass-commandBuffer-cmdpool";
11395     skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
11396 
11397     const CMD_TYPE cmd_type = use_rp2 ? CMD_ENDRENDERPASS2KHR : CMD_ENDRENDERPASS;
11398     skip |= ValidateCmd(cb_state, cmd_type, function_name);
11399     return skip;
11400 }
11401 
PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer)11402 bool CoreChecks::PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) {
11403     bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_1, commandBuffer);
11404     return skip;
11405 }
11406 
PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,const VkSubpassEndInfoKHR * pSubpassEndInfo)11407 bool CoreChecks::PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) {
11408     bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_2, commandBuffer);
11409     return skip;
11410 }
11411 
RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer)11412 void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
11413     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11414     cb_state->activeRenderPass = nullptr;
11415     cb_state->activeSubpass = 0;
11416     cb_state->activeFramebuffer = VK_NULL_HANDLE;
11417 }
11418 
RecordCmdEndRenderPassLayouts(VkCommandBuffer commandBuffer)11419 void CoreChecks::RecordCmdEndRenderPassLayouts(VkCommandBuffer commandBuffer) {
11420     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11421     FRAMEBUFFER_STATE *framebuffer = GetFramebufferState(cb_state->activeFramebuffer);
11422     TransitionFinalSubpassLayouts(cb_state, &cb_state->activeRenderPassBeginInfo, framebuffer);
11423 }
11424 
PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer)11425 void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
11426     RecordCmdEndRenderPassState(commandBuffer);
11427 }
11428 
PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer)11429 void CoreChecks::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
11430     // Record the end at the CoreLevel to ensure StateTracker cleanup doesn't step on anything we need.
11431     RecordCmdEndRenderPassLayouts(commandBuffer);
11432     StateTracker::PostCallRecordCmdEndRenderPass(commandBuffer);
11433 }
11434 
PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,const VkSubpassEndInfoKHR * pSubpassEndInfo)11435 void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
11436                                                                 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
11437     RecordCmdEndRenderPassState(commandBuffer);
11438 }
11439 
PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,const VkSubpassEndInfoKHR * pSubpassEndInfo)11440 void CoreChecks::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) {
11441     StateTracker::PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
11442     RecordCmdEndRenderPassLayouts(commandBuffer);
11443 }
11444 
// Validate that a secondary command buffer's inherited framebuffer (if any) matches the
// primary command buffer's currently-active framebuffer, and that the inherited framebuffer
// handle is known to the state tracker. Returns true if any validation error was logged.
// Called from vkCmdExecuteCommands validation; 'caller' names the entry point (currently
// unused in the messages below, which hard-code vkCmdExecuteCommands()).
bool CoreChecks::ValidateFramebuffer(VkCommandBuffer primaryBuffer, const CMD_BUFFER_STATE *pCB, VkCommandBuffer secondaryBuffer,
                                     const CMD_BUFFER_STATE *pSubCB, const char *caller) {
    bool skip = false;
    // No inheritance info means the secondary buffer inherits no framebuffer — nothing to check.
    if (!pSubCB->beginInfo.pInheritanceInfo) {
        return skip;
    }
    VkFramebuffer primary_fb = pCB->activeFramebuffer;
    VkFramebuffer secondary_fb = pSubCB->beginInfo.pInheritanceInfo->framebuffer;
    // VK_NULL_HANDLE in the inheritance info is legal (framebuffer unknown at record time).
    if (secondary_fb != VK_NULL_HANDLE) {
        // If specified, the inherited framebuffer must be the one the primary is rendering into.
        if (primary_fb != secondary_fb) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(primaryBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00099",
                            "vkCmdExecuteCommands() called w/ invalid secondary %s which has a %s"
                            " that is not the same as the primary command buffer's current active %s.",
                            report_data->FormatHandle(secondaryBuffer).c_str(), report_data->FormatHandle(secondary_fb).c_str(),
                            report_data->FormatHandle(primary_fb).c_str());
        }
        // The handle must still resolve to tracked framebuffer state (i.e., not destroyed/invalid).
        auto fb = GetFramebufferState(secondary_fb);
        if (!fb) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(primaryBuffer), kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
                            "vkCmdExecuteCommands() called w/ invalid %s which has invalid %s.",
                            report_data->FormatHandle(secondaryBuffer).c_str(), report_data->FormatHandle(secondary_fb).c_str());
            return skip;
        }
    }
    return skip;
}
11473 
// Validate interactions between a primary command buffer (pCB) and a secondary buffer
// (pSubCB) it is about to execute:
//  - pipeline-statistics queries active in the primary must be a superset of the
//    statistics flags the secondary was begun with;
//  - the secondary must not have started a query of a type already active in the primary;
//  - both buffers must come from command pools of the same queue family.
// Returns true if any validation error was logged.
bool CoreChecks::ValidateSecondaryCommandBufferState(const CMD_BUFFER_STATE *pCB, const CMD_BUFFER_STATE *pSubCB) {
    bool skip = false;
    // Query types currently active in the primary buffer; filled in the first loop,
    // consulted in the second.
    unordered_set<int> activeTypes;
    if (!disabled.query_validation) {
        for (auto queryObject : pCB->activeQueries) {
            auto query_pool_state = GetQueryPoolState(queryObject.pool);
            if (query_pool_state) {
                if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS &&
                    pSubCB->beginInfo.pInheritanceInfo) {
                    // The secondary's inherited pipelineStatistics must be a subset of the
                    // pool's statistics, otherwise the inherited query results are undefined.
                    VkQueryPipelineStatisticFlags cmdBufStatistics = pSubCB->beginInfo.pInheritanceInfo->pipelineStatistics;
                    if ((cmdBufStatistics & query_pool_state->createInfo.pipelineStatistics) != cmdBufStatistics) {
                        skip |= log_msg(
                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(pCB->commandBuffer), "VUID-vkCmdExecuteCommands-commandBuffer-00104",
                            "vkCmdExecuteCommands() called w/ invalid %s which has invalid active %s"
                            ". Pipeline statistics is being queried so the command buffer must have all bits set on the queryPool.",
                            report_data->FormatHandle(pCB->commandBuffer).c_str(),
                            report_data->FormatHandle(queryObject.pool).c_str());
                    }
                }
                activeTypes.insert(query_pool_state->createInfo.queryType);
            }
        }
        // A secondary buffer may not begin a query of a type the primary already has active.
        for (auto queryObject : pSubCB->startedQueries) {
            auto query_pool_state = GetQueryPoolState(queryObject.pool);
            if (query_pool_state && activeTypes.count(query_pool_state->createInfo.queryType)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
                                "vkCmdExecuteCommands() called w/ invalid %s which has invalid active %s"
                                " of type %d but a query of that type has been started on secondary %s.",
                                report_data->FormatHandle(pCB->commandBuffer).c_str(),
                                report_data->FormatHandle(queryObject.pool).c_str(), query_pool_state->createInfo.queryType,
                                report_data->FormatHandle(pSubCB->commandBuffer).c_str());
            }
        }
    }
    // Primary and secondary command buffers must target the same queue family.
    auto primary_pool = GetCommandPoolState(pCB->createInfo.commandPool);
    auto secondary_pool = GetCommandPoolState(pSubCB->createInfo.commandPool);
    if (primary_pool && secondary_pool && (primary_pool->queueFamilyIndex != secondary_pool->queueFamilyIndex)) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(pSubCB->commandBuffer), kVUID_Core_DrawState_InvalidQueueFamily,
                        "vkCmdExecuteCommands(): Primary %s created in queue family %d has secondary "
                        "%s created in queue family %d.",
                        report_data->FormatHandle(pCB->commandBuffer).c_str(), primary_pool->queueFamilyIndex,
                        report_data->FormatHandle(pSubCB->commandBuffer).c_str(), secondary_pool->queueFamilyIndex);
    }

    return skip;
}
11523 
PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer,uint32_t commandBuffersCount,const VkCommandBuffer * pCommandBuffers)11524 bool CoreChecks::PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
11525                                                    const VkCommandBuffer *pCommandBuffers) {
11526     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11527     assert(cb_state);
11528     bool skip = false;
11529     const CMD_BUFFER_STATE *sub_cb_state = NULL;
11530     std::unordered_set<const CMD_BUFFER_STATE *> linked_command_buffers;
11531 
11532     for (uint32_t i = 0; i < commandBuffersCount; i++) {
11533         sub_cb_state = GetCBState(pCommandBuffers[i]);
11534         assert(sub_cb_state);
11535         if (VK_COMMAND_BUFFER_LEVEL_PRIMARY == sub_cb_state->createInfo.level) {
11536             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11537                             HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00088",
11538                             "vkCmdExecuteCommands() called w/ Primary %s in element %u of pCommandBuffers array. All "
11539                             "cmd buffers in pCommandBuffers array must be secondary.",
11540                             report_data->FormatHandle(pCommandBuffers[i]).c_str(), i);
11541         } else if (VK_COMMAND_BUFFER_LEVEL_SECONDARY == sub_cb_state->createInfo.level) {
11542             if (sub_cb_state->beginInfo.pInheritanceInfo != nullptr) {
11543                 const auto secondary_rp_state = GetRenderPassState(sub_cb_state->beginInfo.pInheritanceInfo->renderPass);
11544                 if (cb_state->activeRenderPass &&
11545                     !(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
11546                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11547                                     HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00096",
11548                                     "vkCmdExecuteCommands(): Secondary %s is executed within a %s "
11549                                     "instance scope, but the Secondary Command Buffer does not have the "
11550                                     "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set in VkCommandBufferBeginInfo::flags when "
11551                                     "the vkBeginCommandBuffer() was called.",
11552                                     report_data->FormatHandle(pCommandBuffers[i]).c_str(),
11553                                     report_data->FormatHandle(cb_state->activeRenderPass->renderPass).c_str());
11554                 } else if (!cb_state->activeRenderPass &&
11555                            (sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
11556                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11557                                     HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00100",
11558                                     "vkCmdExecuteCommands(): Secondary %s is executed outside a render pass "
11559                                     "instance scope, but the Secondary Command Buffer does have the "
11560                                     "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set in VkCommandBufferBeginInfo::flags when "
11561                                     "the vkBeginCommandBuffer() was called.",
11562                                     report_data->FormatHandle(pCommandBuffers[i]).c_str());
11563                 } else if (cb_state->activeRenderPass &&
11564                            (sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
11565                     // Make sure render pass is compatible with parent command buffer pass if has continue
11566                     if (cb_state->activeRenderPass->renderPass != secondary_rp_state->renderPass) {
11567                         skip |= ValidateRenderPassCompatibility(
11568                             "primary command buffer", cb_state->activeRenderPass, "secondary command buffer", secondary_rp_state,
11569                             "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-pInheritanceInfo-00098");
11570                     }
11571                     //  If framebuffer for secondary CB is not NULL, then it must match active FB from primaryCB
11572                     skip |=
11573                         ValidateFramebuffer(commandBuffer, cb_state, pCommandBuffers[i], sub_cb_state, "vkCmdExecuteCommands()");
11574                     if (!sub_cb_state->cmd_execute_commands_functions.empty()) {
11575                         //  Inherit primary's activeFramebuffer and while running validate functions
11576                         for (auto &function : sub_cb_state->cmd_execute_commands_functions) {
11577                             skip |= function(cb_state, cb_state->activeFramebuffer);
11578                         }
11579                     }
11580                 }
11581             }
11582         }
11583 
11584         // TODO(mlentine): Move more logic into this method
11585         skip |= ValidateSecondaryCommandBufferState(cb_state, sub_cb_state);
11586         skip |= ValidateCommandBufferState(sub_cb_state, "vkCmdExecuteCommands()", 0,
11587                                            "VUID-vkCmdExecuteCommands-pCommandBuffers-00089");
11588         if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
11589             if (sub_cb_state->in_use.load()) {
11590                 // TODO: Find some way to differentiate between the -00090 and -00091 conditions
11591                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11592                                 HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00090",
11593                                 "Cannot execute pending %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
11594                                 report_data->FormatHandle(sub_cb_state->commandBuffer).c_str());
11595             }
11596             // We use an const_cast, because one cannot query a container keyed on a non-const pointer using a const pointer
11597             if (cb_state->linkedCommandBuffers.count(const_cast<CMD_BUFFER_STATE *>(sub_cb_state))) {
11598                 skip |= log_msg(
11599                     report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11600                     HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00092",
11601                     "Cannot execute %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set if previously executed in %s",
11602                     report_data->FormatHandle(sub_cb_state->commandBuffer).c_str(),
11603                     report_data->FormatHandle(cb_state->commandBuffer).c_str());
11604             }
11605 
11606             const auto insert_pair = linked_command_buffers.insert(sub_cb_state);
11607             if (!insert_pair.second) {
11608                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11609                                 HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00093",
11610                                 "Cannot duplicate %s in pCommandBuffers without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
11611                                 report_data->FormatHandle(cb_state->commandBuffer).c_str());
11612             }
11613 
11614             if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
11615                 // Warn that non-simultaneous secondary cmd buffer renders primary non-simultaneous
11616                 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11617                                 HandleToUint64(pCommandBuffers[i]), kVUID_Core_DrawState_InvalidCommandBufferSimultaneousUse,
11618                                 "vkCmdExecuteCommands(): Secondary %s does not have "
11619                                 "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set and will cause primary "
11620                                 "%s to be treated as if it does not have "
11621                                 "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set, even though it does.",
11622                                 report_data->FormatHandle(pCommandBuffers[i]).c_str(),
11623                                 report_data->FormatHandle(cb_state->commandBuffer).c_str());
11624             }
11625         }
11626         if (!cb_state->activeQueries.empty() && !enabled_features.core.inheritedQueries) {
11627             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11628                             HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-commandBuffer-00101",
11629                             "vkCmdExecuteCommands(): Secondary %s cannot be submitted with a query in flight and "
11630                             "inherited queries not supported on this device.",
11631                             report_data->FormatHandle(pCommandBuffers[i]).c_str());
11632         }
11633         // Validate initial layout uses vs. the primary cmd buffer state
11634         // Novel Valid usage: "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001"
11635         // initial layout usage of secondary command buffers resources must match parent command buffer
11636         const auto *const_cb_state = static_cast<const CMD_BUFFER_STATE *>(cb_state);
11637         for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
11638             const auto image = sub_layout_map_entry.first;
11639             const auto *image_state = GetImageState(image);
11640             if (!image_state) continue;  // Can't set layouts of a dead image
11641 
11642             const auto *cb_subres_map = GetImageSubresourceLayoutMap(const_cb_state, image);
11643             // Const getter can be null in which case we have nothing to check against for this image...
11644             if (!cb_subres_map) continue;
11645 
11646             const auto &sub_cb_subres_map = sub_layout_map_entry.second;
11647             // Validate the initial_uses, that they match the current state of the primary cb, or absent a current state,
11648             // that the match any initial_layout.
11649             for (auto it_init = sub_cb_subres_map->BeginInitialUse(); !it_init.AtEnd(); ++it_init) {
11650                 const auto &sub_layout = (*it_init).layout;
11651                 if (VK_IMAGE_LAYOUT_UNDEFINED == sub_layout) continue;  // secondary doesn't care about current or initial
11652                 const auto &subresource = (*it_init).subresource;
11653                 // Look up the current layout (if any)
11654                 VkImageLayout cb_layout = cb_subres_map->GetSubresourceLayout(subresource);
11655                 const char *layout_type = "current";
11656                 if (cb_layout == kInvalidLayout) {
11657                     // Find initial layout (if any)
11658                     cb_layout = cb_subres_map->GetSubresourceInitialLayout(subresource);
11659                     layout_type = "initial";
11660                 }
11661                 if ((cb_layout != kInvalidLayout) && (cb_layout != sub_layout)) {
11662                     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
11663                             HandleToUint64(pCommandBuffers[i]), "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001",
11664                             "%s: Executed secondary command buffer using %s (subresource: aspectMask 0x%X array layer %u, "
11665                             "mip level %u) which expects layout %s--instead, image %s layout is %s.",
11666                             "vkCmdExecuteCommands():", report_data->FormatHandle(image).c_str(), subresource.aspectMask,
11667                             subresource.arrayLayer, subresource.mipLevel, string_VkImageLayout(sub_layout), layout_type,
11668                             string_VkImageLayout(cb_layout));
11669                 }
11670             }
11671         }
11672     }
11673 
11674     skip |= ValidatePrimaryCommandBuffer(cb_state, "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-bufferlevel");
11675     skip |= ValidateCmdQueueFlags(cb_state, "vkCmdExecuteCommands()",
11676                                   VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
11677                                   "VUID-vkCmdExecuteCommands-commandBuffer-cmdpool");
11678     skip |= ValidateCmd(cb_state, CMD_EXECUTECOMMANDS, "vkCmdExecuteCommands()");
11679     return skip;
11680 }
11681 
PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer,uint32_t commandBuffersCount,const VkCommandBuffer * pCommandBuffers)11682 void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
11683                                                              const VkCommandBuffer *pCommandBuffers) {
11684     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
11685 
11686     CMD_BUFFER_STATE *sub_cb_state = NULL;
11687     for (uint32_t i = 0; i < commandBuffersCount; i++) {
11688         sub_cb_state = GetCBState(pCommandBuffers[i]);
11689         assert(sub_cb_state);
11690         if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
11691             if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
11692                 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
11693                 // from the validation step to the recording step
11694                 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
11695             }
11696         }
11697 
11698         // Propagate inital layout and current layout state to the primary cmd buffer
11699         // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
11700         // ValidationStateTracker these maps will be empty, so leaving the propagation in the the state tracker should be a no-op
11701         // for those other classes.
11702         for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
11703             const auto image = sub_layout_map_entry.first;
11704             const auto *image_state = GetImageState(image);
11705             if (!image_state) continue;  // Can't set layouts of a dead image
11706 
11707             auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
11708             const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
11709             assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
11710             cb_subres_map->UpdateFrom(*sub_cb_subres_map);
11711         }
11712 
11713         sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
11714         cb_state->linkedCommandBuffers.insert(sub_cb_state);
11715         sub_cb_state->linkedCommandBuffers.insert(cb_state);
11716         for (auto &function : sub_cb_state->queryUpdates) {
11717             cb_state->queryUpdates.push_back(function);
11718         }
11719         for (auto &function : sub_cb_state->queue_submit_functions) {
11720             cb_state->queue_submit_functions.push_back(function);
11721         }
11722     }
11723 }
11724 
PreCallValidateMapMemory(VkDevice device,VkDeviceMemory mem,VkDeviceSize offset,VkDeviceSize size,VkFlags flags,void ** ppData)11725 bool CoreChecks::PreCallValidateMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
11726                                           VkFlags flags, void **ppData) {
11727     bool skip = false;
11728     DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
11729     if (mem_info) {
11730         if ((phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
11731              VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) {
11732             skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11733                            HandleToUint64(mem), "VUID-vkMapMemory-memory-00682",
11734                            "Mapping Memory without VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT set: %s.",
11735                            report_data->FormatHandle(mem).c_str());
11736         }
11737     }
11738     skip |= ValidateMapMemRange(mem, offset, size);
11739     return skip;
11740 }
11741 
PostCallRecordMapMemory(VkDevice device,VkDeviceMemory mem,VkDeviceSize offset,VkDeviceSize size,VkFlags flags,void ** ppData,VkResult result)11742 void CoreChecks::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkFlags flags,
11743                                          void **ppData, VkResult result) {
11744     if (VK_SUCCESS != result) return;
11745     // TODO : What's the point of this range? See comment on creating new "bound_range" above, which may replace this
11746     StoreMemRanges(mem, offset, size);
11747     InitializeAndTrackMemory(mem, offset, size, ppData);
11748 }
11749 
PreCallValidateUnmapMemory(VkDevice device,VkDeviceMemory mem)11750 bool CoreChecks::PreCallValidateUnmapMemory(VkDevice device, VkDeviceMemory mem) {
11751     bool skip = false;
11752     auto mem_info = GetDevMemState(mem);
11753     if (mem_info && !mem_info->mem_range.size) {
11754         // Valid Usage: memory must currently be mapped
11755         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11756                         HandleToUint64(mem), "VUID-vkUnmapMemory-memory-00689", "Unmapping Memory without memory being mapped: %s.",
11757                         report_data->FormatHandle(mem).c_str());
11758     }
11759     return skip;
11760 }
11761 
PreCallRecordUnmapMemory(VkDevice device,VkDeviceMemory mem)11762 void CoreChecks::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
11763     auto mem_info = GetDevMemState(mem);
11764     mem_info->mem_range.size = 0;
11765     if (mem_info->shadow_copy) {
11766         free(mem_info->shadow_copy_base);
11767         mem_info->shadow_copy_base = 0;
11768         mem_info->shadow_copy = 0;
11769     }
11770 }
11771 
ValidateMemoryIsMapped(const char * funcName,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges)11772 bool CoreChecks::ValidateMemoryIsMapped(const char *funcName, uint32_t memRangeCount, const VkMappedMemoryRange *pMemRanges) {
11773     bool skip = false;
11774     for (uint32_t i = 0; i < memRangeCount; ++i) {
11775         auto mem_info = GetDevMemState(pMemRanges[i].memory);
11776         if (mem_info) {
11777             if (pMemRanges[i].size == VK_WHOLE_SIZE) {
11778                 if (mem_info->mem_range.offset > pMemRanges[i].offset) {
11779                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11780                                     HandleToUint64(pMemRanges[i].memory), "VUID-VkMappedMemoryRange-size-00686",
11781                                     "%s: Flush/Invalidate offset (" PRINTF_SIZE_T_SPECIFIER
11782                                     ") is less than Memory Object's offset (" PRINTF_SIZE_T_SPECIFIER ").",
11783                                     funcName, static_cast<size_t>(pMemRanges[i].offset),
11784                                     static_cast<size_t>(mem_info->mem_range.offset));
11785                 }
11786             } else {
11787                 const uint64_t data_end = (mem_info->mem_range.size == VK_WHOLE_SIZE)
11788                                               ? mem_info->alloc_info.allocationSize
11789                                               : (mem_info->mem_range.offset + mem_info->mem_range.size);
11790                 if ((mem_info->mem_range.offset > pMemRanges[i].offset) ||
11791                     (data_end < (pMemRanges[i].offset + pMemRanges[i].size))) {
11792                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11793                                     HandleToUint64(pMemRanges[i].memory), "VUID-VkMappedMemoryRange-size-00685",
11794                                     "%s: Flush/Invalidate size or offset (" PRINTF_SIZE_T_SPECIFIER ", " PRINTF_SIZE_T_SPECIFIER
11795                                     ") exceed the Memory Object's upper-bound (" PRINTF_SIZE_T_SPECIFIER ").",
11796                                     funcName, static_cast<size_t>(pMemRanges[i].offset + pMemRanges[i].size),
11797                                     static_cast<size_t>(pMemRanges[i].offset), static_cast<size_t>(data_end));
11798                 }
11799             }
11800         }
11801     }
11802     return skip;
11803 }
11804 
ValidateAndCopyNoncoherentMemoryToDriver(uint32_t mem_range_count,const VkMappedMemoryRange * mem_ranges)11805 bool CoreChecks::ValidateAndCopyNoncoherentMemoryToDriver(uint32_t mem_range_count, const VkMappedMemoryRange *mem_ranges) {
11806     bool skip = false;
11807     for (uint32_t i = 0; i < mem_range_count; ++i) {
11808         auto mem_info = GetDevMemState(mem_ranges[i].memory);
11809         if (mem_info) {
11810             if (mem_info->shadow_copy) {
11811                 VkDeviceSize size = (mem_info->mem_range.size != VK_WHOLE_SIZE)
11812                                         ? mem_info->mem_range.size
11813                                         : (mem_info->alloc_info.allocationSize - mem_info->mem_range.offset);
11814                 char *data = static_cast<char *>(mem_info->shadow_copy);
11815                 for (uint64_t j = 0; j < mem_info->shadow_pad_size; ++j) {
11816                     if (data[j] != NoncoherentMemoryFillValue) {
11817                         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11818                                         HandleToUint64(mem_ranges[i].memory), kVUID_Core_MemTrack_InvalidMap,
11819                                         "Memory underflow was detected on %s.",
11820                                         report_data->FormatHandle(mem_ranges[i].memory).c_str());
11821                     }
11822                 }
11823                 for (uint64_t j = (size + mem_info->shadow_pad_size); j < (2 * mem_info->shadow_pad_size + size); ++j) {
11824                     if (data[j] != NoncoherentMemoryFillValue) {
11825                         skip |=
11826                             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11827                                     HandleToUint64(mem_ranges[i].memory), kVUID_Core_MemTrack_InvalidMap,
11828                                     "Memory overflow was detected on %s.", report_data->FormatHandle(mem_ranges[i].memory).c_str());
11829                     }
11830                 }
11831                 memcpy(mem_info->p_driver_data, static_cast<void *>(data + mem_info->shadow_pad_size), (size_t)(size));
11832             }
11833         }
11834     }
11835     return skip;
11836 }
11837 
CopyNoncoherentMemoryFromDriver(uint32_t mem_range_count,const VkMappedMemoryRange * mem_ranges)11838 void CoreChecks::CopyNoncoherentMemoryFromDriver(uint32_t mem_range_count, const VkMappedMemoryRange *mem_ranges) {
11839     for (uint32_t i = 0; i < mem_range_count; ++i) {
11840         auto mem_info = GetDevMemState(mem_ranges[i].memory);
11841         if (mem_info && mem_info->shadow_copy) {
11842             VkDeviceSize size = (mem_info->mem_range.size != VK_WHOLE_SIZE)
11843                                     ? mem_info->mem_range.size
11844                                     : (mem_info->alloc_info.allocationSize - mem_ranges[i].offset);
11845             char *data = static_cast<char *>(mem_info->shadow_copy);
11846             memcpy(data + mem_info->shadow_pad_size, mem_info->p_driver_data, (size_t)(size));
11847         }
11848     }
11849 }
11850 
ValidateMappedMemoryRangeDeviceLimits(const char * func_name,uint32_t mem_range_count,const VkMappedMemoryRange * mem_ranges)11851 bool CoreChecks::ValidateMappedMemoryRangeDeviceLimits(const char *func_name, uint32_t mem_range_count,
11852                                                        const VkMappedMemoryRange *mem_ranges) {
11853     bool skip = false;
11854     for (uint32_t i = 0; i < mem_range_count; ++i) {
11855         uint64_t atom_size = phys_dev_props.limits.nonCoherentAtomSize;
11856         if (SafeModulo(mem_ranges[i].offset, atom_size) != 0) {
11857             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11858                             HandleToUint64(mem_ranges->memory), "VUID-VkMappedMemoryRange-offset-00687",
11859                             "%s: Offset in pMemRanges[%d] is 0x%" PRIxLEAST64
11860                             ", which is not a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (0x%" PRIxLEAST64 ").",
11861                             func_name, i, mem_ranges[i].offset, atom_size);
11862         }
11863         auto mem_info = GetDevMemState(mem_ranges[i].memory);
11864         if ((mem_ranges[i].size != VK_WHOLE_SIZE) &&
11865             (mem_ranges[i].size + mem_ranges[i].offset != mem_info->alloc_info.allocationSize) &&
11866             (SafeModulo(mem_ranges[i].size, atom_size) != 0)) {
11867             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11868                             HandleToUint64(mem_ranges->memory), "VUID-VkMappedMemoryRange-size-01390",
11869                             "%s: Size in pMemRanges[%d] is 0x%" PRIxLEAST64
11870                             ", which is not a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (0x%" PRIxLEAST64 ").",
11871                             func_name, i, mem_ranges[i].size, atom_size);
11872         }
11873     }
11874     return skip;
11875 }
11876 
PreCallValidateFlushMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges)11877 bool CoreChecks::PreCallValidateFlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
11878                                                         const VkMappedMemoryRange *pMemRanges) {
11879     bool skip = false;
11880     skip |= ValidateMappedMemoryRangeDeviceLimits("vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
11881     skip |= ValidateAndCopyNoncoherentMemoryToDriver(memRangeCount, pMemRanges);
11882     skip |= ValidateMemoryIsMapped("vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
11883     return skip;
11884 }
11885 
PreCallValidateInvalidateMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges)11886 bool CoreChecks::PreCallValidateInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
11887                                                              const VkMappedMemoryRange *pMemRanges) {
11888     bool skip = false;
11889     skip |= ValidateMappedMemoryRangeDeviceLimits("vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
11890     skip |= ValidateMemoryIsMapped("vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
11891     return skip;
11892 }
11893 
PostCallRecordInvalidateMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges,VkResult result)11894 void CoreChecks::PostCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
11895                                                             const VkMappedMemoryRange *pMemRanges, VkResult result) {
11896     if (VK_SUCCESS == result) {
11897         // Update our shadow copy with modified driver data
11898         CopyNoncoherentMemoryFromDriver(memRangeCount, pMemRanges);
11899     }
11900 }
11901 
PreCallValidateGetDeviceMemoryCommitment(VkDevice device,VkDeviceMemory mem,VkDeviceSize * pCommittedMem)11902 bool CoreChecks::PreCallValidateGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory mem, VkDeviceSize *pCommittedMem) {
11903     bool skip = false;
11904     const auto mem_info = GetDevMemState(mem);
11905 
11906     if (mem_info) {
11907         if ((phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
11908              VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) == 0) {
11909             skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
11910                            HandleToUint64(mem), "VUID-vkGetDeviceMemoryCommitment-memory-00690",
11911                            "Querying commitment for memory without VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT set: %s.",
11912                            report_data->FormatHandle(mem).c_str());
11913         }
11914     }
11915     return skip;
11916 }
11917 
// Shared validation for vkBindImageMemory / vkBindImageMemory2: checks the
// image/memory binding itself, memory-requirements queries, range, alignment,
// size, dedicated-allocation rules, and the swapchain-binding pNext path.
// 'api_name' is the calling entry point, used in every message.
// Returns true if any validation error/warning was logged.
bool CoreChecks::ValidateBindImageMemory(const VkBindImageMemoryInfo &bindInfo, const char *api_name) const {
    bool skip = false;
    const IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // Track objects tied to memory
        uint64_t image_handle = HandleToUint64(bindInfo.image);
        skip = ValidateSetMemBinding(bindInfo.memory, VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage), api_name);
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        // AHB-backed images get their requirements from the hardware buffer, so
        // having queried vkGetImageMemoryRequirements is itself the error, and
        // none of the requirements-based checks below apply -- early return.
        if (image_state->external_format_android) {
            if (image_state->memory_requirements_checked) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                                kVUID_Core_BindImage_InvalidMemReqQuery,
                                "%s: Must not call vkGetImageMemoryRequirements on %s that will be bound to an external "
                                "Android hardware buffer.",
                                api_name, report_data->FormatHandle(bindInfo.image).c_str());
            }
            return skip;
        }
#endif  // VK_USE_PLATFORM_ANDROID_KHR
        if (!image_state->memory_requirements_checked) {
            // There's not an explicit requirement in the spec to call vkGetImageMemoryRequirements() prior to calling
            // BindImageMemory but it's implied in that memory being bound must conform with VkMemoryRequirements from
            // vkGetImageMemoryRequirements()
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                            kVUID_Core_BindImage_NoMemReqQuery,
                            "%s: Binding memory to %s but vkGetImageMemoryRequirements() has not been called on that image.",
                            api_name, report_data->FormatHandle(bindInfo.image).c_str());
            // Use this information fetched at CreateImage time, in validation below.
        }

        // Validate bound memory range information
        const auto mem_info = GetDevMemState(bindInfo.memory);
        if (mem_info) {
            skip |= ValidateInsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
                                                   image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR, api_name);
            skip |= ValidateMemoryTypes(mem_info, image_state->requirements.memoryTypeBits, api_name,
                                        "VUID-vkBindImageMemory-memory-01047");
        }

        // Validate memory requirements alignment
        if (SafeModulo(bindInfo.memoryOffset, image_state->requirements.alignment) != 0) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                            "VUID-vkBindImageMemory-memoryOffset-01048",
                            "%s: memoryOffset is 0x%" PRIxLEAST64
                            " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
                            ", returned from a call to vkGetImageMemoryRequirements with image.",
                            api_name, bindInfo.memoryOffset, image_state->requirements.alignment);
        }

        if (mem_info) {
            // Validate memory requirements size
            if (image_state->requirements.size > mem_info->alloc_info.allocationSize - bindInfo.memoryOffset) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                            "VUID-vkBindImageMemory-size-01049",
                            "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
                            " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
                            ", returned from a call to vkGetImageMemoryRequirements with image.",
                            api_name, mem_info->alloc_info.allocationSize - bindInfo.memoryOffset, image_state->requirements.size);
            }

            // Validate dedicated allocation: a dedicated allocation must be bound to
            // exactly its dedicated image at offset zero.
            if (mem_info->is_dedicated && ((mem_info->dedicated_image != bindInfo.image) || (bindInfo.memoryOffset != 0))) {
                // TODO: Add vkBindImageMemory2KHR error message when added to spec.
                auto validation_error = kVUIDUndefined;
                if (strcmp(api_name, "vkBindImageMemory()") == 0) {
                    validation_error = "VUID-vkBindImageMemory-memory-01509";
                }
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                                validation_error,
                                "%s: for dedicated memory allocation %s, VkMemoryDedicatedAllocateInfoKHR:: %s must be equal "
                                "to %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
                                api_name, report_data->FormatHandle(bindInfo.memory).c_str(),
                                report_data->FormatHandle(mem_info->dedicated_image).c_str(),
                                report_data->FormatHandle(bindInfo.image).c_str(), bindInfo.memoryOffset);
            }
        }

        // Swapchain-image binding path: memory comes from the swapchain, so
        // bindInfo.memory must be VK_NULL_HANDLE and the image index in range.
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            if (bindInfo.memory != VK_NULL_HANDLE) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                                "VUID-VkBindImageMemoryInfo-pNext-01631", "%s: %s is not VK_NULL_HANDLE.", api_name,
                                report_data->FormatHandle(bindInfo.memory).c_str());
            }
            const auto swapchain_state = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain_state && swapchain_state->images.size() <= swapchain_info->imageIndex) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                            "VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644",
                            "%s: imageIndex (%i) is out of bounds of %s images (size: %i)", api_name, swapchain_info->imageIndex,
                            report_data->FormatHandle(swapchain_info->swapchain).c_str(), (int)swapchain_state->images.size());
            }
        } else {
            // Non-swapchain path: a swapchain-created image requires the swapchain
            // pNext struct, and the memory handle must resolve to a known object.
            if (image_state->create_from_swapchain) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                                "VUID-VkBindImageMemoryInfo-image-01630",
                                "%s: pNext of VkBindImageMemoryInfo doesn't include VkBindImageMemorySwapchainInfoKHR.", api_name);
            }
            if (!mem_info) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
                                "VUID-VkBindImageMemoryInfo-pNext-01632", "%s: %s is invalid.", api_name,
                                report_data->FormatHandle(bindInfo.memory).c_str());
            }
        }
    }
    return skip;
}
12026 
PreCallValidateBindImageMemory(VkDevice device,VkImage image,VkDeviceMemory mem,VkDeviceSize memoryOffset)12027 bool CoreChecks::PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
12028     VkBindImageMemoryInfo bindInfo = {};
12029     bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
12030     bindInfo.image = image;
12031     bindInfo.memory = mem;
12032     bindInfo.memoryOffset = memoryOffset;
12033     return ValidateBindImageMemory(bindInfo, "vkBindImageMemory()");
12034 }
12035 
UpdateBindImageMemoryState(const VkBindImageMemoryInfo & bindInfo)12036 void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
12037     IMAGE_STATE *image_state = GetImageState(bindInfo.image);
12038     if (image_state) {
12039         const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
12040         if (swapchain_info) {
12041             image_state->bind_swapchain = swapchain_info->swapchain;
12042             image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
12043         } else {
12044             // Track bound memory range information
12045             auto mem_info = GetDevMemState(bindInfo.memory);
12046             if (mem_info) {
12047                 InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
12048                                        image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
12049             }
12050 
12051             // Track objects tied to memory
12052             SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
12053                           VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
12054         }
12055     }
12056 }
12057 
PostCallRecordBindImageMemory(VkDevice device,VkImage image,VkDeviceMemory mem,VkDeviceSize memoryOffset,VkResult result)12058 void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
12059                                                            VkDeviceSize memoryOffset, VkResult result) {
12060     if (VK_SUCCESS != result) return;
12061     VkBindImageMemoryInfo bindInfo = {};
12062     bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
12063     bindInfo.image = image;
12064     bindInfo.memory = mem;
12065     bindInfo.memoryOffset = memoryOffset;
12066     UpdateBindImageMemoryState(bindInfo);
12067 }
12068 
PreCallValidateBindImageMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfoKHR * pBindInfos)12069 bool CoreChecks::PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
12070                                                  const VkBindImageMemoryInfoKHR *pBindInfos) {
12071     bool skip = false;
12072     char api_name[128];
12073     for (uint32_t i = 0; i < bindInfoCount; i++) {
12074         sprintf(api_name, "vkBindImageMemory2() pBindInfos[%u]", i);
12075         skip |= ValidateBindImageMemory(pBindInfos[i], api_name);
12076     }
12077     return skip;
12078 }
12079 
PreCallValidateBindImageMemory2KHR(VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfoKHR * pBindInfos)12080 bool CoreChecks::PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
12081                                                     const VkBindImageMemoryInfoKHR *pBindInfos) {
12082     bool skip = false;
12083     char api_name[128];
12084     for (uint32_t i = 0; i < bindInfoCount; i++) {
12085         sprintf(api_name, "vkBindImageMemory2KHR() pBindInfos[%u]", i);
12086         skip |= ValidateBindImageMemory(pBindInfos[i], api_name);
12087     }
12088     return skip;
12089 }
12090 
PostCallRecordBindImageMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfoKHR * pBindInfos,VkResult result)12091 void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
12092                                                             const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
12093     if (VK_SUCCESS != result) return;
12094     for (uint32_t i = 0; i < bindInfoCount; i++) {
12095         UpdateBindImageMemoryState(pBindInfos[i]);
12096     }
12097 }
12098 
PostCallRecordBindImageMemory2KHR(VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfoKHR * pBindInfos,VkResult result)12099 void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
12100                                                                const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
12101     if (VK_SUCCESS != result) return;
12102     for (uint32_t i = 0; i < bindInfoCount; i++) {
12103         UpdateBindImageMemoryState(pBindInfos[i]);
12104     }
12105 }
12106 
PreCallValidateSetEvent(VkDevice device,VkEvent event)12107 bool CoreChecks::PreCallValidateSetEvent(VkDevice device, VkEvent event) {
12108     bool skip = false;
12109     auto event_state = GetEventState(event);
12110     if (event_state) {
12111         if (event_state->write_in_use) {
12112             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
12113                             HandleToUint64(event), kVUID_Core_DrawState_QueueForwardProgress,
12114                             "Cannot call vkSetEvent() on %s that is already in use by a command buffer.",
12115                             report_data->FormatHandle(event).c_str());
12116         }
12117     }
12118     return skip;
12119 }
12120 
PreCallRecordSetEvent(VkDevice device,VkEvent event)12121 void CoreChecks::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
12122     auto event_state = GetEventState(event);
12123     if (event_state) {
12124         event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
12125     }
12126     // Host setting event is visible to all queues immediately so update stageMask for any queue that's seen this event
12127     // TODO : For correctness this needs separate fix to verify that app doesn't make incorrect assumptions about the
12128     // ordering of this command in relation to vkCmd[Set|Reset]Events (see GH297)
12129     for (auto queue_data : queueMap) {
12130         auto event_entry = queue_data.second.eventToStageMap.find(event);
12131         if (event_entry != queue_data.second.eventToStageMap.end()) {
12132             event_entry->second |= VK_PIPELINE_STAGE_HOST_BIT;
12133         }
12134     }
12135 }
12136 
PreCallValidateQueueBindSparse(VkQueue queue,uint32_t bindInfoCount,const VkBindSparseInfo * pBindInfo,VkFence fence)12137 bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
12138                                                 VkFence fence) {
12139     auto queue_data = GetQueueState(queue);
12140     auto pFence = GetFenceState(fence);
12141     bool skip = ValidateFenceForSubmit(pFence);
12142     if (skip) {
12143         return true;
12144     }
12145 
12146     auto queueFlags = GetPhysicalDeviceState()->queue_family_properties[queue_data->queueFamilyIndex].queueFlags;
12147     if (!(queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
12148         skip |= log_msg(
12149             report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
12150             "VUID-vkQueueBindSparse-queuetype",
12151             "Attempting vkQueueBindSparse on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set.");
12152     }
12153 
12154     unordered_set<VkSemaphore> signaled_semaphores;
12155     unordered_set<VkSemaphore> unsignaled_semaphores;
12156     unordered_set<VkSemaphore> internal_semaphores;
12157     for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
12158         const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
12159 
12160         std::vector<SEMAPHORE_WAIT> semaphore_waits;
12161         std::vector<VkSemaphore> semaphore_signals;
12162         for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
12163             VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
12164             auto pSemaphore = GetSemaphoreState(semaphore);
12165             if (pSemaphore && (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
12166                 if (unsignaled_semaphores.count(semaphore) ||
12167                     (!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled))) {
12168                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
12169                                     HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
12170                                     "%s is waiting on %s that has no way to be signaled.", report_data->FormatHandle(queue).c_str(),
12171                                     report_data->FormatHandle(semaphore).c_str());
12172                 } else {
12173                     signaled_semaphores.erase(semaphore);
12174                     unsignaled_semaphores.insert(semaphore);
12175                 }
12176             }
12177             if (pSemaphore && pSemaphore->scope == kSyncScopeExternalTemporary) {
12178                 internal_semaphores.insert(semaphore);
12179             }
12180         }
12181         for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
12182             VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
12183             auto pSemaphore = GetSemaphoreState(semaphore);
12184             if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
12185                 if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
12186                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
12187                                     HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
12188                                     "%s is signaling %s that was previously signaled by %s but has not since "
12189                                     "been waited on by any queue.",
12190                                     report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str(),
12191                                     report_data->FormatHandle(pSemaphore->signaler.first).c_str());
12192                 } else {
12193                     unsignaled_semaphores.erase(semaphore);
12194                     signaled_semaphores.insert(semaphore);
12195                 }
12196             }
12197         }
12198         // Store sparse binding image_state and after binding is complete make sure that any requiring metadata have it bound
12199         std::unordered_set<IMAGE_STATE *> sparse_images;
12200         // If we're binding sparse image memory make sure reqs were queried and note if metadata is required and bound
12201         for (uint32_t i = 0; i < bindInfo.imageBindCount; ++i) {
12202             const auto &image_bind = bindInfo.pImageBinds[i];
12203             auto image_state = GetImageState(image_bind.image);
12204             if (!image_state)
12205                 continue;  // Param/Object validation should report image_bind.image handles being invalid, so just skip here.
12206             sparse_images.insert(image_state);
12207             if (image_state->createInfo.flags & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) {
12208                 if (!image_state->get_sparse_reqs_called || image_state->sparse_requirements.empty()) {
12209                     // For now just warning if sparse image binding occurs without calling to get reqs first
12210                     return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
12211                                    HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
12212                                    "vkQueueBindSparse(): Binding sparse memory to %s without first calling "
12213                                    "vkGetImageSparseMemoryRequirements[2KHR]() to retrieve requirements.",
12214                                    report_data->FormatHandle(image_state->image).c_str());
12215                 }
12216             }
12217             if (!image_state->memory_requirements_checked) {
12218                 // For now just warning if sparse image binding occurs without calling to get reqs first
12219                 return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
12220                                HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
12221                                "vkQueueBindSparse(): Binding sparse memory to %s without first calling "
12222                                "vkGetImageMemoryRequirements() to retrieve requirements.",
12223                                report_data->FormatHandle(image_state->image).c_str());
12224             }
12225         }
12226         for (uint32_t i = 0; i < bindInfo.imageOpaqueBindCount; ++i) {
12227             const auto &image_opaque_bind = bindInfo.pImageOpaqueBinds[i];
12228             auto image_state = GetImageState(bindInfo.pImageOpaqueBinds[i].image);
12229             if (!image_state)
12230                 continue;  // Param/Object validation should report image_bind.image handles being invalid, so just skip here.
12231             sparse_images.insert(image_state);
12232             if (image_state->createInfo.flags & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) {
12233                 if (!image_state->get_sparse_reqs_called || image_state->sparse_requirements.empty()) {
12234                     // For now just warning if sparse image binding occurs without calling to get reqs first
12235                     return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
12236                                    HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
12237                                    "vkQueueBindSparse(): Binding opaque sparse memory to %s without first calling "
12238                                    "vkGetImageSparseMemoryRequirements[2KHR]() to retrieve requirements.",
12239                                    report_data->FormatHandle(image_state->image).c_str());
12240                 }
12241             }
12242             if (!image_state->memory_requirements_checked) {
12243                 // For now just warning if sparse image binding occurs without calling to get reqs first
12244                 return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
12245                                HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
12246                                "vkQueueBindSparse(): Binding opaque sparse memory to %s without first calling "
12247                                "vkGetImageMemoryRequirements() to retrieve requirements.",
12248                                report_data->FormatHandle(image_state->image).c_str());
12249             }
12250             for (uint32_t j = 0; j < image_opaque_bind.bindCount; ++j) {
12251                 if (image_opaque_bind.pBinds[j].flags & VK_SPARSE_MEMORY_BIND_METADATA_BIT) {
12252                     image_state->sparse_metadata_bound = true;
12253                 }
12254             }
12255         }
12256         for (const auto &sparse_image_state : sparse_images) {
12257             if (sparse_image_state->sparse_metadata_required && !sparse_image_state->sparse_metadata_bound) {
12258                 // Warn if sparse image binding metadata required for image with sparse binding, but metadata not bound
12259                 return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
12260                                HandleToUint64(sparse_image_state->image), kVUID_Core_MemTrack_InvalidState,
12261                                "vkQueueBindSparse(): Binding sparse memory to %s which requires a metadata aspect but no "
12262                                "binding with VK_SPARSE_MEMORY_BIND_METADATA_BIT set was made.",
12263                                report_data->FormatHandle(sparse_image_state->image).c_str());
12264             }
12265         }
12266     }
12267 
12268     return skip;
12269 }
// Record state after a successful vkQueueBindSparse(): fence submission,
// sparse memory-binding tracking, semaphore wait/signal bookkeeping, and one
// queue submission entry per batch used later for retirement.
void CoreChecks::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                               VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    // Sequence number up to which work can be retired immediately; nonzero only
    // when an external fence/semaphore hides the corresponding wait from us.
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // std::max(1u, ...) ensures a fence-only call still gets a slot.
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
            if (!external_sync_warning) {
                // One-shot warning: external sync objects disable lifecycle validation.
                external_sync_warning = true;
                log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
                        kVUID_Core_DrawState_QueueForwardProgress,
                        "vkQueueBindSparse(): Signaling external %s on %s will disable validation of preceding command "
                        "buffer lifecycle states and the in-use status of associated objects.",
                        report_data->FormatHandle(fence).c_str(), report_data->FormatHandle(queue).c_str());
            }
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        // Per-batch semaphore bookkeeping mirrored from queue submit handling.
        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<VkSemaphore> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        // Record which queue/seq signaled it so retirement can chain.
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    // Waiting consumes the signal.
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        // Temporary import reverts to internal scope once waited on.
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    // The +1 is this batch's own (not-yet-pushed) submission slot.
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(semaphore);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                    if (!external_sync_warning) {
                        external_sync_warning = true;
                        log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
                                HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
                                "vkQueueBindSparse(): Signaling external %s on %s will disable validation of "
                                "preceding command buffer lifecycle states and the in-use status of associated objects.",
                                report_data->FormatHandle(semaphore).c_str(), report_data->FormatHandle(queue).c_str());
                    }
                }
            }
        }

        // Only the final batch carries the user's fence.
        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq, true);
    }
}
12382 
PostCallRecordCreateSemaphore(VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore,VkResult result)12383 void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
12384                                                            const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
12385                                                            VkResult result) {
12386     if (VK_SUCCESS != result) return;
12387     std::unique_ptr<SEMAPHORE_STATE> semaphore_state(new SEMAPHORE_STATE{});
12388     semaphore_state->signaler.first = VK_NULL_HANDLE;
12389     semaphore_state->signaler.second = 0;
12390     semaphore_state->signaled = false;
12391     semaphore_state->scope = kSyncScopeInternal;
12392     semaphoreMap[*pSemaphore] = std::move(semaphore_state);
12393 }
12394 
ValidateImportSemaphore(VkSemaphore semaphore,const char * caller_name)12395 bool CoreChecks::ValidateImportSemaphore(VkSemaphore semaphore, const char *caller_name) {
12396     bool skip = false;
12397     SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
12398     if (sema_node) {
12399         const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
12400         skip |= ValidateObjectNotInUse(sema_node, obj_struct, caller_name, kVUIDUndefined);
12401     }
12402     return skip;
12403 }
12404 
RecordImportSemaphoreState(VkSemaphore semaphore,VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,VkSemaphoreImportFlagsKHR flags)12405 void CoreChecks::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
12406                                             VkSemaphoreImportFlagsKHR flags) {
12407     SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
12408     if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
12409         if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
12410             sema_node->scope == kSyncScopeInternal) {
12411             sema_node->scope = kSyncScopeExternalTemporary;
12412         } else {
12413             sema_node->scope = kSyncScopeExternalPermanent;
12414         }
12415     }
12416 }
12417 
12418 #ifdef VK_USE_PLATFORM_WIN32_KHR
PreCallValidateImportSemaphoreWin32HandleKHR(VkDevice device,const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo)12419 bool CoreChecks::PreCallValidateImportSemaphoreWin32HandleKHR(
12420     VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo) {
12421     return ValidateImportSemaphore(pImportSemaphoreWin32HandleInfo->semaphore, "vkImportSemaphoreWin32HandleKHR");
12422 }
12423 
PostCallRecordImportSemaphoreWin32HandleKHR(VkDevice device,const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,VkResult result)12424 void CoreChecks::PostCallRecordImportSemaphoreWin32HandleKHR(
12425     VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
12426     if (VK_SUCCESS != result) return;
12427     RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
12428                                pImportSemaphoreWin32HandleInfo->flags);
12429 }
12430 #endif  // VK_USE_PLATFORM_WIN32_KHR
12431 
PreCallValidateImportSemaphoreFdKHR(VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo)12432 bool CoreChecks::PreCallValidateImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo) {
12433     return ValidateImportSemaphore(pImportSemaphoreFdInfo->semaphore, "vkImportSemaphoreFdKHR");
12434 }
12435 
PostCallRecordImportSemaphoreFdKHR(VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,VkResult result)12436 void CoreChecks::PostCallRecordImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
12437                                                     VkResult result) {
12438     if (VK_SUCCESS != result) return;
12439     RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
12440                                pImportSemaphoreFdInfo->flags);
12441 }
12442 
RecordGetExternalSemaphoreState(VkSemaphore semaphore,VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type)12443 void CoreChecks::RecordGetExternalSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
12444     SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
12445     if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
12446         // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
12447         semaphore_state->scope = kSyncScopeExternalPermanent;
12448     }
12449 }
12450 
12451 #ifdef VK_USE_PLATFORM_WIN32_KHR
PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,VkResult result)12452 void CoreChecks::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
12453                                                           const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
12454                                                           HANDLE *pHandle, VkResult result) {
12455     if (VK_SUCCESS != result) return;
12456     RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
12457 }
12458 #endif
12459 
PostCallRecordGetSemaphoreFdKHR(VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd,VkResult result)12460 void CoreChecks::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
12461                                                  VkResult result) {
12462     if (VK_SUCCESS != result) return;
12463     RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
12464 }
12465 
ValidateImportFence(VkFence fence,const char * caller_name)12466 bool CoreChecks::ValidateImportFence(VkFence fence, const char *caller_name) {
12467     FENCE_STATE *fence_node = GetFenceState(fence);
12468     bool skip = false;
12469     if (fence_node && fence_node->scope == kSyncScopeInternal && fence_node->state == FENCE_INFLIGHT) {
12470         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
12471                         kVUIDUndefined, "Cannot call %s on %s that is currently in use.", caller_name,
12472                         report_data->FormatHandle(fence).c_str());
12473     }
12474     return skip;
12475 }
12476 
RecordImportFenceState(VkFence fence,VkExternalFenceHandleTypeFlagBitsKHR handle_type,VkFenceImportFlagsKHR flags)12477 void CoreChecks::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
12478                                         VkFenceImportFlagsKHR flags) {
12479     FENCE_STATE *fence_node = GetFenceState(fence);
12480     if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
12481         if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
12482             fence_node->scope == kSyncScopeInternal) {
12483             fence_node->scope = kSyncScopeExternalTemporary;
12484         } else {
12485             fence_node->scope = kSyncScopeExternalPermanent;
12486         }
12487     }
12488 }
12489 
12490 #ifdef VK_USE_PLATFORM_WIN32_KHR
PreCallValidateImportFenceWin32HandleKHR(VkDevice device,const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo)12491 bool CoreChecks::PreCallValidateImportFenceWin32HandleKHR(VkDevice device,
12492                                                           const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo) {
12493     return ValidateImportFence(pImportFenceWin32HandleInfo->fence, "vkImportFenceWin32HandleKHR");
12494 }
PostCallRecordImportFenceWin32HandleKHR(VkDevice device,const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,VkResult result)12495 void CoreChecks::PostCallRecordImportFenceWin32HandleKHR(VkDevice device,
12496                                                          const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo,
12497                                                          VkResult result) {
12498     if (VK_SUCCESS != result) return;
12499     RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
12500                            pImportFenceWin32HandleInfo->flags);
12501 }
12502 #endif  // VK_USE_PLATFORM_WIN32_KHR
12503 
PreCallValidateImportFenceFdKHR(VkDevice device,const VkImportFenceFdInfoKHR * pImportFenceFdInfo)12504 bool CoreChecks::PreCallValidateImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo) {
12505     return ValidateImportFence(pImportFenceFdInfo->fence, "vkImportFenceFdKHR");
12506 }
PostCallRecordImportFenceFdKHR(VkDevice device,const VkImportFenceFdInfoKHR * pImportFenceFdInfo,VkResult result)12507 void CoreChecks::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
12508                                                 VkResult result) {
12509     if (VK_SUCCESS != result) return;
12510     RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
12511 }
12512 
RecordGetExternalFenceState(VkFence fence,VkExternalFenceHandleTypeFlagBitsKHR handle_type)12513 void CoreChecks::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
12514     FENCE_STATE *fence_state = GetFenceState(fence);
12515     if (fence_state) {
12516         if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
12517             // Export with reference transference becomes external
12518             fence_state->scope = kSyncScopeExternalPermanent;
12519         } else if (fence_state->scope == kSyncScopeInternal) {
12520             // Export with copy transference has a side effect of resetting the fence
12521             fence_state->state = FENCE_UNSIGNALED;
12522         }
12523     }
12524 }
12525 
12526 #ifdef VK_USE_PLATFORM_WIN32_KHR
PostCallRecordGetFenceWin32HandleKHR(VkDevice device,const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,VkResult result)12527 void CoreChecks::PostCallRecordGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
12528                                                       HANDLE *pHandle, VkResult result) {
12529     if (VK_SUCCESS != result) return;
12530     RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
12531 }
12532 #endif
12533 
PostCallRecordGetFenceFdKHR(VkDevice device,const VkFenceGetFdInfoKHR * pGetFdInfo,int * pFd,VkResult result)12534 void CoreChecks::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd, VkResult result) {
12535     if (VK_SUCCESS != result) return;
12536     RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
12537 }
12538 
PostCallRecordCreateEvent(VkDevice device,const VkEventCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkEvent * pEvent,VkResult result)12539 void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
12540                                                        const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
12541     if (VK_SUCCESS != result) return;
12542     eventMap[*pEvent].write_in_use = 0;
12543     eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
12544 }
12545 
ValidateCreateSwapchain(const char * func_name,VkSwapchainCreateInfoKHR const * pCreateInfo,const SURFACE_STATE * surface_state,const SWAPCHAIN_NODE * old_swapchain_state) const12546 bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreateInfoKHR const *pCreateInfo,
12547                                          const SURFACE_STATE *surface_state, const SWAPCHAIN_NODE *old_swapchain_state) const {
12548     // All physical devices and queue families are required to be able to present to any native window on Android; require the
12549     // application to have established support on any other platform.
12550     if (!instance_extensions.vk_khr_android_surface) {
12551         auto support_predicate = [this](decltype(surface_state->gpu_queue_support)::value_type qs) -> bool {
12552             // TODO: should restrict search only to queue families of VkDeviceQueueCreateInfos, not whole phys. device
12553             return (qs.first.gpu == physical_device) && qs.second;
12554         };
12555         const auto &support = surface_state->gpu_queue_support;
12556         bool is_supported = std::any_of(support.begin(), support.end(), support_predicate);
12557 
12558         if (!is_supported) {
12559             if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12560                         "VUID-VkSwapchainCreateInfoKHR-surface-01270",
12561                         "%s: pCreateInfo->surface is not known at this time to be supported for presentation by this device. The "
12562                         "vkGetPhysicalDeviceSurfaceSupportKHR() must be called beforehand, and it must return VK_TRUE support with "
12563                         "this surface for at least one queue family of this device.",
12564                         func_name))
12565                 return true;
12566         }
12567     }
12568 
12569     if (old_swapchain_state) {
12570         if (old_swapchain_state->createInfo.surface != pCreateInfo->surface) {
12571             if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
12572                         HandleToUint64(pCreateInfo->oldSwapchain), "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
12573                         "%s: pCreateInfo->oldSwapchain's surface is not pCreateInfo->surface", func_name))
12574                 return true;
12575         }
12576         if (old_swapchain_state->retired) {
12577             if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
12578                         HandleToUint64(pCreateInfo->oldSwapchain), "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
12579                         "%s: pCreateInfo->oldSwapchain is retired", func_name))
12580                 return true;
12581         }
12582     }
12583 
12584     if ((pCreateInfo->imageExtent.width == 0) || (pCreateInfo->imageExtent.height == 0)) {
12585         if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12586                     "VUID-VkSwapchainCreateInfoKHR-imageExtent-01689", "%s: pCreateInfo->imageExtent = (%d, %d) which is illegal.",
12587                     func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height))
12588             return true;
12589     }
12590 
12591     auto physical_device_state = GetPhysicalDeviceState();
12592     bool skip = false;
12593     VkSurfaceTransformFlagBitsKHR currentTransform = physical_device_state->surfaceCapabilities.currentTransform;
12594     if ((pCreateInfo->preTransform & currentTransform) != pCreateInfo->preTransform) {
12595         skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
12596                         HandleToUint64(physical_device), kVUID_Core_Swapchain_PreTransform,
12597                         "%s: pCreateInfo->preTransform (%s) doesn't match the currentTransform (%s) returned by "
12598                         "vkGetPhysicalDeviceSurfaceCapabilitiesKHR, the presentation engine will transform the image "
12599                         "content as part of the presentation operation.",
12600                         func_name, string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform),
12601                         string_VkSurfaceTransformFlagBitsKHR(currentTransform));
12602     }
12603 
12604     if (physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState == UNCALLED) {
12605         if (log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
12606                     HandleToUint64(physical_device), kVUID_Core_DrawState_SwapchainCreateBeforeQuery,
12607                     "%s: surface capabilities not retrieved for this physical device", func_name))
12608             return true;
12609     }
12610 
12611     VkSurfaceCapabilitiesKHR capabilities{};
12612     DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device_state->phys_device, pCreateInfo->surface, &capabilities);
12613     // Validate pCreateInfo->minImageCount against VkSurfaceCapabilitiesKHR::{min|max}ImageCount:
12614     if (pCreateInfo->minImageCount < capabilities.minImageCount) {
12615         if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12616                     "VUID-VkSwapchainCreateInfoKHR-minImageCount-01271",
12617                     "%s called with minImageCount = %d, which is outside the bounds returned by "
12618                     "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
12619                     func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
12620             return true;
12621     }
12622 
12623     if ((capabilities.maxImageCount > 0) && (pCreateInfo->minImageCount > capabilities.maxImageCount)) {
12624         if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12625                     "VUID-VkSwapchainCreateInfoKHR-minImageCount-01272",
12626                     "%s called with minImageCount = %d, which is outside the bounds returned by "
12627                     "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
12628                     func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
12629             return true;
12630     }
12631 
12632     // Validate pCreateInfo->imageExtent against VkSurfaceCapabilitiesKHR::{current|min|max}ImageExtent:
12633     if ((pCreateInfo->imageExtent.width < capabilities.minImageExtent.width) ||
12634         (pCreateInfo->imageExtent.width > capabilities.maxImageExtent.width) ||
12635         (pCreateInfo->imageExtent.height < capabilities.minImageExtent.height) ||
12636         (pCreateInfo->imageExtent.height > capabilities.maxImageExtent.height)) {
12637         if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12638                     "VUID-VkSwapchainCreateInfoKHR-imageExtent-01274",
12639                     "%s called with imageExtent = (%d,%d), which is outside the bounds returned by "
12640                     "vkGetPhysicalDeviceSurfaceCapabilitiesKHR(): currentExtent = (%d,%d), minImageExtent = (%d,%d), "
12641                     "maxImageExtent = (%d,%d).",
12642                     func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height, capabilities.currentExtent.width,
12643                     capabilities.currentExtent.height, capabilities.minImageExtent.width, capabilities.minImageExtent.height,
12644                     capabilities.maxImageExtent.width, capabilities.maxImageExtent.height))
12645             return true;
12646     }
12647     // pCreateInfo->preTransform should have exactly one bit set, and that bit must also be set in
12648     // VkSurfaceCapabilitiesKHR::supportedTransforms.
12649     if (!pCreateInfo->preTransform || (pCreateInfo->preTransform & (pCreateInfo->preTransform - 1)) ||
12650         !(pCreateInfo->preTransform & capabilities.supportedTransforms)) {
12651         // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message.  Build
12652         // it up a little at a time, and then log it:
12653         std::string errorString = "";
12654         char str[1024];
12655         // Here's the first part of the message:
12656         sprintf(str, "%s called with a non-supported pCreateInfo->preTransform (i.e. %s).  Supported values are:\n", func_name,
12657                 string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform));
12658         errorString += str;
12659         for (int i = 0; i < 32; i++) {
12660             // Build up the rest of the message:
12661             if ((1 << i) & capabilities.supportedTransforms) {
12662                 const char *newStr = string_VkSurfaceTransformFlagBitsKHR((VkSurfaceTransformFlagBitsKHR)(1 << i));
12663                 sprintf(str, "    %s\n", newStr);
12664                 errorString += str;
12665             }
12666         }
12667         // Log the message that we've built up:
12668         if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12669                     "VUID-VkSwapchainCreateInfoKHR-preTransform-01279", "%s.", errorString.c_str()))
12670             return true;
12671     }
12672 
12673     // pCreateInfo->compositeAlpha should have exactly one bit set, and that bit must also be set in
12674     // VkSurfaceCapabilitiesKHR::supportedCompositeAlpha
12675     if (!pCreateInfo->compositeAlpha || (pCreateInfo->compositeAlpha & (pCreateInfo->compositeAlpha - 1)) ||
12676         !((pCreateInfo->compositeAlpha) & capabilities.supportedCompositeAlpha)) {
12677         // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message.  Build
12678         // it up a little at a time, and then log it:
12679         std::string errorString = "";
12680         char str[1024];
12681         // Here's the first part of the message:
12682         sprintf(str, "%s called with a non-supported pCreateInfo->compositeAlpha (i.e. %s).  Supported values are:\n", func_name,
12683                 string_VkCompositeAlphaFlagBitsKHR(pCreateInfo->compositeAlpha));
12684         errorString += str;
12685         for (int i = 0; i < 32; i++) {
12686             // Build up the rest of the message:
12687             if ((1 << i) & capabilities.supportedCompositeAlpha) {
12688                 const char *newStr = string_VkCompositeAlphaFlagBitsKHR((VkCompositeAlphaFlagBitsKHR)(1 << i));
12689                 sprintf(str, "    %s\n", newStr);
12690                 errorString += str;
12691             }
12692         }
12693         // Log the message that we've built up:
12694         if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12695                     "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280", "%s.", errorString.c_str()))
12696             return true;
12697     }
12698     // Validate pCreateInfo->imageArrayLayers against VkSurfaceCapabilitiesKHR::maxImageArrayLayers:
12699     if (pCreateInfo->imageArrayLayers > capabilities.maxImageArrayLayers) {
12700         if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12701                     "VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275",
12702                     "%s called with a non-supported imageArrayLayers (i.e. %d).  Maximum value is %d.", func_name,
12703                     pCreateInfo->imageArrayLayers, capabilities.maxImageArrayLayers))
12704             return true;
12705     }
12706     // Validate pCreateInfo->imageUsage against VkSurfaceCapabilitiesKHR::supportedUsageFlags:
12707     if (pCreateInfo->imageUsage != (pCreateInfo->imageUsage & capabilities.supportedUsageFlags)) {
12708         if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12709                     "VUID-VkSwapchainCreateInfoKHR-imageUsage-01276",
12710                     "%s called with a non-supported pCreateInfo->imageUsage (i.e. 0x%08x).  Supported flag bits are 0x%08x.",
12711                     func_name, pCreateInfo->imageUsage, capabilities.supportedUsageFlags))
12712             return true;
12713     }
12714 
12715     if (device_extensions.vk_khr_surface_protected_capabilities && (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)) {
12716         VkPhysicalDeviceSurfaceInfo2KHR surfaceInfo = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR};
12717         surfaceInfo.surface = pCreateInfo->surface;
12718         VkSurfaceProtectedCapabilitiesKHR surfaceProtectedCapabilities = {VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR};
12719         VkSurfaceCapabilities2KHR surfaceCapabilities = {VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR};
12720         surfaceCapabilities.pNext = &surfaceProtectedCapabilities;
12721         DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(physical_device_state->phys_device, &surfaceInfo, &surfaceCapabilities);
12722 
12723         if (!surfaceProtectedCapabilities.supportsProtected) {
12724             if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12725                         "VUID-VkSwapchainCreateInfoKHR-flags-03187",
12726                         "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR but the surface "
12727                         "capabilities does not have VkSurfaceProtectedCapabilitiesKHR.supportsProtected set to VK_TRUE.",
12728                         func_name))
12729                 return true;
12730         }
12731     }
12732 
12733     std::vector<VkSurfaceFormatKHR> surface_formats;
12734     const auto *surface_formats_ref = &surface_formats;
12735 
12736     // Validate pCreateInfo values with the results of vkGetPhysicalDeviceSurfaceFormatsKHR():
12737     if (physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState != QUERY_DETAILS) {
12738         if (log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12739                     kVUID_Core_DrawState_SwapchainCreateBeforeQuery,
12740                     "%s called before getting format(s) from vkGetPhysicalDeviceSurfaceFormatsKHR().", func_name)) {
12741             return true;
12742         }
12743         uint32_t surface_format_count = 0;
12744         DispatchGetPhysicalDeviceSurfaceFormatsKHR(physical_device, pCreateInfo->surface, &surface_format_count, nullptr);
12745         surface_formats.resize(surface_format_count);
12746         DispatchGetPhysicalDeviceSurfaceFormatsKHR(physical_device, pCreateInfo->surface, &surface_format_count,
12747                                                    &surface_formats[0]);
12748     } else {
12749         surface_formats_ref = &physical_device_state->surface_formats;
12750     }
12751 
12752     {
12753         // Validate pCreateInfo->imageFormat against VkSurfaceFormatKHR::format:
12754         bool foundFormat = false;
12755         bool foundColorSpace = false;
12756         bool foundMatch = false;
12757         for (auto const &format : *surface_formats_ref) {
12758             if (pCreateInfo->imageFormat == format.format) {
12759                 // Validate pCreateInfo->imageColorSpace against VkSurfaceFormatKHR::colorSpace:
12760                 foundFormat = true;
12761                 if (pCreateInfo->imageColorSpace == format.colorSpace) {
12762                     foundMatch = true;
12763                     break;
12764                 }
12765             } else {
12766                 if (pCreateInfo->imageColorSpace == format.colorSpace) {
12767                     foundColorSpace = true;
12768                 }
12769             }
12770         }
12771         if (!foundMatch) {
12772             if (!foundFormat) {
12773                 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
12774                             HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
12775                             "%s called with a non-supported pCreateInfo->imageFormat (i.e. %d).", func_name,
12776                             pCreateInfo->imageFormat))
12777                     return true;
12778             }
12779             if (!foundColorSpace) {
12780                 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
12781                             HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
12782                             "%s called with a non-supported pCreateInfo->imageColorSpace (i.e. %d).", func_name,
12783                             pCreateInfo->imageColorSpace))
12784                     return true;
12785             }
12786         }
12787     }
12788 
12789     // Validate pCreateInfo values with the results of vkGetPhysicalDeviceSurfacePresentModesKHR():
12790     if (physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState != QUERY_DETAILS) {
12791         // FIFO is required to always be supported
12792         if (pCreateInfo->presentMode != VK_PRESENT_MODE_FIFO_KHR) {
12793             if (log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
12794                         HandleToUint64(device), kVUID_Core_DrawState_SwapchainCreateBeforeQuery,
12795                         "%s called before getting present mode(s) from vkGetPhysicalDeviceSurfacePresentModesKHR().", func_name))
12796                 return true;
12797         }
12798     } else {
12799         // Validate pCreateInfo->presentMode against vkGetPhysicalDeviceSurfacePresentModesKHR():
12800         bool foundMatch = std::find(physical_device_state->present_modes.begin(), physical_device_state->present_modes.end(),
12801                                     pCreateInfo->presentMode) != physical_device_state->present_modes.end();
12802         if (!foundMatch) {
12803             if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12804                         "VUID-VkSwapchainCreateInfoKHR-presentMode-01281", "%s called with a non-supported presentMode (i.e. %s).",
12805                         func_name, string_VkPresentModeKHR(pCreateInfo->presentMode)))
12806                 return true;
12807         }
12808     }
12809     // Validate state for shared presentable case
12810     if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
12811         VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
12812         if (!device_extensions.vk_khr_shared_presentable_image) {
12813             if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12814                         kVUID_Core_DrawState_ExtensionNotEnabled,
12815                         "%s called with presentMode %s which requires the VK_KHR_shared_presentable_image extension, which has not "
12816                         "been enabled.",
12817                         func_name, string_VkPresentModeKHR(pCreateInfo->presentMode)))
12818                 return true;
12819         } else if (pCreateInfo->minImageCount != 1) {
12820             if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12821                         "VUID-VkSwapchainCreateInfoKHR-minImageCount-01383",
12822                         "%s called with presentMode %s, but minImageCount value is %d. For shared presentable image, minImageCount "
12823                         "must be 1.",
12824                         func_name, string_VkPresentModeKHR(pCreateInfo->presentMode), pCreateInfo->minImageCount))
12825                 return true;
12826         }
12827     }
12828 
12829     if (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
12830         if (!device_extensions.vk_khr_swapchain_mutable_format) {
12831             if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12832                         kVUID_Core_DrawState_ExtensionNotEnabled,
12833                         "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR which requires the "
12834                         "VK_KHR_swapchain_mutable_format extension, which has not been enabled.",
12835                         func_name))
12836                 return true;
12837         } else {
12838             const auto *image_format_list = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(pCreateInfo->pNext);
12839             if (image_format_list == nullptr) {
12840                 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
12841                             HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
12842                             "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but the pNext chain of "
12843                             "pCreateInfo does not contain an instance of VkImageFormatListCreateInfoKHR.",
12844                             func_name))
12845                     return true;
12846             } else if (image_format_list->viewFormatCount == 0) {
12847                 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
12848                             HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
12849                             "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but the viewFormatCount "
12850                             "member of VkImageFormatListCreateInfoKHR in the pNext chain is zero.",
12851                             func_name))
12852                     return true;
12853             } else {
12854                 bool found_base_format = false;
12855                 for (uint32_t i = 0; i < image_format_list->viewFormatCount; ++i) {
12856                     if (image_format_list->pViewFormats[i] == pCreateInfo->imageFormat) {
12857                         found_base_format = true;
12858                         break;
12859                     }
12860                 }
12861                 if (!found_base_format) {
12862                     if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
12863                                 HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
12864                                 "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but none of the "
12865                                 "elements of the pViewFormats member of VkImageFormatListCreateInfoKHR match "
12866                                 "pCreateInfo->imageFormat.",
12867                                 func_name))
12868                         return true;
12869                 }
12870             }
12871         }
12872     }
12873 
12874     if ((pCreateInfo->imageSharingMode == VK_SHARING_MODE_CONCURRENT) && pCreateInfo->pQueueFamilyIndices) {
12875         bool skip1 =
12876             ValidateQueueFamilies(pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices, "vkCreateBuffer",
12877                                   "pCreateInfo->pQueueFamilyIndices", "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428",
12878                                   "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428", false);
12879         if (skip1) return true;
12880     }
12881 
12882     return skip;
12883 }
12884 
PreCallValidateCreateSwapchainKHR(VkDevice device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchain)12885 bool CoreChecks::PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
12886                                                    const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
12887     const auto surface_state = GetSurfaceState(pCreateInfo->surface);
12888     const auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
12889     return ValidateCreateSwapchain("vkCreateSwapchainKHR()", pCreateInfo, surface_state, old_swapchain_state);
12890 }
12891 
RecordCreateSwapchainState(VkResult result,const VkSwapchainCreateInfoKHR * pCreateInfo,VkSwapchainKHR * pSwapchain,SURFACE_STATE * surface_state,SWAPCHAIN_NODE * old_swapchain_state)12892 void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
12893                                                         VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
12894                                                         SWAPCHAIN_NODE *old_swapchain_state) {
12895     if (VK_SUCCESS == result) {
12896         auto swapchain_state = unique_ptr<SWAPCHAIN_NODE>(new SWAPCHAIN_NODE(pCreateInfo, *pSwapchain));
12897         if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
12898             VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
12899             swapchain_state->shared_presentable = true;
12900         }
12901         surface_state->swapchain = swapchain_state.get();
12902         swapchainMap[*pSwapchain] = std::move(swapchain_state);
12903     } else {
12904         surface_state->swapchain = nullptr;
12905     }
12906     // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
12907     if (old_swapchain_state) {
12908         old_swapchain_state->retired = true;
12909     }
12910     return;
12911 }
12912 
PostCallRecordCreateSwapchainKHR(VkDevice device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchain,VkResult result)12913 void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
12914                                                               const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
12915                                                               VkResult result) {
12916     auto surface_state = GetSurfaceState(pCreateInfo->surface);
12917     auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
12918     RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
12919 }
12920 
PreCallRecordDestroySwapchainKHR(VkDevice device,VkSwapchainKHR swapchain,const VkAllocationCallbacks * pAllocator)12921 void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
12922                                                               const VkAllocationCallbacks *pAllocator) {
12923     if (!swapchain) return;
12924     auto swapchain_data = GetSwapchainState(swapchain);
12925     if (swapchain_data) {
12926         for (const auto &swapchain_image : swapchain_data->images) {
12927             ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image, kVulkanObjectTypeImage));
12928             imageMap.erase(swapchain_image);
12929         }
12930 
12931         auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
12932         if (surface_state) {
12933             if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
12934         }
12935 
12936         swapchainMap.erase(swapchain);
12937     }
12938 }
PreCallRecordDestroySwapchainKHR(VkDevice device,VkSwapchainKHR swapchain,const VkAllocationCallbacks * pAllocator)12939 void CoreChecks::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
12940                                                   const VkAllocationCallbacks *pAllocator) {
12941     if (swapchain) {
12942         auto swapchain_data = GetSwapchainState(swapchain);
12943         if (swapchain_data) {
12944             for (const auto &swapchain_image : swapchain_data->images) {
12945                 auto image_sub = imageSubresourceMap.find(swapchain_image);
12946                 if (image_sub != imageSubresourceMap.end()) {
12947                     for (auto imgsubpair : image_sub->second) {
12948                         auto image_item = imageLayoutMap.find(imgsubpair);
12949                         if (image_item != imageLayoutMap.end()) {
12950                             imageLayoutMap.erase(image_item);
12951                         }
12952                     }
12953                     imageSubresourceMap.erase(image_sub);
12954                 }
12955                 EraseQFOImageRelaseBarriers(swapchain_image);
12956             }
12957         }
12958     }
12959     StateTracker::PreCallRecordDestroySwapchainKHR(device, swapchain, pAllocator);
12960 }
12961 
PreCallValidateGetSwapchainImagesKHR(VkDevice device,VkSwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VkImage * pSwapchainImages)12962 bool CoreChecks::PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
12963                                                       VkImage *pSwapchainImages) {
12964     auto swapchain_state = GetSwapchainState(swapchain);
12965     bool skip = false;
12966     if (swapchain_state && pSwapchainImages) {
12967         // Compare the preliminary value of *pSwapchainImageCount with the value this time:
12968         if (swapchain_state->vkGetSwapchainImagesKHRState == UNCALLED) {
12969             skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
12970                             HandleToUint64(device), kVUID_Core_Swapchain_PriorCount,
12971                             "vkGetSwapchainImagesKHR() called with non-NULL pSwapchainImageCount; but no prior positive value has "
12972                             "been seen for pSwapchainImages.");
12973         } else if (*pSwapchainImageCount > swapchain_state->get_swapchain_image_count) {
12974             skip |=
12975                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
12976                         kVUID_Core_Swapchain_InvalidCount,
12977                         "vkGetSwapchainImagesKHR() called with non-NULL pSwapchainImageCount, and with pSwapchainImages set to a "
12978                         "value (%d) that is greater than the value (%d) that was returned when pSwapchainImageCount was NULL.",
12979                         *pSwapchainImageCount, swapchain_state->get_swapchain_image_count);
12980         }
12981     }
12982     return skip;
12983 }
12984 
PostCallRecordGetSwapchainImagesKHR(VkDevice device,VkSwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VkImage * pSwapchainImages,VkResult result)12985 void CoreChecks::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
12986                                                      VkImage *pSwapchainImages, VkResult result) {
12987     if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
12988     auto swapchain_state = GetSwapchainState(swapchain);
12989 
12990     if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
12991 
12992     if (pSwapchainImages) {
12993         if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
12994             swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
12995         }
12996         for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
12997             if (swapchain_state->images[i] != VK_NULL_HANDLE) continue;  // Already retrieved this.
12998 
12999             IMAGE_LAYOUT_STATE image_layout_node;
13000             image_layout_node.layout = VK_IMAGE_LAYOUT_UNDEFINED;
13001             image_layout_node.format = swapchain_state->createInfo.imageFormat;
13002             // Add imageMap entries for each swapchain image
13003             VkImageCreateInfo image_ci = {};
13004             image_ci.flags = 0;
13005             image_ci.imageType = VK_IMAGE_TYPE_2D;
13006             image_ci.format = swapchain_state->createInfo.imageFormat;
13007             image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
13008             image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
13009             image_ci.extent.depth = 1;
13010             image_ci.mipLevels = 1;
13011             image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
13012             image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
13013             image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
13014             image_ci.usage = swapchain_state->createInfo.imageUsage;
13015             image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
13016             imageMap[pSwapchainImages[i]] = unique_ptr<IMAGE_STATE>(new IMAGE_STATE(pSwapchainImages[i], &image_ci));
13017             auto &image_state = imageMap[pSwapchainImages[i]];
13018             image_state->valid = false;
13019             image_state->binding.mem = MEMTRACKER_SWAP_CHAIN_IMAGE_KEY;
13020             swapchain_state->images[i] = pSwapchainImages[i];
13021             ImageSubresourcePair subpair = {pSwapchainImages[i], false, VkImageSubresource()};
13022             imageSubresourceMap[pSwapchainImages[i]].push_back(subpair);
13023             imageLayoutMap[subpair] = image_layout_node;
13024         }
13025     }
13026 
13027     if (*pSwapchainImageCount) {
13028         if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
13029             swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
13030         }
13031         swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
13032     }
13033 }
13034 
PreCallValidateQueuePresentKHR(VkQueue queue,const VkPresentInfoKHR * pPresentInfo)13035 bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
13036     bool skip = false;
13037     auto queue_state = GetQueueState(queue);
13038 
13039     for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
13040         auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
13041         if (pSemaphore && !pSemaphore->signaled) {
13042             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
13043                             kVUID_Core_DrawState_QueueForwardProgress, "%s is waiting on %s that has no way to be signaled.",
13044                             report_data->FormatHandle(queue).c_str(),
13045                             report_data->FormatHandle(pPresentInfo->pWaitSemaphores[i]).c_str());
13046         }
13047     }
13048 
13049     for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
13050         auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
13051         if (swapchain_data) {
13052             if (pPresentInfo->pImageIndices[i] >= swapchain_data->images.size()) {
13053                 skip |=
13054                     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13055                             HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainInvalidImage,
13056                             "vkQueuePresentKHR: Swapchain image index too large (%u). There are only %u images in this swapchain.",
13057                             pPresentInfo->pImageIndices[i], (uint32_t)swapchain_data->images.size());
13058             } else {
13059                 auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]];
13060                 auto image_state = GetImageState(image);
13061 
13062                 if (image_state->shared_presentable) {
13063                     image_state->layout_locked = true;
13064                 }
13065 
13066                 if (!image_state->acquired) {
13067                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13068                                     HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainImageNotAcquired,
13069                                     "vkQueuePresentKHR: Swapchain image index %u has not been acquired.",
13070                                     pPresentInfo->pImageIndices[i]);
13071                 }
13072 
13073                 vector<VkImageLayout> layouts;
13074                 if (FindLayouts(image, layouts)) {
13075                     for (auto layout : layouts) {
13076                         if ((layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) && (!device_extensions.vk_khr_shared_presentable_image ||
13077                                                                             (layout != VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR))) {
13078                             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
13079                                             HandleToUint64(queue), "VUID-VkPresentInfoKHR-pImageIndices-01296",
13080                                             "Images passed to present must be in layout VK_IMAGE_LAYOUT_PRESENT_SRC_KHR or "
13081                                             "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR but is in %s.",
13082                                             string_VkImageLayout(layout));
13083                         }
13084                     }
13085                 }
13086             }
13087 
13088             // All physical devices and queue families are required to be able to present to any native window on Android; require
13089             // the application to have established support on any other platform.
13090             if (!instance_extensions.vk_khr_android_surface) {
13091                 auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
13092                 auto support_it = surface_state->gpu_queue_support.find({physical_device, queue_state->queueFamilyIndex});
13093 
13094                 if (support_it == surface_state->gpu_queue_support.end()) {
13095                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13096                                     HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainUnsupportedQueue,
13097                                     "vkQueuePresentKHR: Presenting image without calling vkGetPhysicalDeviceSurfaceSupportKHR");
13098                 } else if (!support_it->second) {
13099                     skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13100                                     HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-vkQueuePresentKHR-pSwapchains-01292",
13101                                     "vkQueuePresentKHR: Presenting image on queue that cannot present to this surface.");
13102                 }
13103             }
13104         }
13105     }
13106     if (pPresentInfo && pPresentInfo->pNext) {
13107         // Verify ext struct
13108         const auto *present_regions = lvl_find_in_chain<VkPresentRegionsKHR>(pPresentInfo->pNext);
13109         if (present_regions) {
13110             for (uint32_t i = 0; i < present_regions->swapchainCount; ++i) {
13111                 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
13112                 assert(swapchain_data);
13113                 VkPresentRegionKHR region = present_regions->pRegions[i];
13114                 for (uint32_t j = 0; j < region.rectangleCount; ++j) {
13115                     VkRectLayerKHR rect = region.pRectangles[j];
13116                     if ((rect.offset.x + rect.extent.width) > swapchain_data->createInfo.imageExtent.width) {
13117                         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13118                                         HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-offset-01261",
13119                                         "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, "
13120                                         "pRegion[%i].pRectangles[%i], the sum of offset.x (%i) and extent.width (%i) is greater "
13121                                         "than the corresponding swapchain's imageExtent.width (%i).",
13122                                         i, j, rect.offset.x, rect.extent.width, swapchain_data->createInfo.imageExtent.width);
13123                     }
13124                     if ((rect.offset.y + rect.extent.height) > swapchain_data->createInfo.imageExtent.height) {
13125                         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13126                                         HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-offset-01261",
13127                                         "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, "
13128                                         "pRegion[%i].pRectangles[%i], the sum of offset.y (%i) and extent.height (%i) is greater "
13129                                         "than the corresponding swapchain's imageExtent.height (%i).",
13130                                         i, j, rect.offset.y, rect.extent.height, swapchain_data->createInfo.imageExtent.height);
13131                     }
13132                     if (rect.layer > swapchain_data->createInfo.imageArrayLayers) {
13133                         skip |= log_msg(
13134                             report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13135                             HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-layer-01262",
13136                             "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, pRegion[%i].pRectangles[%i], the layer "
13137                             "(%i) is greater than the corresponding swapchain's imageArrayLayers (%i).",
13138                             i, j, rect.layer, swapchain_data->createInfo.imageArrayLayers);
13139                     }
13140                 }
13141             }
13142         }
13143 
13144         const auto *present_times_info = lvl_find_in_chain<VkPresentTimesInfoGOOGLE>(pPresentInfo->pNext);
13145         if (present_times_info) {
13146             if (pPresentInfo->swapchainCount != present_times_info->swapchainCount) {
13147                 skip |=
13148                     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13149                             HandleToUint64(pPresentInfo->pSwapchains[0]), "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-01247",
13150                             "vkQueuePresentKHR(): VkPresentTimesInfoGOOGLE.swapchainCount is %i but pPresentInfo->swapchainCount "
13151                             "is %i. For VkPresentTimesInfoGOOGLE down pNext chain of VkPresentInfoKHR, "
13152                             "VkPresentTimesInfoGOOGLE.swapchainCount must equal VkPresentInfoKHR.swapchainCount.",
13153                             present_times_info->swapchainCount, pPresentInfo->swapchainCount);
13154             }
13155         }
13156     }
13157 
13158     return skip;
13159 }
13160 
PostCallRecordQueuePresentKHR(VkQueue queue,const VkPresentInfoKHR * pPresentInfo,VkResult result)13161 void CoreChecks::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
13162     // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
13163     for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
13164         auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
13165         if (pSemaphore) {
13166             pSemaphore->signaler.first = VK_NULL_HANDLE;
13167             pSemaphore->signaled = false;
13168         }
13169     }
13170 
13171     for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
13172         // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
13173         // confused itself just as much.
13174         auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
13175         if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
13176         // Mark the image as having been released to the WSI
13177         auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
13178         if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
13179             auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]];
13180             auto image_state = GetImageState(image);
13181             if (image_state) {
13182                 image_state->acquired = false;
13183             }
13184         }
13185     }
13186     // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
13187     // its semaphore waits) /never/ participate in any completion proof.
13188 }
13189 
PreCallValidateCreateSharedSwapchainsKHR(VkDevice device,uint32_t swapchainCount,const VkSwapchainCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchains)13190 bool CoreChecks::PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
13191                                                           const VkSwapchainCreateInfoKHR *pCreateInfos,
13192                                                           const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
13193     bool skip = false;
13194     if (pCreateInfos) {
13195         for (uint32_t i = 0; i < swapchainCount; i++) {
13196             const auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
13197             const auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
13198             std::stringstream func_name;
13199             func_name << "vkCreateSharedSwapchainsKHR[" << swapchainCount << "]()";
13200             skip |= ValidateCreateSwapchain(func_name.str().c_str(), &pCreateInfos[i], surface_state, old_swapchain_state);
13201         }
13202     }
13203     return skip;
13204 }
13205 
PostCallRecordCreateSharedSwapchainsKHR(VkDevice device,uint32_t swapchainCount,const VkSwapchainCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchains,VkResult result)13206 void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
13207                                                                      const VkSwapchainCreateInfoKHR *pCreateInfos,
13208                                                                      const VkAllocationCallbacks *pAllocator,
13209                                                                      VkSwapchainKHR *pSwapchains, VkResult result) {
13210     if (pCreateInfos) {
13211         for (uint32_t i = 0; i < swapchainCount; i++) {
13212             auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
13213             auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
13214             RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
13215         }
13216     }
13217 }
13218 
ValidateAcquireNextImage(VkDevice device,VkSwapchainKHR swapchain,uint64_t timeout,VkSemaphore semaphore,VkFence fence,uint32_t * pImageIndex,const char * func_name) const13219 bool CoreChecks::ValidateAcquireNextImage(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
13220                                           VkFence fence, uint32_t *pImageIndex, const char *func_name) const {
13221     bool skip = false;
13222     if (fence == VK_NULL_HANDLE && semaphore == VK_NULL_HANDLE) {
13223         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
13224                         "VUID-vkAcquireNextImageKHR-semaphore-01780",
13225                         "%s: Semaphore and fence cannot both be VK_NULL_HANDLE. There would be no way to "
13226                         "determine the completion of this operation.",
13227                         func_name);
13228     }
13229 
13230     auto pSemaphore = GetSemaphoreState(semaphore);
13231     if (pSemaphore && pSemaphore->scope == kSyncScopeInternal && pSemaphore->signaled) {
13232         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
13233                         HandleToUint64(semaphore), "VUID-vkAcquireNextImageKHR-semaphore-01286",
13234                         "%s: Semaphore must not be currently signaled or in a wait state.", func_name);
13235     }
13236 
13237     auto pFence = GetFenceState(fence);
13238     if (pFence) {
13239         skip |= ValidateFenceForSubmit(pFence);
13240     }
13241 
13242     const auto swapchain_data = GetSwapchainState(swapchain);
13243     if (swapchain_data) {
13244         if (swapchain_data->retired) {
13245             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13246                             HandleToUint64(swapchain), "VUID-vkAcquireNextImageKHR-swapchain-01285",
13247                             "%s: This swapchain has been retired. The application can still present any images it "
13248                             "has acquired, but cannot acquire any more.",
13249                             func_name);
13250         }
13251 
13252         auto physical_device_state = GetPhysicalDeviceState();
13253         if (physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState != UNCALLED) {
13254             uint64_t acquired_images = std::count_if(swapchain_data->images.begin(), swapchain_data->images.end(),
13255                                                      [=](VkImage image) { return GetImageState(image)->acquired; });
13256             if (acquired_images > swapchain_data->images.size() - physical_device_state->surfaceCapabilities.minImageCount) {
13257                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13258                                 HandleToUint64(swapchain), kVUID_Core_DrawState_SwapchainTooManyImages,
13259                                 "%s: Application has already acquired the maximum number of images (0x%" PRIxLEAST64 ")", func_name,
13260                                 acquired_images);
13261             }
13262         }
13263 
13264         if (swapchain_data->images.size() == 0) {
13265             skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13266                             HandleToUint64(swapchain), kVUID_Core_DrawState_SwapchainImagesNotFound,
13267                             "%s: No images found to acquire from. Application probably did not call "
13268                             "vkGetSwapchainImagesKHR after swapchain creation.",
13269                             func_name);
13270         }
13271     }
13272     return skip;
13273 }
13274 
PreCallValidateAcquireNextImageKHR(VkDevice device,VkSwapchainKHR swapchain,uint64_t timeout,VkSemaphore semaphore,VkFence fence,uint32_t * pImageIndex)13275 bool CoreChecks::PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
13276                                                     VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
13277     return ValidateAcquireNextImage(device, swapchain, timeout, semaphore, fence, pImageIndex, "vkAcquireNextImageKHR");
13278 }
13279 
PreCallValidateAcquireNextImage2KHR(VkDevice device,const VkAcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex)13280 bool CoreChecks::PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
13281                                                      uint32_t *pImageIndex) {
13282     bool skip = false;
13283     skip |= ValidateDeviceMaskToPhysicalDeviceCount(pAcquireInfo->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13284                                                     HandleToUint64(pAcquireInfo->swapchain),
13285                                                     "VUID-VkAcquireNextImageInfoKHR-deviceMask-01290");
13286     skip |= ValidateDeviceMaskToZero(pAcquireInfo->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
13287                                      HandleToUint64(pAcquireInfo->swapchain), "VUID-VkAcquireNextImageInfoKHR-deviceMask-01291");
13288     skip |= ValidateAcquireNextImage(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
13289                                      pAcquireInfo->fence, pImageIndex, "vkAcquireNextImage2KHR");
13290     return skip;
13291 }
13292 
RecordAcquireNextImageState(VkDevice device,VkSwapchainKHR swapchain,uint64_t timeout,VkSemaphore semaphore,VkFence fence,uint32_t * pImageIndex)13293 void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
13294                                                          VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
13295     auto pFence = GetFenceState(fence);
13296     if (pFence && pFence->scope == kSyncScopeInternal) {
13297         // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
13298         // import
13299         pFence->state = FENCE_INFLIGHT;
13300         pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
13301     }
13302 
13303     auto pSemaphore = GetSemaphoreState(semaphore);
13304     if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
13305         // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
13306         // temporary import
13307         pSemaphore->signaled = true;
13308         pSemaphore->signaler.first = VK_NULL_HANDLE;
13309     }
13310 
13311     // Mark the image as acquired.
13312     auto swapchain_data = GetSwapchainState(swapchain);
13313     if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
13314         auto image = swapchain_data->images[*pImageIndex];
13315         auto image_state = GetImageState(image);
13316         if (image_state) {
13317             image_state->acquired = true;
13318             image_state->shared_presentable = swapchain_data->shared_presentable;
13319         }
13320     }
13321 }
13322 
PostCallRecordAcquireNextImageKHR(VkDevice device,VkSwapchainKHR swapchain,uint64_t timeout,VkSemaphore semaphore,VkFence fence,uint32_t * pImageIndex,VkResult result)13323 void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
13324                                                                VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
13325                                                                VkResult result) {
13326     if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
13327     RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
13328 }
13329 
PostCallRecordAcquireNextImage2KHR(VkDevice device,const VkAcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex,VkResult result)13330 void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
13331                                                                 uint32_t *pImageIndex, VkResult result) {
13332     if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
13333     RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
13334                                 pAcquireInfo->fence, pImageIndex);
13335 }
13336 
PostCallRecordEnumeratePhysicalDevices(VkInstance instance,uint32_t * pPhysicalDeviceCount,VkPhysicalDevice * pPhysicalDevices,VkResult result)13337 void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
13338                                                                     VkPhysicalDevice *pPhysicalDevices, VkResult result) {
13339     if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
13340         for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
13341             auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
13342             phys_device_state.phys_device = pPhysicalDevices[i];
13343             // Init actual features for each physical device
13344             DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
13345         }
13346     }
13347 }
13348 
13349 // Common function to handle validation for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
ValidateCommonGetPhysicalDeviceQueueFamilyProperties(debug_report_data * report_data,const PHYSICAL_DEVICE_STATE * pd_state,uint32_t requested_queue_family_property_count,bool qfp_null,const char * caller_name)13350 static bool ValidateCommonGetPhysicalDeviceQueueFamilyProperties(debug_report_data *report_data,
13351                                                                  const PHYSICAL_DEVICE_STATE *pd_state,
13352                                                                  uint32_t requested_queue_family_property_count, bool qfp_null,
13353                                                                  const char *caller_name) {
13354     bool skip = false;
13355     if (!qfp_null) {
13356         // Verify that for each physical device, this command is called first with NULL pQueueFamilyProperties in order to get count
13357         if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState) {
13358             skip |= log_msg(
13359                 report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
13360                 HandleToUint64(pd_state->phys_device), kVUID_Core_DevLimit_MissingQueryCount,
13361                 "%s is called with non-NULL pQueueFamilyProperties before obtaining pQueueFamilyPropertyCount. It is recommended "
13362                 "to first call %s with NULL pQueueFamilyProperties in order to obtain the maximal pQueueFamilyPropertyCount.",
13363                 caller_name, caller_name);
13364             // Then verify that pCount that is passed in on second call matches what was returned
13365         } else if (pd_state->queue_family_known_count != requested_queue_family_property_count) {
13366             skip |= log_msg(
13367                 report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
13368                 HandleToUint64(pd_state->phys_device), kVUID_Core_DevLimit_CountMismatch,
13369                 "%s is called with non-NULL pQueueFamilyProperties and pQueueFamilyPropertyCount value %" PRIu32
13370                 ", but the largest previously returned pQueueFamilyPropertyCount for this physicalDevice is %" PRIu32
13371                 ". It is recommended to instead receive all the properties by calling %s with pQueueFamilyPropertyCount that was "
13372                 "previously obtained by calling %s with NULL pQueueFamilyProperties.",
13373                 caller_name, requested_queue_family_property_count, pd_state->queue_family_known_count, caller_name, caller_name);
13374         }
13375     }
13376 
13377     return skip;
13378 }
13379 
PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties * pQueueFamilyProperties)13380 bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
13381                                                                        uint32_t *pQueueFamilyPropertyCount,
13382                                                                        VkQueueFamilyProperties *pQueueFamilyProperties) {
13383     const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13384     assert(physical_device_state);
13385     return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
13386                                                                 (nullptr == pQueueFamilyProperties),
13387                                                                 "vkGetPhysicalDeviceQueueFamilyProperties()");
13388 }
13389 
PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties2KHR * pQueueFamilyProperties)13390 bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
13391                                                                         uint32_t *pQueueFamilyPropertyCount,
13392                                                                         VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
13393     const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13394     assert(physical_device_state);
13395     return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
13396                                                                 (nullptr == pQueueFamilyProperties),
13397                                                                 "vkGetPhysicalDeviceQueueFamilyProperties2()");
13398 }
13399 
PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties2KHR * pQueueFamilyProperties)13400 bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,
13401                                                                            uint32_t *pQueueFamilyPropertyCount,
13402                                                                            VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
13403     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13404     assert(physical_device_state);
13405     return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
13406                                                                 (nullptr == pQueueFamilyProperties),
13407                                                                 "vkGetPhysicalDeviceQueueFamilyProperties2KHR()");
13408 }
13409 
13410 // Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE * pd_state,uint32_t count,VkQueueFamilyProperties2KHR * pQueueFamilyProperties)13411 static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
13412                                                                     VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
13413     pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
13414 
13415     if (!pQueueFamilyProperties) {
13416         if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
13417             pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
13418     } else {  // Save queue family properties
13419         pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
13420 
13421         pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
13422         for (uint32_t i = 0; i < count; ++i) {
13423             pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
13424         }
13425     }
13426 }
13427 
PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties * pQueueFamilyProperties)13428 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
13429                                                                                   uint32_t *pQueueFamilyPropertyCount,
13430                                                                                   VkQueueFamilyProperties *pQueueFamilyProperties) {
13431     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13432     assert(physical_device_state);
13433     VkQueueFamilyProperties2KHR *pqfp = nullptr;
13434     std::vector<VkQueueFamilyProperties2KHR> qfp;
13435     qfp.resize(*pQueueFamilyPropertyCount);
13436     if (pQueueFamilyProperties) {
13437         for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
13438             qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
13439             qfp[i].pNext = nullptr;
13440             qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
13441         }
13442         pqfp = qfp.data();
13443     }
13444     StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
13445 }
13446 
PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties2KHR * pQueueFamilyProperties)13447 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
13448     VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
13449     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13450     assert(physical_device_state);
13451     StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
13452                                                             pQueueFamilyProperties);
13453 }
13454 
PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties2KHR * pQueueFamilyProperties)13455 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
13456     VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
13457     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13458     assert(physical_device_state);
13459     StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
13460                                                             pQueueFamilyProperties);
13461 }
13462 
PreCallValidateDestroySurfaceKHR(VkInstance instance,VkSurfaceKHR surface,const VkAllocationCallbacks * pAllocator)13463 bool CoreChecks::PreCallValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
13464                                                   const VkAllocationCallbacks *pAllocator) {
13465     const auto surface_state = GetSurfaceState(surface);
13466     bool skip = false;
13467     if ((surface_state) && (surface_state->swapchain)) {
13468         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
13469                         HandleToUint64(instance), "VUID-vkDestroySurfaceKHR-surface-01266",
13470                         "vkDestroySurfaceKHR() called before its associated VkSwapchainKHR was destroyed.");
13471     }
13472     return skip;
13473 }
13474 
// Drop the tracker's state entry for this surface; pAllocator is unused for state tracking.
void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    surface_map.erase(surface);
}
13479 
RecordVulkanSurface(VkSurfaceKHR * pSurface)13480 void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
13481     surface_map[*pSurface] = std::unique_ptr<SURFACE_STATE>(new SURFACE_STATE{*pSurface});
13482 }
13483 
PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,const VkDisplaySurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface,VkResult result)13484 void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
13485                                                                         const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
13486                                                                         const VkAllocationCallbacks *pAllocator,
13487                                                                         VkSurfaceKHR *pSurface, VkResult result) {
13488     if (VK_SUCCESS != result) return;
13489     RecordVulkanSurface(pSurface);
13490 }
13491 
13492 #ifdef VK_USE_PLATFORM_ANDROID_KHR
PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,const VkAndroidSurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface,VkResult result)13493 void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
13494                                                                    const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
13495                                                                    const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
13496                                                                    VkResult result) {
13497     if (VK_SUCCESS != result) return;
13498     RecordVulkanSurface(pSurface);
13499 }
13500 #endif  // VK_USE_PLATFORM_ANDROID_KHR
13501 
13502 #ifdef VK_USE_PLATFORM_IOS_MVK
PostCallRecordCreateIOSSurfaceMVK(VkInstance instance,const VkIOSSurfaceCreateInfoMVK * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface,VkResult result)13503 void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
13504                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
13505                                                                VkResult result) {
13506     if (VK_SUCCESS != result) return;
13507     RecordVulkanSurface(pSurface);
13508 }
13509 #endif  // VK_USE_PLATFORM_IOS_MVK
13510 
13511 #ifdef VK_USE_PLATFORM_MACOS_MVK
PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,const VkMacOSSurfaceCreateInfoMVK * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface,VkResult result)13512 void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
13513                                                                  const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
13514                                                                  const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
13515                                                                  VkResult result) {
13516     if (VK_SUCCESS != result) return;
13517     RecordVulkanSurface(pSurface);
13518 }
13519 #endif  // VK_USE_PLATFORM_MACOS_MVK
13520 
13521 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,const VkWaylandSurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface,VkResult result)13522 void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
13523                                                                    const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
13524                                                                    const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
13525                                                                    VkResult result) {
13526     if (VK_SUCCESS != result) return;
13527     RecordVulkanSurface(pSurface);
13528 }
13529 
PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,struct wl_display * display)13530 bool CoreChecks::PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
13531                                                                                uint32_t queueFamilyIndex,
13532                                                                                struct wl_display *display) {
13533     const auto pd_state = GetPhysicalDeviceState(physicalDevice);
13534     return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
13535                                     "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306",
13536                                     "vkGetPhysicalDeviceWaylandPresentationSupportKHR", "queueFamilyIndex");
13537 }
13538 #endif  // VK_USE_PLATFORM_WAYLAND_KHR
13539 
13540 #ifdef VK_USE_PLATFORM_WIN32_KHR
PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,const VkWin32SurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface,VkResult result)13541 void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
13542                                                                  const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
13543                                                                  const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
13544                                                                  VkResult result) {
13545     if (VK_SUCCESS != result) return;
13546     RecordVulkanSurface(pSurface);
13547 }
13548 
PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex)13549 bool CoreChecks::PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
13550                                                                              uint32_t queueFamilyIndex) {
13551     const auto pd_state = GetPhysicalDeviceState(physicalDevice);
13552     return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
13553                                     "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309",
13554                                     "vkGetPhysicalDeviceWin32PresentationSupportKHR", "queueFamilyIndex");
13555 }
13556 #endif  // VK_USE_PLATFORM_WIN32_KHR
13557 
13558 #ifdef VK_USE_PLATFORM_XCB_KHR
PostCallRecordCreateXcbSurfaceKHR(VkInstance instance,const VkXcbSurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface,VkResult result)13559 void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
13560                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
13561                                                                VkResult result) {
13562     if (VK_SUCCESS != result) return;
13563     RecordVulkanSurface(pSurface);
13564 }
13565 
PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,xcb_connection_t * connection,xcb_visualid_t visual_id)13566 bool CoreChecks::PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
13567                                                                            uint32_t queueFamilyIndex, xcb_connection_t *connection,
13568                                                                            xcb_visualid_t visual_id) {
13569     const auto pd_state = GetPhysicalDeviceState(physicalDevice);
13570     return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
13571                                     "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312",
13572                                     "vkGetPhysicalDeviceXcbPresentationSupportKHR", "queueFamilyIndex");
13573 }
13574 #endif  // VK_USE_PLATFORM_XCB_KHR
13575 
13576 #ifdef VK_USE_PLATFORM_XLIB_KHR
PostCallRecordCreateXlibSurfaceKHR(VkInstance instance,const VkXlibSurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface,VkResult result)13577 void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
13578                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
13579                                                                 VkResult result) {
13580     if (VK_SUCCESS != result) return;
13581     RecordVulkanSurface(pSurface);
13582 }
13583 
PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,Display * dpy,VisualID visualID)13584 bool CoreChecks::PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
13585                                                                             uint32_t queueFamilyIndex, Display *dpy,
13586                                                                             VisualID visualID) {
13587     const auto pd_state = GetPhysicalDeviceState(physicalDevice);
13588     return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
13589                                     "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315",
13590                                     "vkGetPhysicalDeviceXlibPresentationSupportKHR", "queueFamilyIndex");
13591 }
13592 #endif  // VK_USE_PLATFORM_XLIB_KHR
13593 
PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,VkSurfaceCapabilitiesKHR * pSurfaceCapabilities,VkResult result)13594 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
13595                                                                                    VkSurfaceKHR surface,
13596                                                                                    VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
13597                                                                                    VkResult result) {
13598     if (VK_SUCCESS != result) return;
13599     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13600     physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
13601     physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
13602 }
13603 
PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VkSurfaceCapabilities2KHR * pSurfaceCapabilities,VkResult result)13604 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
13605     VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
13606     VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
13607     if (VK_SUCCESS != result) return;
13608     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13609     physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
13610     physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
13611 }
13612 
PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,VkSurfaceCapabilities2EXT * pSurfaceCapabilities,VkResult result)13613 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
13614                                                                                     VkSurfaceKHR surface,
13615                                                                                     VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
13616                                                                                     VkResult result) {
13617     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13618     physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
13619     physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
13620     physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
13621     physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
13622     physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
13623     physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
13624     physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
13625     physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
13626     physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
13627     physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
13628     physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
13629 }
13630 
PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,VkSurfaceKHR surface,VkBool32 * pSupported)13631 bool CoreChecks::PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
13632                                                                    VkSurfaceKHR surface, VkBool32 *pSupported) {
13633     const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13634     return ValidateQueueFamilyIndex(physical_device_state, queueFamilyIndex,
13635                                     "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269",
13636                                     "vkGetPhysicalDeviceSurfaceSupportKHR", "queueFamilyIndex");
13637 }
13638 
PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,VkSurfaceKHR surface,VkBool32 * pSupported,VkResult result)13639 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
13640                                                                               uint32_t queueFamilyIndex, VkSurfaceKHR surface,
13641                                                                               VkBool32 *pSupported, VkResult result) {
13642     if (VK_SUCCESS != result) return;
13643     auto surface_state = GetSurfaceState(surface);
13644     surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
13645 }
13646 
PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,uint32_t * pPresentModeCount,VkPresentModeKHR * pPresentModes,VkResult result)13647 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
13648                                                                                    VkSurfaceKHR surface,
13649                                                                                    uint32_t *pPresentModeCount,
13650                                                                                    VkPresentModeKHR *pPresentModes,
13651                                                                                    VkResult result) {
13652     if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
13653 
13654     // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
13655     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13656     auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;
13657 
13658     if (*pPresentModeCount) {
13659         if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
13660         if (*pPresentModeCount > physical_device_state->present_modes.size())
13661             physical_device_state->present_modes.resize(*pPresentModeCount);
13662     }
13663     if (pPresentModes) {
13664         if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
13665         for (uint32_t i = 0; i < *pPresentModeCount; i++) {
13666             physical_device_state->present_modes[i] = pPresentModes[i];
13667         }
13668     }
13669 }
13670 
// Validate the two-call idiom for vkGetPhysicalDeviceSurfaceFormatsKHR: warn if the app requests
// format details without first querying the count, or passes a count that doesn't match the count
// previously cached for this physical device.
bool CoreChecks::PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                   uint32_t *pSurfaceFormatCount,
                                                                   VkSurfaceFormatKHR *pSurfaceFormats) {
    // A count-only query (NULL pSurfaceFormats) never warns.
    if (!pSurfaceFormats) return false;
    const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    const auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;
    bool skip = false;
    switch (call_state) {
        case UNCALLED:
            // Since we haven't recorded a preliminary value of *pSurfaceFormatCount, that likely means that the application didn't
            // previously call this function with a NULL value of pSurfaceFormats:
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
                            HandleToUint64(physicalDevice), kVUID_Core_DevLimit_MustQueryCount,
                            "vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount; but no prior "
                            "positive value has been seen for pSurfaceFormats.");
            break;
        default:
            auto prev_format_count = (uint32_t)physical_device_state->surface_formats.size();
            // NOTE(review): the condition below fires on ANY mismatch (!=), but the warning text claims the
            // passed value "is greater than" the cached one -- confirm whether the condition or the message
            // is the intended behavior.
            if (prev_format_count != *pSurfaceFormatCount) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
                                HandleToUint64(physicalDevice), kVUID_Core_DevLimit_CountMismatch,
                                "vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount, and with "
                                "pSurfaceFormats set to a value (%u) that is greater than the value (%u) that was returned "
                                "when pSurfaceFormatCount was NULL.",
                                *pSurfaceFormatCount, prev_format_count);
            }
            break;
    }
    return skip;
}
13701 
PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,uint32_t * pSurfaceFormatCount,VkSurfaceFormatKHR * pSurfaceFormats,VkResult result)13702 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
13703                                                                               uint32_t *pSurfaceFormatCount,
13704                                                                               VkSurfaceFormatKHR *pSurfaceFormats,
13705                                                                               VkResult result) {
13706     if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
13707 
13708     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
13709     auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;
13710 
13711     if (*pSurfaceFormatCount) {
13712         if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
13713         if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
13714             physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
13715     }
13716     if (pSurfaceFormats) {
13717         if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
13718         for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
13719             physical_device_state->surface_formats[i] = pSurfaceFormats[i];
13720         }
13721     }
13722 }
13723 
PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pSurfaceFormatCount,VkSurfaceFormat2KHR * pSurfaceFormats,VkResult result)13724 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
13725                                                                                const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
13726                                                                                uint32_t *pSurfaceFormatCount,
13727                                                                                VkSurfaceFormat2KHR *pSurfaceFormats,
13728                                                                                VkResult result) {
13729     if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
13730 
13731     auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
13732     if (*pSurfaceFormatCount) {
13733         if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
13734             physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
13735         }
13736         if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
13737             physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
13738     }
13739     if (pSurfaceFormats) {
13740         if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
13741             physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
13742         }
13743         for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
13744             physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
13745         }
13746     }
13747 }
13748 
// Forward a vkCmdBeginDebugUtilsLabelEXT call to the debug-utils label tracking helper.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
13753 
// Forward a vkCmdEndDebugUtilsLabelEXT call to the debug-utils label tracking helper.
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
13757 
PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,const VkDebugUtilsLabelEXT * pLabelInfo)13758 void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
13759                                                                       const VkDebugUtilsLabelEXT *pLabelInfo) {
13760     InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
13761 
13762     // Squirrel away an easily accessible copy.
13763     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13764     cb_state->debug_label = LoggingLabel(pLabelInfo);
13765 }
13766 
RecordEnumeratePhysicalDeviceGroupsState(uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupPropertiesKHR * pPhysicalDeviceGroupProperties)13767 void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
13768     uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
13769     if (NULL != pPhysicalDeviceGroupProperties) {
13770         for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
13771             for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
13772                 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
13773                 auto &phys_device_state = physical_device_map[cur_phys_dev];
13774                 phys_device_state.phys_device = cur_phys_dev;
13775                 // Init actual features for each physical device
13776                 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
13777             }
13778         }
13779     }
13780 }
13781 
PostCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance,uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupPropertiesKHR * pPhysicalDeviceGroupProperties,VkResult result)13782 void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
13783     VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
13784     VkResult result) {
13785     if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
13786     RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
13787 }
13788 
PostCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance,uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupPropertiesKHR * pPhysicalDeviceGroupProperties,VkResult result)13789 void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
13790     VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
13791     VkResult result) {
13792     if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
13793     RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
13794 }
13795 
// Validate a VkDescriptorUpdateTemplateCreateInfo shared by the core and KHR create entry points.
// Checks, by templateType:
//  - DESCRIPTOR_SET: descriptorSetLayout must be a known layout (VUID ...-templateType-00350).
//  - PUSH_DESCRIPTORS_KHR: pipelineBindPoint must be graphics or compute (VUID ...-00351),
//    pipelineLayout must be a known layout (VUID ...-00352), and 'set' must select that layout's
//    push descriptor set layout (VUID ...-00353).
// Returns true if any error was logged (caller skips the API call).
bool CoreChecks::ValidateDescriptorUpdateTemplate(const char *func_name,
                                                  const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo) {
    bool skip = false;
    const auto layout = GetDescriptorSetLayout(this, pCreateInfo->descriptorSetLayout);
    if (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET == pCreateInfo->templateType && !layout) {
        // descriptorSetLayout is only consumed (hence only validated) for DESCRIPTOR_SET templates.
        const VulkanTypedHandle ds_typed(pCreateInfo->descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout);
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, ds_typed.handle,
                    "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350",
                    "%s: Invalid pCreateInfo->descriptorSetLayout (%s)", func_name, report_data->FormatHandle(ds_typed).c_str());
    } else if (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR == pCreateInfo->templateType) {
        auto bind_point = pCreateInfo->pipelineBindPoint;
        bool valid_bp = (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) || (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE);
        if (!valid_bp) {
            skip |=
                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351",
                        "%s: Invalid pCreateInfo->pipelineBindPoint (%" PRIu32 ").", func_name, static_cast<uint32_t>(bind_point));
        }
        const auto pipeline_layout = GetPipelineLayout(pCreateInfo->pipelineLayout);
        if (!pipeline_layout) {
            const VulkanTypedHandle pl_typed(pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout);
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
                            pl_typed.handle, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352",
                            "%s: Invalid pCreateInfo->pipelineLayout (%s)", func_name, report_data->FormatHandle(pl_typed).c_str());
        } else {
            // 'set' must be in range, non-null, and refer to the push descriptor set layout.
            const uint32_t pd_set = pCreateInfo->set;
            if ((pd_set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[pd_set] ||
                !pipeline_layout->set_layouts[pd_set]->IsPushDescriptor()) {
                const VulkanTypedHandle pl_typed(pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout);
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
                                pl_typed.handle, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353",
                                "%s: pCreateInfo->set (%" PRIu32
                                ") does not refer to the push descriptor set layout for pCreateInfo->pipelineLayout (%s).",
                                func_name, pd_set, report_data->FormatHandle(pl_typed).c_str());
            }
        }
    }
    return skip;
}
13836 
PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device,const VkDescriptorUpdateTemplateCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplateKHR * pDescriptorUpdateTemplate)13837 bool CoreChecks::PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device,
13838                                                                const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
13839                                                                const VkAllocationCallbacks *pAllocator,
13840                                                                VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
13841     bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplate()", pCreateInfo);
13842     return skip;
13843 }
13844 
PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device,const VkDescriptorUpdateTemplateCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplateKHR * pDescriptorUpdateTemplate)13845 bool CoreChecks::PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device,
13846                                                                   const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
13847                                                                   const VkAllocationCallbacks *pAllocator,
13848                                                                   VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
13849     bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplateKHR()", pCreateInfo);
13850     return skip;
13851 }
13852 
PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,const VkAllocationCallbacks * pAllocator)13853 void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
13854                                                                           VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
13855                                                                           const VkAllocationCallbacks *pAllocator) {
13856     if (!descriptorUpdateTemplate) return;
13857     desc_template_map.erase(descriptorUpdateTemplate);
13858 }
13859 
PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,const VkAllocationCallbacks * pAllocator)13860 void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
13861                                                                              VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
13862                                                                              const VkAllocationCallbacks *pAllocator) {
13863     if (!descriptorUpdateTemplate) return;
13864     desc_template_map.erase(descriptorUpdateTemplate);
13865 }
13866 
RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR * pCreateInfo,VkDescriptorUpdateTemplateKHR * pDescriptorUpdateTemplate)13867 void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
13868                                                                        VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
13869     safe_VkDescriptorUpdateTemplateCreateInfo *local_create_info = new safe_VkDescriptorUpdateTemplateCreateInfo(pCreateInfo);
13870     std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_create_info));
13871     desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
13872 }
13873 
PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,const VkDescriptorUpdateTemplateCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplateKHR * pDescriptorUpdateTemplate,VkResult result)13874 void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
13875     VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
13876     VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
13877     if (VK_SUCCESS != result) return;
13878     RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
13879 }
13880 
PostCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device,const VkDescriptorUpdateTemplateCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplateKHR * pDescriptorUpdateTemplate,VkResult result)13881 void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
13882     VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
13883     VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
13884     if (VK_SUCCESS != result) return;
13885     RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
13886 }
13887 
ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,const void * pData)13888 bool CoreChecks::ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet,
13889                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
13890                                                          const void *pData) {
13891     bool skip = false;
13892     auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
13893     if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
13894         // Object tracker will report errors for invalid descriptorUpdateTemplate values, avoiding a crash in release builds
13895         // but retaining the assert as template support is new enough to want to investigate these in debug builds.
13896         assert(0);
13897     } else {
13898         const TEMPLATE_STATE *template_state = template_map_entry->second.get();
13899         // TODO: Validate template push descriptor updates
13900         if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
13901             skip = ValidateUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
13902         }
13903     }
13904     return skip;
13905 }
13906 
// Core entry point: validation is shared with the KHR alias via the common helper.
bool CoreChecks::PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                const void *pData) {
    return ValidateUpdateDescriptorSetWithTemplate(descriptorSet, descriptorUpdateTemplate, pData);
}
13912 
// KHR entry point: validation is shared with the core alias via the common helper.
bool CoreChecks::PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                   VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                   const void *pData) {
    return ValidateUpdateDescriptorSetWithTemplate(descriptorSet, descriptorUpdateTemplate, pData);
}
13918 
RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,const void * pData)13919 void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
13920                                                                         VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
13921                                                                         const void *pData) {
13922     auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
13923     if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
13924         assert(0);
13925     } else {
13926         const TEMPLATE_STATE *template_state = template_map_entry->second.get();
13927         // TODO: Record template push descriptor updates
13928         if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
13929             PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
13930         }
13931     }
13932 }
13933 
// Core entry point: recording is shared with the KHR alias via the common helper.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
13939 
// KHR entry point: recording is shared with the core alias via the common helper.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
13945 
GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const * layout_data,uint32_t set)13946 static std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(
13947     PIPELINE_LAYOUT_STATE const *layout_data, uint32_t set) {
13948     std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
13949     if (layout_data && (set < layout_data->set_layouts.size())) {
13950         dsl = layout_data->set_layouts[set];
13951     }
13952     return dsl;
13953 }
13954 
PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,VkPipelineLayout layout,uint32_t set,const void * pData)13955 bool CoreChecks::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
13956                                                                     VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
13957                                                                     VkPipelineLayout layout, uint32_t set, const void *pData) {
13958     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13959     assert(cb_state);
13960     const char *const func_name = "vkPushDescriptorSetWithTemplateKHR()";
13961     bool skip = false;
13962     skip |= ValidateCmd(cb_state, CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR, func_name);
13963 
13964     auto layout_data = GetPipelineLayout(layout);
13965     auto dsl = GetDslFromPipelineLayout(layout_data, set);
13966     const VulkanTypedHandle layout_typed(layout, kVulkanObjectTypePipelineLayout);
13967 
13968     // Validate the set index points to a push descriptor set and is in range
13969     if (dsl) {
13970         if (!dsl->IsPushDescriptor()) {
13971             skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
13972                            layout_typed.handle, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
13973                            "%s: Set index %" PRIu32 " does not match push descriptor set layout index for %s.", func_name, set,
13974                            report_data->FormatHandle(layout_typed).c_str());
13975         }
13976     } else if (layout_data && (set >= layout_data->set_layouts.size())) {
13977         skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
13978                        layout_typed.handle, "VUID-vkCmdPushDescriptorSetKHR-set-00364",
13979                        "%s: Set index %" PRIu32 " is outside of range for %s (set < %" PRIu32 ").", func_name, set,
13980                        report_data->FormatHandle(layout_typed).c_str(), static_cast<uint32_t>(layout_data->set_layouts.size()));
13981     }
13982 
13983     const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
13984     if (template_state) {
13985         const auto &template_ci = template_state->create_info;
13986         static const std::map<VkPipelineBindPoint, std::string> bind_errors = {
13987             std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366"),
13988             std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366"),
13989             std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
13990                            "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366")};
13991         skip |= ValidatePipelineBindPoint(cb_state, template_ci.pipelineBindPoint, func_name, bind_errors);
13992 
13993         if (template_ci.templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
13994             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
13995                             HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_TemplateType,
13996                             "%s: descriptorUpdateTemplate %s was not created with flag "
13997                             "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR.",
13998                             func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str());
13999         }
14000         if (template_ci.set != set) {
14001             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14002                             HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_Template_SetMismatched,
14003                             "%s: descriptorUpdateTemplate %s created with set %" PRIu32
14004                             " does not match command parameter set %" PRIu32 ".",
14005                             func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str(), template_ci.set, set);
14006         }
14007         if (!CompatForSet(set, layout_data, GetPipelineLayout(template_ci.pipelineLayout))) {
14008             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14009                             HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_Template_LayoutMismatched,
14010                             "%s: descriptorUpdateTemplate %s created with %s is incompatible with command parameter "
14011                             "%s for set %" PRIu32,
14012                             func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str(),
14013                             report_data->FormatHandle(template_ci.pipelineLayout).c_str(),
14014                             report_data->FormatHandle(layout).c_str(), set);
14015         }
14016     }
14017 
14018     if (dsl && template_state) {
14019         // Create an empty proxy in order to use the existing descriptor set update validation
14020         cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, VK_NULL_HANDLE, dsl, 0, this);
14021         // Decode the template into a set of write updates
14022         cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
14023                                                                 dsl->GetDescriptorSetLayout());
14024         // Validate the decoded update against the proxy_ds
14025         skip |= ValidatePushDescriptorsUpdate(&proxy_ds, static_cast<uint32_t>(decoded_template.desc_writes.size()),
14026                                               decoded_template.desc_writes.data(), func_name);
14027     }
14028 
14029     return skip;
14030 }
14031 
// Record-side mirror of vkCmdPushDescriptorSetWithTemplateKHR: decode the update template into
// descriptor write updates and fold them into the command buffer's push descriptor state.
void CoreChecks::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
                                                                  VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                  VkPipelineLayout layout, uint32_t set, const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        // Only record when 'set' maps to a live descriptor set layout in this pipeline layout.
        if (dsl && !dsl->IsDestroyed()) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
14052 
RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,uint32_t * pPropertyCount,void * pProperties)14053 void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
14054                                                                                 uint32_t *pPropertyCount, void *pProperties) {
14055     auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
14056     if (*pPropertyCount) {
14057         if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
14058             physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
14059         }
14060         physical_device_state->display_plane_property_count = *pPropertyCount;
14061     }
14062     if (pProperties) {
14063         if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
14064             physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
14065         }
14066     }
14067 }
14068 
PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,uint32_t * pPropertyCount,VkDisplayPlanePropertiesKHR * pProperties,VkResult result)14069 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
14070                                                                                       uint32_t *pPropertyCount,
14071                                                                                       VkDisplayPlanePropertiesKHR *pProperties,
14072                                                                                       VkResult result) {
14073     if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
14074     RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
14075 }
14076 
PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,uint32_t * pPropertyCount,VkDisplayPlaneProperties2KHR * pProperties,VkResult result)14077 void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
14078                                                                                        uint32_t *pPropertyCount,
14079                                                                                        VkDisplayPlaneProperties2KHR *pProperties,
14080                                                                                        VkResult result) {
14081     if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
14082     RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
14083 }
14084 
ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice,uint32_t planeIndex,const char * api_name) const14085 bool CoreChecks::ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
14086                                                                          const char *api_name) const {
14087     bool skip = false;
14088     const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
14089     if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState == UNCALLED) {
14090         skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
14091                         HandleToUint64(physicalDevice), kVUID_Core_Swapchain_GetSupportedDisplaysWithoutQuery,
14092                         "Potential problem with calling %s() without first retrieving properties from "
14093                         "vkGetPhysicalDeviceDisplayPlanePropertiesKHR or vkGetPhysicalDeviceDisplayPlaneProperties2KHR.",
14094                         api_name);
14095     } else {
14096         if (planeIndex >= physical_device_state->display_plane_property_count) {
14097             skip |= log_msg(
14098                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
14099                 HandleToUint64(physicalDevice), "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-planeIndex-01249",
14100                 "%s(): planeIndex must be in the range [0, %d] that was returned by vkGetPhysicalDeviceDisplayPlanePropertiesKHR "
14101                 "or vkGetPhysicalDeviceDisplayPlaneProperties2KHR. Do you have the plane index hardcoded?",
14102                 api_name, physical_device_state->display_plane_property_count - 1);
14103         }
14104     }
14105     return skip;
14106 }
14107 
PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice,uint32_t planeIndex,uint32_t * pDisplayCount,VkDisplayKHR * pDisplays)14108 bool CoreChecks::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
14109                                                                     uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) {
14110     bool skip = false;
14111     skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, planeIndex,
14112                                                                     "vkGetDisplayPlaneSupportedDisplaysKHR");
14113     return skip;
14114 }
14115 
PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice,VkDisplayModeKHR mode,uint32_t planeIndex,VkDisplayPlaneCapabilitiesKHR * pCapabilities)14116 bool CoreChecks::PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
14117                                                                uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities) {
14118     bool skip = false;
14119     skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, planeIndex, "vkGetDisplayPlaneCapabilitiesKHR");
14120     return skip;
14121 }
14122 
PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo,VkDisplayPlaneCapabilities2KHR * pCapabilities)14123 bool CoreChecks::PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
14124                                                                 const VkDisplayPlaneInfo2KHR *pDisplayPlaneInfo,
14125                                                                 VkDisplayPlaneCapabilities2KHR *pCapabilities) {
14126     bool skip = false;
14127     skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, pDisplayPlaneInfo->planeIndex,
14128                                                                     "vkGetDisplayPlaneCapabilities2KHR");
14129     return skip;
14130 }
14131 
PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer,const VkDebugMarkerMarkerInfoEXT * pMarkerInfo)14132 bool CoreChecks::PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer,
14133                                                        const VkDebugMarkerMarkerInfoEXT *pMarkerInfo) {
14134     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14135     assert(cb_state);
14136     return ValidateCmd(cb_state, CMD_DEBUGMARKERBEGINEXT, "vkCmdDebugMarkerBeginEXT()");
14137 }
14138 
PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer)14139 bool CoreChecks::PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {
14140     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14141     assert(cb_state);
14142     return ValidateCmd(cb_state, CMD_DEBUGMARKERENDEXT, "vkCmdDebugMarkerEndEXT()");
14143 }
14144 
PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,VkQueryControlFlags flags,uint32_t index)14145 bool CoreChecks::PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
14146                                                         VkQueryControlFlags flags, uint32_t index) {
14147     if (disabled.query_validation) return false;
14148     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14149     assert(cb_state);
14150     QueryObject query_obj(queryPool, query, index);
14151     const char *cmd_name = "vkCmdBeginQueryIndexedEXT()";
14152     bool skip = ValidateBeginQuery(
14153         cb_state, query_obj, flags, CMD_BEGINQUERYINDEXEDEXT, cmd_name, "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool",
14154         "VUID-vkCmdBeginQueryIndexedEXT-queryType-02338", "VUID-vkCmdBeginQueryIndexedEXT-queryType-00803",
14155         "VUID-vkCmdBeginQueryIndexedEXT-queryType-00800", "VUID-vkCmdBeginQueryIndexedEXT-query-00802");
14156 
14157     // Extension specific VU's
14158     const auto &query_pool_ci = GetQueryPoolState(query_obj.pool)->createInfo;
14159     if (query_pool_ci.queryType == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT) {
14160         if (device_extensions.vk_ext_transform_feedback &&
14161             (index >= phys_dev_ext_props.transform_feedback_props.maxTransformFeedbackStreams)) {
14162             skip |= log_msg(
14163                 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14164                 HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQueryIndexedEXT-queryType-02339",
14165                 "%s: index %" PRIu32
14166                 " must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams %" PRIu32 ".",
14167                 cmd_name, index, phys_dev_ext_props.transform_feedback_props.maxTransformFeedbackStreams);
14168         }
14169     } else if (index != 0) {
14170         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14171                         HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQueryIndexedEXT-queryType-02340",
14172                         "%s: index %" PRIu32
14173                         " must be zero if %s was not created with type VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT.",
14174                         cmd_name, index, report_data->FormatHandle(queryPool).c_str());
14175     }
14176     return skip;
14177 }
14178 
PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,VkQueryControlFlags flags,uint32_t index)14179 void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
14180                                                                    uint32_t query, VkQueryControlFlags flags, uint32_t index) {
14181     QueryObject query_obj = {queryPool, query, index};
14182     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14183     RecordCmdBeginQuery(cb_state, query_obj);
14184 }
14185 
PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,VkQueryControlFlags flags,uint32_t index)14186 void CoreChecks::PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
14187                                                       VkQueryControlFlags flags, uint32_t index) {
14188     QueryObject query_obj = {queryPool, query, index};
14189     EnqueueVerifyBeginQuery(commandBuffer, query_obj);
14190 }
14191 
PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,uint32_t index)14192 bool CoreChecks::PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
14193                                                       uint32_t index) {
14194     if (disabled.query_validation) return false;
14195     QueryObject query_obj = {queryPool, query, index};
14196     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14197     assert(cb_state);
14198     return ValidateCmdEndQuery(cb_state, query_obj, CMD_ENDQUERYINDEXEDEXT, "vkCmdEndQueryIndexedEXT()",
14199                                "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-cmdpool", "VUID-vkCmdEndQueryIndexedEXT-None-02342");
14200 }
14201 
PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,uint32_t index)14202 void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
14203                                                                  uint32_t query, uint32_t index) {
14204     QueryObject query_obj = {queryPool, query, index};
14205     CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14206     RecordCmdEndQuery(cb_state, query_obj);
14207 }
14208 
PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer,uint32_t firstDiscardRectangle,uint32_t discardRectangleCount,const VkRect2D * pDiscardRectangles)14209 bool CoreChecks::PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
14210                                                           uint32_t discardRectangleCount, const VkRect2D *pDiscardRectangles) {
14211     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14212     // Minimal validation for command buffer state
14213     return ValidateCmd(cb_state, CMD_SETDISCARDRECTANGLEEXT, "vkCmdSetDiscardRectangleEXT()");
14214 }
14215 
PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,const VkSampleLocationsInfoEXT * pSampleLocationsInfo)14216 bool CoreChecks::PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
14217                                                          const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
14218     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14219     // Minimal validation for command buffer state
14220     return ValidateCmd(cb_state, CMD_SETSAMPLELOCATIONSEXT, "vkCmdSetSampleLocationsEXT()");
14221 }
14222 
ValidateCreateSamplerYcbcrConversion(const char * func_name,const VkSamplerYcbcrConversionCreateInfo * create_info) const14223 bool CoreChecks::ValidateCreateSamplerYcbcrConversion(const char *func_name,
14224                                                       const VkSamplerYcbcrConversionCreateInfo *create_info) const {
14225     bool skip = false;
14226     if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
14227         skip |= ValidateCreateSamplerYcbcrConversionANDROID(create_info);
14228     } else {  // Not android hardware buffer
14229         if (VK_FORMAT_UNDEFINED == create_info->format) {
14230             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
14231                             "VUID-VkSamplerYcbcrConversionCreateInfo-format-01649",
14232                             "%s: CreateInfo format type is VK_FORMAT_UNDEFINED.", func_name);
14233         }
14234     }
14235     return skip;
14236 }
14237 
PreCallValidateCreateSamplerYcbcrConversion(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)14238 bool CoreChecks::PreCallValidateCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
14239                                                              const VkAllocationCallbacks *pAllocator,
14240                                                              VkSamplerYcbcrConversion *pYcbcrConversion) {
14241     return ValidateCreateSamplerYcbcrConversion("vkCreateSamplerYcbcrConversion()", pCreateInfo);
14242 }
14243 
PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)14244 bool CoreChecks::PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device,
14245                                                                 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
14246                                                                 const VkAllocationCallbacks *pAllocator,
14247                                                                 VkSamplerYcbcrConversion *pYcbcrConversion) {
14248     return ValidateCreateSamplerYcbcrConversion("vkCreateSamplerYcbcrConversionKHR()", pCreateInfo);
14249 }
14250 
RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo * create_info,VkSamplerYcbcrConversion ycbcr_conversion)14251 void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
14252                                                                      VkSamplerYcbcrConversion ycbcr_conversion) {
14253     if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
14254         RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
14255     }
14256 }
14257 
PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion,VkResult result)14258 void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
14259                                                                         const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
14260                                                                         const VkAllocationCallbacks *pAllocator,
14261                                                                         VkSamplerYcbcrConversion *pYcbcrConversion,
14262                                                                         VkResult result) {
14263     if (VK_SUCCESS != result) return;
14264     RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
14265 }
14266 
PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion,VkResult result)14267 void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
14268                                                                            const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
14269                                                                            const VkAllocationCallbacks *pAllocator,
14270                                                                            VkSamplerYcbcrConversion *pYcbcrConversion,
14271                                                                            VkResult result) {
14272     if (VK_SUCCESS != result) return;
14273     RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
14274 }
14275 
PostCallRecordDestroySamplerYcbcrConversion(VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)14276 void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
14277                                                                          const VkAllocationCallbacks *pAllocator) {
14278     if (!ycbcrConversion) return;
14279     if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
14280         RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
14281     }
14282 }
14283 
PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)14284 void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
14285                                                                             VkSamplerYcbcrConversion ycbcrConversion,
14286                                                                             const VkAllocationCallbacks *pAllocator) {
14287     if (!ycbcrConversion) return;
14288     if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
14289         RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
14290     }
14291 }
14292 
PreCallValidateGetBufferDeviceAddressEXT(VkDevice device,const VkBufferDeviceAddressInfoEXT * pInfo)14293 bool CoreChecks::PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT *pInfo) {
14294     bool skip = false;
14295 
14296     if (!enabled_features.buffer_address.bufferDeviceAddress) {
14297         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
14298                         HandleToUint64(pInfo->buffer), "VUID-vkGetBufferDeviceAddressEXT-None-02598",
14299                         "The bufferDeviceAddress feature must: be enabled.");
14300     }
14301 
14302     if (physical_device_count > 1 && !enabled_features.buffer_address.bufferDeviceAddressMultiDevice) {
14303         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
14304                         HandleToUint64(pInfo->buffer), "VUID-vkGetBufferDeviceAddressEXT-device-02599",
14305                         "If device was created with multiple physical devices, then the "
14306                         "bufferDeviceAddressMultiDevice feature must: be enabled.");
14307     }
14308 
14309     const auto buffer_state = GetBufferState(pInfo->buffer);
14310     if (buffer_state) {
14311         if (!(buffer_state->createInfo.flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT)) {
14312             skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkGetBufferDeviceAddressEXT()",
14313                                                   "VUID-VkBufferDeviceAddressInfoEXT-buffer-02600");
14314         }
14315 
14316         skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, true,
14317                                          "VUID-VkBufferDeviceAddressInfoEXT-buffer-02601", "vkGetBufferDeviceAddressEXT()",
14318                                          "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT");
14319     }
14320 
14321     return skip;
14322 }
14323 
ValidateQueryRange(VkDevice device,VkQueryPool queryPool,uint32_t totalCount,uint32_t firstQuery,uint32_t queryCount,const char * vuid_badfirst,const char * vuid_badrange) const14324 bool CoreChecks::ValidateQueryRange(VkDevice device, VkQueryPool queryPool, uint32_t totalCount, uint32_t firstQuery,
14325                                     uint32_t queryCount, const char *vuid_badfirst, const char *vuid_badrange) const {
14326     bool skip = false;
14327 
14328     if (firstQuery >= totalCount) {
14329         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
14330                         vuid_badfirst, "firstQuery (%" PRIu32 ") greater than or equal to query pool count (%" PRIu32 ") for %s",
14331                         firstQuery, totalCount, report_data->FormatHandle(queryPool).c_str());
14332     }
14333 
14334     if ((firstQuery + queryCount) > totalCount) {
14335         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
14336                         vuid_badrange, "Query range [%" PRIu32 ", %" PRIu32 ") goes beyond query pool count (%" PRIu32 ") for %s",
14337                         firstQuery, firstQuery + queryCount, totalCount, report_data->FormatHandle(queryPool).c_str());
14338     }
14339 
14340     return skip;
14341 }
14342 
PreCallValidateResetQueryPoolEXT(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount)14343 bool CoreChecks::PreCallValidateResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
14344                                                   uint32_t queryCount) {
14345     if (disabled.query_validation) return false;
14346 
14347     bool skip = false;
14348 
14349     if (!enabled_features.host_query_reset_features.hostQueryReset) {
14350         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
14351                         "VUID-vkResetQueryPoolEXT-None-02665", "Host query reset not enabled for device");
14352     }
14353 
14354     const auto query_pool_state = GetQueryPoolState(queryPool);
14355     if (query_pool_state) {
14356         skip |= ValidateQueryRange(device, queryPool, query_pool_state->createInfo.queryCount, firstQuery, queryCount,
14357                                    "VUID-vkResetQueryPoolEXT-firstQuery-02666", "VUID-vkResetQueryPoolEXT-firstQuery-02667");
14358     }
14359 
14360     return skip;
14361 }
14362 
PostCallRecordResetQueryPoolEXT(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount)14363 void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
14364                                                              uint32_t queryCount) {
14365     // Do nothing if the feature is not enabled.
14366     if (!enabled_features.host_query_reset_features.hostQueryReset) return;
14367 
14368     // Do nothing if the query pool has been destroyed.
14369     auto query_pool_state = GetQueryPoolState(queryPool);
14370     if (!query_pool_state) return;
14371 
14372     // Reset the state of existing entries.
14373     QueryObject query_obj{queryPool, 0};
14374     const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
14375     for (uint32_t i = 0; i < max_query_count; ++i) {
14376         query_obj.query = firstQuery + i;
14377         auto query_it = queryToStateMap.find(query_obj);
14378         if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
14379     }
14380 }
14381 
PreCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,VkPhysicalDeviceProperties * pPhysicalDeviceProperties)14382 void CoreChecks::PreCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
14383                                                           VkPhysicalDeviceProperties *pPhysicalDeviceProperties) {
14384     // There is an implicit layer that can cause this call to return 0 for maxBoundDescriptorSets - Ignore such calls
14385     if (enabled.gpu_validation && enabled.gpu_validation_reserve_binding_slot &&
14386         pPhysicalDeviceProperties->limits.maxBoundDescriptorSets > 0) {
14387         if (pPhysicalDeviceProperties->limits.maxBoundDescriptorSets > 1) {
14388             pPhysicalDeviceProperties->limits.maxBoundDescriptorSets -= 1;
14389         } else {
14390             log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
14391                     HandleToUint64(physicalDevice), "UNASSIGNED-GPU-Assisted Validation Setup Error.",
14392                     "Unable to reserve descriptor binding slot on a device with only one slot.");
14393         }
14394     }
14395 }
14396 
CoreLayerCreateValidationCacheEXT(VkDevice device,const VkValidationCacheCreateInfoEXT * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkValidationCacheEXT * pValidationCache)14397 VkResult CoreChecks::CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT *pCreateInfo,
14398                                                        const VkAllocationCallbacks *pAllocator,
14399                                                        VkValidationCacheEXT *pValidationCache) {
14400     *pValidationCache = ValidationCache::Create(pCreateInfo);
14401     return *pValidationCache ? VK_SUCCESS : VK_ERROR_INITIALIZATION_FAILED;
14402 }
14403 
CoreLayerDestroyValidationCacheEXT(VkDevice device,VkValidationCacheEXT validationCache,const VkAllocationCallbacks * pAllocator)14404 void CoreChecks::CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache,
14405                                                     const VkAllocationCallbacks *pAllocator) {
14406     delete CastFromHandle<ValidationCache *>(validationCache);
14407 }
14408 
CoreLayerGetValidationCacheDataEXT(VkDevice device,VkValidationCacheEXT validationCache,size_t * pDataSize,void * pData)14409 VkResult CoreChecks::CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t *pDataSize,
14410                                                         void *pData) {
14411     size_t inSize = *pDataSize;
14412     CastFromHandle<ValidationCache *>(validationCache)->Write(pDataSize, pData);
14413     return (pData && *pDataSize != inSize) ? VK_INCOMPLETE : VK_SUCCESS;
14414 }
14415 
// Merge the contents of pSrcCaches into dstCache. dstCache must not appear in pSrcCaches
// (VUID-vkMergeValidationCachesEXT-dstCache-01536).
VkResult CoreChecks::CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount,
                                                       const VkValidationCacheEXT *pSrcCaches) {
    bool skip = false;
    auto dst = CastFromHandle<ValidationCache *>(dstCache);
    VkResult result = VK_SUCCESS;
    for (uint32_t i = 0; i < srcCacheCount; i++) {
        auto src = CastFromHandle<const ValidationCache *>(pSrcCaches[i]);
        if (src == dst) {
            // Self-merge is an API-usage error; record it and fail the call.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, 0,
                            "VUID-vkMergeValidationCachesEXT-dstCache-01536",
                            "vkMergeValidationCachesEXT: dstCache (0x%" PRIx64 ") must not appear in pSrcCaches array.",
                            HandleToUint64(dstCache));
            result = VK_ERROR_VALIDATION_FAILED_EXT;
        }
        // NOTE: skip is never cleared inside the loop, so once a self-merge is detected no further
        // source caches are merged — sources earlier in the array have already been merged, later
        // ones are dropped. NOTE(review): looks intentional (stop on first error) — confirm.
        if (!skip) {
            dst->Merge(src);
        }
    }

    return result;
}
14437 
PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer,uint32_t deviceMask)14438 bool CoreChecks::PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {
14439     bool skip = false;
14440     const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
14441 
14442     skip |= ValidateDeviceMaskToPhysicalDeviceCount(deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14443                                                     HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00108");
14444     skip |= ValidateDeviceMaskToZero(deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
14445                                      "VUID-vkCmdSetDeviceMask-deviceMask-00109");
14446     skip |= ValidateDeviceMaskToCommandBuffer(cb_state, deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14447                                               HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00110");
14448     if (cb_state->activeRenderPass) {
14449         skip |= ValidateDeviceMaskToRenderPass(cb_state, deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14450                                                HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00111");
14451     }
14452     return skip;
14453 }
14454 
ValidateQueryPoolStride(const std::string & vuid_not_64,const std::string & vuid_64,const VkDeviceSize stride,const char * parameter_name,const uint64_t parameter_value,const VkQueryResultFlags flags) const14455 bool CoreChecks::ValidateQueryPoolStride(const std::string &vuid_not_64, const std::string &vuid_64, const VkDeviceSize stride,
14456                                          const char *parameter_name, const uint64_t parameter_value,
14457                                          const VkQueryResultFlags flags) const {
14458     bool skip = false;
14459     if (flags & VK_QUERY_RESULT_64_BIT) {
14460         static const int condition_multiples = 0b0111;
14461         if ((stride & condition_multiples) || (parameter_value & condition_multiples)) {
14462             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid_64,
14463                             "stride %" PRIx64 " or %s %" PRIx64 " is invalid.", stride, parameter_name, parameter_value);
14464         }
14465     } else {
14466         static const int condition_multiples = 0b0011;
14467         if ((stride & condition_multiples) || (parameter_value & condition_multiples)) {
14468             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid_not_64,
14469                             "stride %" PRIx64 " or %s %" PRIx64 " is invalid.", stride, parameter_name, parameter_value);
14470         }
14471     }
14472     return skip;
14473 }
14474 
ValidateCmdDrawStrideWithStruct(VkCommandBuffer commandBuffer,const std::string & vuid,const uint32_t stride,const char * struct_name,const uint32_t struct_size) const14475 bool CoreChecks::ValidateCmdDrawStrideWithStruct(VkCommandBuffer commandBuffer, const std::string &vuid, const uint32_t stride,
14476                                                  const char *struct_name, const uint32_t struct_size) const {
14477     bool skip = false;
14478     static const int condition_multiples = 0b0011;
14479     if ((stride & condition_multiples) || (stride < struct_size)) {
14480         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14481                         HandleToUint64(commandBuffer), vuid, "stride %d is invalid or less than sizeof(%s) %d.", stride,
14482                         struct_name, struct_size);
14483     }
14484     return skip;
14485 }
14486 
ValidateCmdDrawStrideWithBuffer(VkCommandBuffer commandBuffer,const std::string & vuid,const uint32_t stride,const char * struct_name,const uint32_t struct_size,const uint32_t drawCount,const VkDeviceSize offset,const BUFFER_STATE * buffer_state) const14487 bool CoreChecks::ValidateCmdDrawStrideWithBuffer(VkCommandBuffer commandBuffer, const std::string &vuid, const uint32_t stride,
14488                                                  const char *struct_name, const uint32_t struct_size, const uint32_t drawCount,
14489                                                  const VkDeviceSize offset, const BUFFER_STATE *buffer_state) const {
14490     bool skip = false;
14491     uint64_t validation_value = stride * (drawCount - 1) + offset + struct_size;
14492     if (validation_value > buffer_state->createInfo.size) {
14493         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
14494                         HandleToUint64(commandBuffer), vuid,
14495                         "stride[%d] * (drawCount[%d] - 1) + offset[%" PRIx64 "] + sizeof(%s)[%d] = %" PRIx64
14496                         " is greater than the size[%" PRIx64 "] of %s.",
14497                         stride, drawCount, offset, struct_name, struct_size, validation_value, buffer_state->createInfo.size,
14498                         report_data->FormatHandle(buffer_state->buffer).c_str());
14499     }
14500     return skip;
14501 }
14502 
initGraphicsPipeline(ValidationStateTracker * state_data,const VkGraphicsPipelineCreateInfo * pCreateInfo,std::shared_ptr<RENDER_PASS_STATE> && rpstate)14503 void PIPELINE_STATE::initGraphicsPipeline(ValidationStateTracker *state_data, const VkGraphicsPipelineCreateInfo *pCreateInfo,
14504                                           std::shared_ptr<RENDER_PASS_STATE> &&rpstate) {
14505     reset();
14506     bool uses_color_attachment = false;
14507     bool uses_depthstencil_attachment = false;
14508     if (pCreateInfo->subpass < rpstate->createInfo.subpassCount) {
14509         const auto &subpass = rpstate->createInfo.pSubpasses[pCreateInfo->subpass];
14510 
14511         for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
14512             if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
14513                 uses_color_attachment = true;
14514                 break;
14515             }
14516         }
14517 
14518         if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
14519             uses_depthstencil_attachment = true;
14520         }
14521     }
14522     graphicsPipelineCI.initialize(pCreateInfo, uses_color_attachment, uses_depthstencil_attachment);
14523     if (graphicsPipelineCI.pInputAssemblyState) {
14524         topology_at_rasterizer = graphicsPipelineCI.pInputAssemblyState->topology;
14525     }
14526 
14527     stage_state.resize(pCreateInfo->stageCount);
14528     for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
14529         const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
14530         this->duplicate_shaders |= this->active_shaders & pPSSCI->stage;
14531         this->active_shaders |= pPSSCI->stage;
14532         state_data->RecordPipelineShaderStage(pPSSCI, this, &stage_state[i]);
14533     }
14534 
14535     if (graphicsPipelineCI.pVertexInputState) {
14536         const auto pVICI = graphicsPipelineCI.pVertexInputState;
14537         if (pVICI->vertexBindingDescriptionCount) {
14538             this->vertex_binding_descriptions_ = std::vector<VkVertexInputBindingDescription>(
14539                 pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount);
14540 
14541             this->vertex_binding_to_index_map_.reserve(pVICI->vertexBindingDescriptionCount);
14542             for (uint32_t i = 0; i < pVICI->vertexBindingDescriptionCount; ++i) {
14543                 this->vertex_binding_to_index_map_[pVICI->pVertexBindingDescriptions[i].binding] = i;
14544             }
14545         }
14546         if (pVICI->vertexAttributeDescriptionCount) {
14547             this->vertex_attribute_descriptions_ = std::vector<VkVertexInputAttributeDescription>(
14548                 pVICI->pVertexAttributeDescriptions, pVICI->pVertexAttributeDescriptions + pVICI->vertexAttributeDescriptionCount);
14549         }
14550     }
14551     if (graphicsPipelineCI.pColorBlendState) {
14552         const auto pCBCI = graphicsPipelineCI.pColorBlendState;
14553         if (pCBCI->attachmentCount) {
14554             this->attachments =
14555                 std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments, pCBCI->pAttachments + pCBCI->attachmentCount);
14556         }
14557     }
14558     rp_state = rpstate;
14559 }
14560 
initComputePipeline(ValidationStateTracker * state_data,const VkComputePipelineCreateInfo * pCreateInfo)14561 void PIPELINE_STATE::initComputePipeline(ValidationStateTracker *state_data, const VkComputePipelineCreateInfo *pCreateInfo) {
14562     reset();
14563     computePipelineCI.initialize(pCreateInfo);
14564     switch (computePipelineCI.stage.stage) {
14565         case VK_SHADER_STAGE_COMPUTE_BIT:
14566             this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
14567             stage_state.resize(1);
14568             state_data->RecordPipelineShaderStage(&pCreateInfo->stage, this, &stage_state[0]);
14569             break;
14570         default:
14571             // TODO : Flag error
14572             break;
14573     }
14574 }
14575 
initRayTracingPipelineNV(ValidationStateTracker * state_data,const VkRayTracingPipelineCreateInfoNV * pCreateInfo)14576 void PIPELINE_STATE::initRayTracingPipelineNV(ValidationStateTracker *state_data,
14577                                               const VkRayTracingPipelineCreateInfoNV *pCreateInfo) {
14578     reset();
14579     raytracingPipelineCI.initialize(pCreateInfo);
14580 
14581     stage_state.resize(pCreateInfo->stageCount);
14582     for (uint32_t stage_index = 0; stage_index < pCreateInfo->stageCount; stage_index++) {
14583         const auto &shader_stage = pCreateInfo->pStages[stage_index];
14584         switch (shader_stage.stage) {
14585             case VK_SHADER_STAGE_RAYGEN_BIT_NV:
14586                 this->active_shaders |= VK_SHADER_STAGE_RAYGEN_BIT_NV;
14587                 break;
14588             case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
14589                 this->active_shaders |= VK_SHADER_STAGE_ANY_HIT_BIT_NV;
14590                 break;
14591             case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
14592                 this->active_shaders |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
14593                 break;
14594             case VK_SHADER_STAGE_MISS_BIT_NV:
14595                 this->active_shaders |= VK_SHADER_STAGE_MISS_BIT_NV;
14596                 break;
14597             case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
14598                 this->active_shaders |= VK_SHADER_STAGE_INTERSECTION_BIT_NV;
14599                 break;
14600             case VK_SHADER_STAGE_CALLABLE_BIT_NV:
14601                 this->active_shaders |= VK_SHADER_STAGE_CALLABLE_BIT_NV;
14602                 break;
14603             default:
14604                 // TODO : Flag error
14605                 break;
14606         }
14607         state_data->RecordPipelineShaderStage(&shader_stage, this, &stage_state[stage_index]);
14608     }
14609 }
14610