1#!/usr/bin/python3 -i 2# 3# Copyright (c) 2015-2019 Valve Corporation 4# Copyright (c) 2015-2019 LunarG, Inc. 5# Copyright (c) 2015-2019 Google Inc. 6# 7# Licensed under the Apache License, Version 2.0 (the "License"); 8# you may not use this file except in compliance with the License. 9# You may obtain a copy of the License at 10# 11# http://www.apache.org/licenses/LICENSE-2.0 12# 13# Unless required by applicable law or agreed to in writing, software 14# distributed under the License is distributed on an "AS IS" BASIS, 15# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16# See the License for the specific language governing permissions and 17# limitations under the License. 18# 19# Author: Tobin Ehlis <tobine@google.com> 20# Author: Mark Lobodzinski <mark@lunarg.com> 21# 22# This script generates the dispatch portion of a factory layer which intercepts 23# all Vulkan functions. The resultant factory layer allows rapid development of 24# layers and interceptors. 25 26import os,re,sys 27from generator import * 28from common_codegen import * 29 30# LayerFactoryGeneratorOptions - subclass of GeneratorOptions. 31# 32# Adds options used by LayerFactoryOutputGenerator objects during factory 33# layer generation. 34# 35# Additional members 36# prefixText - list of strings to prefix generated header with 37# (usually a copyright statement + calling convention macros). 38# protectFile - True if multiple inclusion protection should be 39# generated (based on the filename) around the entire header. 40# protectFeature - True if #ifndef..#endif protection should be 41# generated around a feature interface in the header file. 42# genFuncPointers - True if function pointer typedefs should be 43# generated 44# protectProto - If conditional protection should be generated 45# around prototype declarations, set to either '#ifdef' 46# to require opt-in (#ifdef protectProtoStr) or '#ifndef' 47# to require opt-out (#ifndef protectProtoStr). 
Otherwise 48# set to None. 49# protectProtoStr - #ifdef/#ifndef symbol to use around prototype 50# declarations, if protectProto is set 51# apicall - string to use for the function declaration prefix, 52# such as APICALL on Windows. 53# apientry - string to use for the calling convention macro, 54# in typedefs, such as APIENTRY. 55# apientryp - string to use for the calling convention macro 56# in function pointer typedefs, such as APIENTRYP. 57# indentFuncProto - True if prototype declarations should put each 58# parameter on a separate line 59# indentFuncPointer - True if typedefed function pointers should put each 60# parameter on a separate line 61# alignFuncParam - if nonzero and parameters are being put on a 62# separate line, align parameter names at the specified column 63class LayerChassisGeneratorOptions(GeneratorOptions): 64 def __init__(self, 65 conventions = None, 66 filename = None, 67 directory = '.', 68 apiname = None, 69 profile = None, 70 versions = '.*', 71 emitversions = '.*', 72 defaultExtensions = None, 73 addExtensions = None, 74 removeExtensions = None, 75 emitExtensions = None, 76 sortProcedure = regSortFeatures, 77 prefixText = "", 78 genFuncPointers = True, 79 protectFile = True, 80 protectFeature = True, 81 apicall = '', 82 apientry = '', 83 apientryp = '', 84 indentFuncProto = True, 85 indentFuncPointer = False, 86 alignFuncParam = 0, 87 helper_file_type = '', 88 expandEnumerants = True): 89 GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile, 90 versions, emitversions, defaultExtensions, 91 addExtensions, removeExtensions, emitExtensions, sortProcedure) 92 self.prefixText = prefixText 93 self.genFuncPointers = genFuncPointers 94 self.protectFile = protectFile 95 self.protectFeature = protectFeature 96 self.apicall = apicall 97 self.apientry = apientry 98 self.apientryp = apientryp 99 self.indentFuncProto = indentFuncProto 100 self.indentFuncPointer = indentFuncPointer 101 self.alignFuncParam = 
alignFuncParam 102 103# LayerChassisOutputGenerator - subclass of OutputGenerator. 104# Generates a LayerFactory layer that intercepts all API entrypoints 105# This is intended to be used as a starting point for creating custom layers 106# 107# ---- methods ---- 108# LayerChassisOutputGenerator(errFile, warnFile, diagFile) - args as for 109# OutputGenerator. Defines additional internal state. 110# ---- methods overriding base class ---- 111# beginFile(genOpts) 112# endFile() 113# beginFeature(interface, emit) 114# endFeature() 115# genType(typeinfo,name) 116# genStruct(typeinfo,name) 117# genGroup(groupinfo,name) 118# genEnum(enuminfo, name) 119# genCmd(cmdinfo) 120class LayerChassisOutputGenerator(OutputGenerator): 121 """Generate specified API interfaces in a specific style, such as a C header""" 122 # This is an ordered list of sections in the header file. 123 TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum', 124 'group', 'bitmask', 'funcpointer', 'struct'] 125 ALL_SECTIONS = TYPE_SECTIONS + ['command'] 126 127 manual_functions = [ 128 # Include functions here to be interecpted w/ manually implemented function bodies 129 'vkGetDeviceProcAddr', 130 'vkGetInstanceProcAddr', 131 'vkCreateDevice', 132 'vkDestroyDevice', 133 'vkCreateInstance', 134 'vkDestroyInstance', 135 'vkEnumerateInstanceLayerProperties', 136 'vkEnumerateInstanceExtensionProperties', 137 'vkEnumerateDeviceLayerProperties', 138 'vkEnumerateDeviceExtensionProperties', 139 # Functions that are handled explicitly due to chassis architecture violations 140 'vkCreateGraphicsPipelines', 141 'vkCreateComputePipelines', 142 'vkCreateRayTracingPipelinesNV', 143 'vkCreatePipelineLayout', 144 'vkCreateShaderModule', 145 'vkAllocateDescriptorSets', 146 # ValidationCache functions do not get dispatched 147 'vkCreateValidationCacheEXT', 148 'vkDestroyValidationCacheEXT', 149 'vkMergeValidationCachesEXT', 150 'vkGetValidationCacheDataEXT', 151 # We don't wanna hook this function 152 
'vkGetPhysicalDeviceProcAddr', 153 ] 154 155 alt_ret_codes = [ 156 # Include functions here which must tolerate VK_INCOMPLETE as a return code 157 'vkEnumeratePhysicalDevices', 158 'vkEnumeratePhysicalDeviceGroupsKHR', 159 'vkGetValidationCacheDataEXT', 160 'vkGetPipelineCacheData', 161 'vkGetShaderInfoAMD', 162 'vkGetPhysicalDeviceDisplayPropertiesKHR', 163 'vkGetPhysicalDeviceDisplayProperties2KHR', 164 'vkGetPhysicalDeviceDisplayPlanePropertiesKHR', 165 'vkGetDisplayPlaneSupportedDisplaysKHR', 166 'vkGetDisplayModePropertiesKHR', 167 'vkGetDisplayModeProperties2KHR', 168 'vkGetPhysicalDeviceSurfaceFormatsKHR', 169 'vkGetPhysicalDeviceSurfacePresentModesKHR', 170 'vkGetPhysicalDevicePresentRectanglesKHR', 171 'vkGetPastPresentationTimingGOOGLE', 172 'vkGetSwapchainImagesKHR', 173 'vkEnumerateInstanceLayerProperties', 174 'vkEnumerateDeviceLayerProperties', 175 'vkEnumerateInstanceExtensionProperties', 176 'vkEnumerateDeviceExtensionProperties', 177 'vkGetPhysicalDeviceCalibrateableTimeDomainsEXT', 178 ] 179 180 pre_dispatch_debug_utils_functions = { 181 'vkDebugMarkerSetObjectNameEXT' : 'layer_data->report_data->DebugReportSetMarkerObjectName(pNameInfo);', 182 'vkSetDebugUtilsObjectNameEXT' : 'layer_data->report_data->DebugReportSetUtilsObjectName(pNameInfo);', 183 'vkQueueBeginDebugUtilsLabelEXT' : 'BeginQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);', 184 'vkQueueInsertDebugUtilsLabelEXT' : 'InsertQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);', 185 } 186 187 post_dispatch_debug_utils_functions = { 188 'vkQueueEndDebugUtilsLabelEXT' : 'EndQueueDebugUtilsLabel(layer_data->report_data, queue);', 189 'vkCreateDebugReportCallbackEXT' : 'layer_create_report_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pCallback);', 190 'vkDestroyDebugReportCallbackEXT' : 'layer_destroy_report_callback(layer_data->report_data, callback, pAllocator);', 191 'vkCreateDebugUtilsMessengerEXT' : 
'layer_create_messenger_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pMessenger);', 192 'vkDestroyDebugUtilsMessengerEXT' : 'layer_destroy_messenger_callback(layer_data->report_data, messenger, pAllocator);', 193 } 194 195 precallvalidate_loop = "for (auto intercept : layer_data->object_dispatch) {" 196 precallrecord_loop = precallvalidate_loop 197 postcallrecord_loop = "for (auto intercept : layer_data->object_dispatch) {" 198 199 inline_custom_header_preamble = """ 200#define NOMINMAX 201#include <atomic> 202#include <mutex> 203#include <cinttypes> 204#include <stdio.h> 205#include <stdlib.h> 206#include <string.h> 207#include <unordered_map> 208#include <unordered_set> 209#include <algorithm> 210#include <memory> 211 212#include "vk_loader_platform.h" 213#include "vulkan/vulkan.h" 214#include "vk_layer_config.h" 215#include "vk_layer_data.h" 216#include "vk_layer_logging.h" 217#include "vk_object_types.h" 218#include "vulkan/vk_layer.h" 219#include "vk_enum_string_helper.h" 220#include "vk_layer_extension_utils.h" 221#include "vk_layer_utils.h" 222#include "vulkan/vk_layer.h" 223#include "vk_dispatch_table_helper.h" 224#include "vk_extension_helper.h" 225#include "vk_safe_struct.h" 226#include "vk_typemap_helper.h" 227 228 229extern std::atomic<uint64_t> global_unique_id; 230extern vl_concurrent_unordered_map<uint64_t, uint64_t, 4> unique_id_mapping; 231""" 232 233 inline_custom_header_class_definition = """ 234 235// Layer object type identifiers 236enum LayerObjectTypeId { 237 LayerObjectTypeInstance, // Container for an instance dispatch object 238 LayerObjectTypeDevice, // Container for a device dispatch object 239 LayerObjectTypeThreading, // Instance or device threading layer object 240 LayerObjectTypeParameterValidation, // Instance or device parameter validation layer object 241 LayerObjectTypeObjectTracker, // Instance or device object tracker layer object 242 LayerObjectTypeCoreValidation, // Instance or device core validation 
layer object 243 LayerObjectTypeBestPractices, // Instance or device best practices layer object 244}; 245 246struct TEMPLATE_STATE { 247 VkDescriptorUpdateTemplateKHR desc_update_template; 248 safe_VkDescriptorUpdateTemplateCreateInfo create_info; 249 250 TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfo *pCreateInfo) 251 : desc_update_template(update_template), create_info(*pCreateInfo) {} 252}; 253 254class LAYER_PHYS_DEV_PROPERTIES { 255public: 256 VkPhysicalDeviceProperties properties; 257 std::vector<VkQueueFamilyProperties> queue_family_properties; 258}; 259 260typedef enum ValidationCheckDisables { 261 VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE, 262 VALIDATION_CHECK_DISABLE_OBJECT_IN_USE, 263 VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET, 264 VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE, 265 VALIDATION_CHECK_DISABLE_QUERY_VALIDATION, 266 VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION, 267} ValidationCheckDisables; 268 269typedef enum VkValidationFeatureEnable { 270 VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES, 271} VkValidationFeatureEnable; 272 273 274// CHECK_DISABLED struct is a container for bools that can block validation checks from being performed. 275// These bools are all "false" by default meaning that all checks are enabled. Enum values can be specified 276// via the vk_layer_setting.txt config file or at CreateInstance time via the VK_EXT_validation_features extension 277// that can selectively disable checks. 
278struct CHECK_DISABLED { 279 bool command_buffer_state; // Skip command buffer state validation 280 bool object_in_use; // Skip all object in_use checking 281 bool idle_descriptor_set; // Skip check to verify that descriptor set is not in-use 282 bool push_constant_range; // Skip push constant range checks 283 bool query_validation; // Disable all core validation query-related checks 284 bool image_layout_validation; // Disable image layout validation 285 bool object_tracking; // Disable object lifetime validation 286 bool core_checks; // Disable core validation checks 287 bool thread_safety; // Disable thread safety validation 288 bool stateless_checks; // Disable stateless validation checks 289 bool handle_wrapping; // Disable unique handles/handle wrapping 290 bool shader_validation; // Skip validation for shaders 291 292 void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); } 293}; 294 295struct CHECK_ENABLED { 296 bool gpu_validation; 297 bool gpu_validation_reserve_binding_slot; 298 bool best_practices; 299 300 void SetAll(bool value) { std::fill(&gpu_validation, &gpu_validation_reserve_binding_slot + 1, value); } 301}; 302 303// Layer chassis validation object base class definition 304class ValidationObject { 305 public: 306 uint32_t api_version; 307 debug_report_data* report_data = nullptr; 308 std::vector<VkDebugReportCallbackEXT> logging_callback; 309 std::vector<VkDebugUtilsMessengerEXT> logging_messenger; 310 311 VkLayerInstanceDispatchTable instance_dispatch_table; 312 VkLayerDispatchTable device_dispatch_table; 313 314 InstanceExtensions instance_extensions; 315 DeviceExtensions device_extensions = {}; 316 CHECK_DISABLED disabled = {}; 317 CHECK_ENABLED enabled = {}; 318 319 VkInstance instance = VK_NULL_HANDLE; 320 VkPhysicalDevice physical_device = VK_NULL_HANDLE; 321 VkDevice device = VK_NULL_HANDLE; 322 LAYER_PHYS_DEV_PROPERTIES phys_dev_properties = {}; 323 324 std::vector<ValidationObject*> object_dispatch; 
325 LayerObjectTypeId container_type; 326 327 std::string layer_name = "CHASSIS"; 328 329 // Constructor 330 ValidationObject(){}; 331 // Destructor 332 virtual ~ValidationObject() {}; 333 334 std::mutex validation_object_mutex; 335 virtual std::unique_lock<std::mutex> write_lock() { 336 return std::unique_lock<std::mutex>(validation_object_mutex); 337 } 338 339 ValidationObject* GetValidationObject(std::vector<ValidationObject*>& object_dispatch, LayerObjectTypeId object_type) { 340 for (auto validation_object : object_dispatch) { 341 if (validation_object->container_type == object_type) { 342 return validation_object; 343 } 344 } 345 return nullptr; 346 }; 347 348 // Handle Wrapping Data 349 // Reverse map display handles 350 vl_concurrent_unordered_map<VkDisplayKHR, uint64_t, 0> display_id_reverse_mapping; 351 // Wrapping Descriptor Template Update structures requires access to the template createinfo structs 352 std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_createinfo_map; 353 struct SubpassesUsageStates { 354 std::unordered_set<uint32_t> subpasses_using_color_attachment; 355 std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment; 356 }; 357 // Uses unwrapped handles 358 std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states; 359 // Map of wrapped swapchain handles to arrays of wrapped swapchain image IDs 360 // Each swapchain has an immutable list of wrapped swapchain image IDs -- always return these IDs if they exist 361 std::unordered_map<VkSwapchainKHR, std::vector<VkImage>> swapchain_wrapped_image_handle_map; 362 // Map of wrapped descriptor pools to set of wrapped descriptor sets allocated from each pool 363 std::unordered_map<VkDescriptorPool, std::unordered_set<VkDescriptorSet>> pool_descriptor_sets_map; 364 365 366 // Unwrap a handle. 
367 template <typename HandleType> 368 HandleType Unwrap(HandleType wrappedHandle) { 369 auto iter = unique_id_mapping.find(reinterpret_cast<uint64_t const &>(wrappedHandle)); 370 if (iter == unique_id_mapping.end()) 371 return (HandleType)0; 372 return (HandleType)iter->second; 373 } 374 375 // Wrap a newly created handle with a new unique ID, and return the new ID. 376 template <typename HandleType> 377 HandleType WrapNew(HandleType newlyCreatedHandle) { 378 auto unique_id = global_unique_id++; 379 unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle)); 380 return (HandleType)unique_id; 381 } 382 383 // Specialized handling for VkDisplayKHR. Adds an entry to enable reverse-lookup. 384 VkDisplayKHR WrapDisplay(VkDisplayKHR newlyCreatedHandle, ValidationObject *map_data) { 385 auto unique_id = global_unique_id++; 386 unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle)); 387 map_data->display_id_reverse_mapping.insert_or_assign(newlyCreatedHandle, unique_id); 388 return (VkDisplayKHR)unique_id; 389 } 390 391 // VkDisplayKHR objects don't have a single point of creation, so we need to see if one already exists in the map before 392 // creating another. 393 VkDisplayKHR MaybeWrapDisplay(VkDisplayKHR handle, ValidationObject *map_data) { 394 // See if this display is already known 395 auto it = map_data->display_id_reverse_mapping.find(handle); 396 if (it != map_data->display_id_reverse_mapping.end()) return (VkDisplayKHR)it->second; 397 // Unknown, so wrap 398 return WrapDisplay(handle, map_data); 399 } 400 401 // Pre/post hook point declarations 402""" 403 404 inline_copyright_message = """ 405// This file is ***GENERATED***. Do Not Edit. 406// See layer_chassis_generator.py for modifications. 407 408/* Copyright (c) 2015-2019 The Khronos Group Inc. 409 * Copyright (c) 2015-2019 Valve Corporation 410 * Copyright (c) 2015-2019 LunarG, Inc. 
411 * Copyright (c) 2015-2019 Google Inc. 412 * 413 * Licensed under the Apache License, Version 2.0 (the "License"); 414 * you may not use this file except in compliance with the License. 415 * You may obtain a copy of the License at 416 * 417 * http://www.apache.org/licenses/LICENSE-2.0 418 * 419 * Unless required by applicable law or agreed to in writing, software 420 * distributed under the License is distributed on an "AS IS" BASIS, 421 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 422 * See the License for the specific language governing permissions and 423 * limitations under the License. 424 * 425 * Author: Mark Lobodzinski <mark@lunarg.com> 426 */""" 427 428 inline_custom_source_preamble = """ 429 430#include <string.h> 431#include <mutex> 432 433#define VALIDATION_ERROR_MAP_IMPL 434 435#include "chassis.h" 436#include "layer_chassis_dispatch.h" 437 438std::unordered_map<void*, ValidationObject*> layer_data_map; 439 440// Global unique object identifier. 441std::atomic<uint64_t> global_unique_id(1ULL); 442// Map uniqueID to actual object handle. Accesses to the map itself are 443// internally synchronized. 444vl_concurrent_unordered_map<uint64_t, uint64_t, 4> unique_id_mapping; 445 446// TODO: This variable controls handle wrapping -- in the future it should be hooked 447// up to the new VALIDATION_FEATURES extension. Temporarily, control with a compile-time flag. 
448#if defined(LAYER_CHASSIS_CAN_WRAP_HANDLES) 449bool wrap_handles = true; 450#else 451bool wrap_handles = false; 452#endif 453 454// Set layer name -- Khronos layer name overrides any other defined names 455#if BUILD_KHRONOS_VALIDATION 456#define OBJECT_LAYER_NAME "VK_LAYER_KHRONOS_validation" 457#define OBJECT_LAYER_DESCRIPTION "khronos_validation" 458#elif BUILD_OBJECT_TRACKER 459#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_object_tracker" 460#define OBJECT_LAYER_DESCRIPTION "lunarg_object_tracker" 461#elif BUILD_THREAD_SAFETY 462#define OBJECT_LAYER_NAME "VK_LAYER_GOOGLE_threading" 463#define OBJECT_LAYER_DESCRIPTION "google_thread_checker" 464#elif BUILD_PARAMETER_VALIDATION 465#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_parameter_validation" 466#define OBJECT_LAYER_DESCRIPTION "lunarg_parameter_validation" 467#elif BUILD_CORE_VALIDATION 468#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_core_validation" 469#define OBJECT_LAYER_DESCRIPTION "lunarg_core_validation" 470#else 471#define OBJECT_LAYER_NAME "VK_LAYER_GOOGLE_unique_objects" 472#define OBJECT_LAYER_DESCRIPTION "lunarg_unique_objects" 473#endif 474 475// Include layer validation object definitions 476#if BUILD_OBJECT_TRACKER 477#include "object_lifetime_validation.h" 478#endif 479#if BUILD_THREAD_SAFETY 480#include "thread_safety.h" 481#endif 482#if BUILD_PARAMETER_VALIDATION 483#include "stateless_validation.h" 484#endif 485#if BUILD_CORE_VALIDATION 486#include "core_validation.h" 487#endif 488#if BUILD_BEST_PRACTICES 489#include "best_practices.h" 490#endif 491 492namespace vulkan_layer_chassis { 493 494using std::unordered_map; 495 496static const VkLayerProperties global_layer = { 497 OBJECT_LAYER_NAME, VK_LAYER_API_VERSION, 1, "LunarG validation Layer", 498}; 499 500static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}, 501 {VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION}}; 502static const 
VkExtensionProperties device_extensions[] = { 503 {VK_EXT_VALIDATION_CACHE_EXTENSION_NAME, VK_EXT_VALIDATION_CACHE_SPEC_VERSION}, 504 {VK_EXT_DEBUG_MARKER_EXTENSION_NAME, VK_EXT_DEBUG_MARKER_SPEC_VERSION}, 505}; 506 507typedef struct { 508 bool is_instance_api; 509 void* funcptr; 510} function_data; 511 512extern const std::unordered_map<std::string, function_data> name_to_funcptr_map; 513 514// Manually written functions 515 516// Check enabled instance extensions against supported instance extension whitelist 517static void InstanceExtensionWhitelist(ValidationObject *layer_data, const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) { 518 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { 519 // Check for recognized instance extensions 520 if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) { 521 log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 522 kVUIDUndefined, 523 "Instance Extension %s is not supported by this layer. Using this extension may adversely affect validation " 524 "results and/or produce undefined behavior.", 525 pCreateInfo->ppEnabledExtensionNames[i]); 526 } 527 } 528} 529 530// Check enabled device extensions against supported device extension whitelist 531static void DeviceExtensionWhitelist(ValidationObject *layer_data, const VkDeviceCreateInfo *pCreateInfo, VkDevice device) { 532 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { 533 // Check for recognized device extensions 534 if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) { 535 log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 536 kVUIDUndefined, 537 "Device Extension %s is not supported by this layer. 
Using this extension may adversely affect validation " 538 "results and/or produce undefined behavior.", 539 pCreateInfo->ppEnabledExtensionNames[i]); 540 } 541 } 542} 543 544 545// Process validation features, flags and settings specified through extensions, a layer settings file, or environment variables 546 547static const std::unordered_map<std::string, VkValidationFeatureDisableEXT> VkValFeatureDisableLookup = { 548 {"VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT", VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT}, 549 {"VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT", VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT}, 550 {"VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT", VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT}, 551 {"VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT", VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT}, 552 {"VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT", VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT}, 553 {"VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT", VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT}, 554 {"VK_VALIDATION_FEATURE_DISABLE_ALL_EXT", VK_VALIDATION_FEATURE_DISABLE_ALL_EXT}, 555}; 556 557static const std::unordered_map<std::string, VkValidationFeatureEnableEXT> VkValFeatureEnableLookup = { 558 {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT}, 559 {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT}, 560}; 561 562static const std::unordered_map<std::string, VkValidationFeatureEnable> VkValFeatureEnableLookup2 = { 563 {"VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES", VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES}, 564}; 565 566static const std::unordered_map<std::string, ValidationCheckDisables> ValidationDisableLookup = { 567 {"VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE", VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE}, 568 {"VALIDATION_CHECK_DISABLE_OBJECT_IN_USE", 
VALIDATION_CHECK_DISABLE_OBJECT_IN_USE}, 569 {"VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET", VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET}, 570 {"VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE", VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE}, 571 {"VALIDATION_CHECK_DISABLE_QUERY_VALIDATION", VALIDATION_CHECK_DISABLE_QUERY_VALIDATION}, 572 {"VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION", VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION}, 573}; 574 575// Set the local disable flag for the appropriate VALIDATION_CHECK_DISABLE enum 576void SetValidationDisable(CHECK_DISABLED* disable_data, const ValidationCheckDisables disable_id) { 577 switch (disable_id) { 578 case VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE: 579 disable_data->command_buffer_state = true; 580 break; 581 case VALIDATION_CHECK_DISABLE_OBJECT_IN_USE: 582 disable_data->object_in_use = true; 583 break; 584 case VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET: 585 disable_data->idle_descriptor_set = true; 586 break; 587 case VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE: 588 disable_data->push_constant_range = true; 589 break; 590 case VALIDATION_CHECK_DISABLE_QUERY_VALIDATION: 591 disable_data->query_validation = true; 592 break; 593 case VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION: 594 disable_data->image_layout_validation = true; 595 break; 596 default: 597 assert(true); 598 } 599} 600 601// Set the local disable flag for a single VK_VALIDATION_FEATURE_DISABLE_* flag 602void SetValidationFeatureDisable(CHECK_DISABLED* disable_data, const VkValidationFeatureDisableEXT feature_disable) { 603 switch (feature_disable) { 604 case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT: 605 disable_data->shader_validation = true; 606 break; 607 case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT: 608 disable_data->thread_safety = true; 609 break; 610 case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT: 611 disable_data->stateless_checks = true; 612 break; 613 case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT: 
614 disable_data->object_tracking = true; 615 break; 616 case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT: 617 disable_data->core_checks = true; 618 break; 619 case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT: 620 disable_data->handle_wrapping = true; 621 break; 622 case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT: 623 // Set all disabled flags to true 624 disable_data->SetAll(true); 625 break; 626 default: 627 break; 628 } 629} 630 631// Set the local enable flag for a single VK_VALIDATION_FEATURE_ENABLE_* flag 632void SetValidationFeatureEnable(CHECK_ENABLED *enable_data, const VkValidationFeatureEnableEXT feature_enable) { 633 switch (feature_enable) { 634 case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT: 635 enable_data->gpu_validation = true; 636 break; 637 case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT: 638 enable_data->gpu_validation_reserve_binding_slot = true; 639 break; 640 default: 641 break; 642 } 643} 644 645void SetValidationFeatureEnable(CHECK_ENABLED *enable_data, const VkValidationFeatureEnable feature_enable) { 646 switch(feature_enable) { 647 case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES: 648 enable_data->best_practices = true; 649 break; 650 default: 651 break; 652 } 653} 654 655// Set the local disable flag for settings specified through the VK_EXT_validation_flags extension 656void SetValidationFlags(CHECK_DISABLED* disables, const VkValidationFlagsEXT* val_flags_struct) { 657 for (uint32_t i = 0; i < val_flags_struct->disabledValidationCheckCount; ++i) { 658 switch (val_flags_struct->pDisabledValidationChecks[i]) { 659 case VK_VALIDATION_CHECK_SHADERS_EXT: 660 disables->shader_validation = true; 661 break; 662 case VK_VALIDATION_CHECK_ALL_EXT: 663 // Set all disabled flags to true 664 disables->SetAll(true); 665 break; 666 default: 667 break; 668 } 669 } 670} 671 672// Process Validation Features flags specified through the ValidationFeature extension 673void SetValidationFeatures(CHECK_DISABLED *disable_data, 
CHECK_ENABLED *enable_data, 674 const VkValidationFeaturesEXT *val_features_struct) { 675 for (uint32_t i = 0; i < val_features_struct->disabledValidationFeatureCount; ++i) { 676 SetValidationFeatureDisable(disable_data, val_features_struct->pDisabledValidationFeatures[i]); 677 } 678 for (uint32_t i = 0; i < val_features_struct->enabledValidationFeatureCount; ++i) { 679 SetValidationFeatureEnable(enable_data, val_features_struct->pEnabledValidationFeatures[i]); 680 } 681} 682 683// Given a string representation of a list of enable enum values, call the appropriate setter function 684void SetLocalEnableSetting(std::string list_of_enables, std::string delimiter, CHECK_ENABLED* enables) { 685 size_t pos = 0; 686 std::string token; 687 while (list_of_enables.length() != 0) { 688 pos = list_of_enables.find(delimiter); 689 if (pos != std::string::npos) { 690 token = list_of_enables.substr(0, pos); 691 } else { 692 pos = list_of_enables.length() - delimiter.length(); 693 token = list_of_enables; 694 } 695 if (token.find("VK_VALIDATION_FEATURE_ENABLE_") != std::string::npos) { 696 auto result = VkValFeatureEnableLookup.find(token); 697 if (result != VkValFeatureEnableLookup.end()) { 698 SetValidationFeatureEnable(enables, result->second); 699 } else { 700 auto result2 = VkValFeatureEnableLookup2.find(token); 701 if (result2 != VkValFeatureEnableLookup2.end()) { 702 SetValidationFeatureEnable(enables, result2->second); 703 } 704 } 705 } 706 list_of_enables.erase(0, pos + delimiter.length()); 707 } 708} 709 710// Given a string representation of a list of disable enum values, call the appropriate setter function 711void SetLocalDisableSetting(std::string list_of_disables, std::string delimiter, CHECK_DISABLED* disables) { 712 size_t pos = 0; 713 std::string token; 714 while (list_of_disables.length() != 0) { 715 pos = list_of_disables.find(delimiter); 716 if (pos != std::string::npos) { 717 token = list_of_disables.substr(0, pos); 718 } else { 719 pos = 
list_of_disables.length() - delimiter.length(); 720 token = list_of_disables; 721 } 722 if (token.find("VK_VALIDATION_FEATURE_DISABLE_") != std::string::npos) { 723 auto result = VkValFeatureDisableLookup.find(token); 724 if (result != VkValFeatureDisableLookup.end()) { 725 SetValidationFeatureDisable(disables, result->second); 726 } 727 } 728 if (token.find("VALIDATION_CHECK_DISABLE_") != std::string::npos) { 729 auto result = ValidationDisableLookup.find(token); 730 if (result != ValidationDisableLookup.end()) { 731 SetValidationDisable(disables, result->second); 732 } 733 } 734 list_of_disables.erase(0, pos + delimiter.length()); 735 } 736} 737 738// Process enables and disables set though the vk_layer_settings.txt config file or through an environment variable 739void ProcessConfigAndEnvSettings(const char* layer_description, CHECK_ENABLED* enables, CHECK_DISABLED* disables) { 740 std::string enable_key = layer_description; 741 std::string disable_key = layer_description; 742 enable_key.append(".enables"); 743 disable_key.append(".disables"); 744 std::string list_of_config_enables = getLayerOption(enable_key.c_str()); 745 std::string list_of_env_enables = GetLayerEnvVar("VK_LAYER_ENABLES"); 746 std::string list_of_config_disables = getLayerOption(disable_key.c_str()); 747 std::string list_of_env_disables = GetLayerEnvVar("VK_LAYER_DISABLES"); 748#if defined(_WIN32) 749 std::string env_delimiter = ";"; 750#else 751 std::string env_delimiter = ":"; 752#endif 753 SetLocalEnableSetting(list_of_config_enables, ",", enables); 754 SetLocalEnableSetting(list_of_env_enables, env_delimiter, enables); 755 SetLocalDisableSetting(list_of_config_disables, ",", disables); 756 SetLocalDisableSetting(list_of_env_disables, env_delimiter, disables); 757} 758 759 760// Non-code-generated chassis API functions 761 762VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) { 763 auto layer_data = GetLayerDataPtr(get_dispatch_key(device), 
layer_data_map);
    // Reject the query if funcName's parent extension was not enabled on this device
    if (!ApiParentExtensionEnabled(funcName, &layer_data->device_extensions)) {
        return nullptr;
    }
    const auto &item = name_to_funcptr_map.find(funcName);
    if (item != name_to_funcptr_map.end()) {
        if (item->second.is_instance_api) {
            // Instance-level APIs are not valid results for vkGetDeviceProcAddr
            return nullptr;
        } else {
            return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
        }
    }
    // Not intercepted by this layer -- pass the query down the chain
    auto &table = layer_data->device_dispatch_table;
    if (!table.GetDeviceProcAddr) return nullptr;
    return table.GetDeviceProcAddr(device, funcName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    // Return this layer's interceptor if it has one, otherwise forward down the chain
    const auto &item = name_to_funcptr_map.find(funcName);
    if (item != name_to_funcptr_map.end()) {
        return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
    }
    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
    auto &table = layer_data->instance_dispatch_table;
    if (!table.GetInstanceProcAddr) return nullptr;
    return table.GetInstanceProcAddr(instance, funcName);
}

// This layer reports only itself in layer enumeration
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                              VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                    VkExtensionProperties *pProperties) {
    // Only answer for this layer's own instance extensions; anything else is not present here
    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
        return util_GetExtensionProperties(ARRAY_SIZE(instance_extensions), instance_extensions, pCount, pProperties);

    return VK_ERROR_LAYER_NOT_PRESENT;
}

VKAPI_ATTR VkResult VKAPI_CALL
EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
                                   uint32_t *pCount, VkExtensionProperties *pProperties) {
    // Report this layer's own device extensions; otherwise forward to the driver
    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
        return util_GetExtensionProperties(ARRAY_SIZE(device_extensions), device_extensions, pCount, pProperties);
    assert(physicalDevice);
    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
    return layer_data->instance_dispatch_table.EnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                                              VkInstance *pInstance) {
    VkLayerInstanceCreateInfo* chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) return VK_ERROR_INITIALIZATION_FAILED;
    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
    uint32_t specified_version = (pCreateInfo->pApplicationInfo ? pCreateInfo->pApplicationInfo->apiVersion : VK_API_VERSION_1_0);
    uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ?
VK_API_VERSION_1_0 : VK_API_VERSION_1_1;

    // Collect enable/disable settings from the VkValidationFeaturesEXT / VkValidationFlagsEXT
    // pNext structs, then from the config file and environment variables
    CHECK_ENABLED local_enables {};
    CHECK_DISABLED local_disables {};
    const auto *validation_features_ext = lvl_find_in_chain<VkValidationFeaturesEXT>(pCreateInfo->pNext);
    if (validation_features_ext) {
        SetValidationFeatures(&local_disables, &local_enables, validation_features_ext);
    }
    const auto *validation_flags_ext = lvl_find_in_chain<VkValidationFlagsEXT>(pCreateInfo->pNext);
    if (validation_flags_ext) {
        SetValidationFlags(&local_disables, validation_flags_ext);
    }
    ProcessConfigAndEnvSettings(OBJECT_LAYER_DESCRIPTION, &local_enables, &local_disables);

    // Create temporary dispatch vector for pre-calls until instance is created
    std::vector<ValidationObject*> local_object_dispatch;
    // Add VOs to dispatch vector. Order here will be the validation dispatch order!
    // NOTE(review): each checker below is new'd unconditionally but only added to the
    // dispatch vector when not disabled; a disabled checker is never visibly deleted in
    // this function -- confirm ownership / possible leak.
#if BUILD_THREAD_SAFETY
    auto thread_checker = new ThreadSafety;
    if (!local_disables.thread_safety) {
        local_object_dispatch.emplace_back(thread_checker);
    }
    thread_checker->container_type = LayerObjectTypeThreading;
    thread_checker->api_version = api_version;
#endif
#if BUILD_PARAMETER_VALIDATION
    auto parameter_validation = new StatelessValidation;
    if (!local_disables.stateless_checks) {
        local_object_dispatch.emplace_back(parameter_validation);
    }
    parameter_validation->container_type = LayerObjectTypeParameterValidation;
    parameter_validation->api_version = api_version;
#endif
#if BUILD_OBJECT_TRACKER
    auto object_tracker = new ObjectLifetimes;
    if (!local_disables.object_tracking) {
        local_object_dispatch.emplace_back(object_tracker);
    }
    object_tracker->container_type = LayerObjectTypeObjectTracker;
    object_tracker->api_version = api_version;
#endif
#if BUILD_CORE_VALIDATION
    auto core_checks = new CoreChecks;
    if (!local_disables.core_checks) {
        local_object_dispatch.emplace_back(core_checks);
    }
    core_checks->container_type = LayerObjectTypeCoreValidation;
    core_checks->api_version = api_version;
#endif
#if BUILD_BEST_PRACTICES
    // Best-practices is opt-in (enabled flag), unlike the opt-out checkers above
    auto best_practices = new BestPractices;
    if (local_enables.best_practices) {
        local_object_dispatch.emplace_back(best_practices);
    }
    best_practices->container_type = LayerObjectTypeBestPractices;
    best_practices->api_version = api_version;
#endif

    // If handle wrapping is disabled via the ValidationFeatures extension, override build flag
    if (local_disables.handle_wrapping) {
        wrap_handles = false;
    }

    // Init dispatch array and call registration functions
    for (auto intercept : local_object_dispatch) {
        intercept->PreCallValidateCreateInstance(pCreateInfo, pAllocator, pInstance);
    }
    for (auto intercept : local_object_dispatch) {
        intercept->PreCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance);
    }

    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
    if (result != VK_SUCCESS) return result;

    // Instance now exists: populate the chassis framework object for it
    auto framework = GetLayerDataPtr(get_dispatch_key(*pInstance), layer_data_map);

    framework->object_dispatch = local_object_dispatch;
    framework->container_type = LayerObjectTypeInstance;
    framework->disabled = local_disables;
    framework->enabled = local_enables;

    framework->instance = *pInstance;
    layer_init_instance_dispatch_table(*pInstance, &framework->instance_dispatch_table, fpGetInstanceProcAddr);
    framework->report_data = debug_utils_create_instance(&framework->instance_dispatch_table, *pInstance, pCreateInfo->enabledExtensionCount,
                                                         pCreateInfo->ppEnabledExtensionNames);
    framework->api_version = api_version;
    framework->instance_extensions.InitFromInstanceCreateInfo(specified_version, pCreateInfo);

    layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, OBJECT_LAYER_DESCRIPTION);

    // Propagate the shared framework state into each built checker
#if BUILD_OBJECT_TRACKER
    object_tracker->report_data = framework->report_data;
    object_tracker->instance_dispatch_table = framework->instance_dispatch_table;
    object_tracker->enabled = framework->enabled;
    object_tracker->disabled = framework->disabled;
#endif
#if BUILD_THREAD_SAFETY
    thread_checker->report_data = framework->report_data;
    thread_checker->instance_dispatch_table = framework->instance_dispatch_table;
    thread_checker->enabled = framework->enabled;
    thread_checker->disabled = framework->disabled;
#endif
#if BUILD_PARAMETER_VALIDATION
    parameter_validation->report_data = framework->report_data;
    parameter_validation->instance_dispatch_table = framework->instance_dispatch_table;
    parameter_validation->enabled = framework->enabled;
    parameter_validation->disabled = framework->disabled;
#endif
#if BUILD_CORE_VALIDATION
    core_checks->report_data = framework->report_data;
    core_checks->instance_dispatch_table = framework->instance_dispatch_table;
    core_checks->instance = *pInstance;
    core_checks->enabled = framework->enabled;
    core_checks->disabled = framework->disabled;
    // At instance scope the core checker is its own instance-state object
    core_checks->instance_state = core_checks;
#endif
#if BUILD_BEST_PRACTICES
    best_practices->report_data = framework->report_data;
    best_practices->instance_dispatch_table = framework->instance_dispatch_table;
    best_practices->enabled = framework->enabled;
    best_practices->disabled = framework->disabled;
#endif

    for (auto intercept : framework->object_dispatch) {
        intercept->PostCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance, result);
    }

    InstanceExtensionWhitelist(framework, pCreateInfo, *pInstance);

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(instance);
    auto layer_data = GetLayerDataPtr(key, layer_data_map);
    """ + precallvalidate_loop + """
        auto
lock = intercept->write_lock();
        intercept->PreCallValidateDestroyInstance(instance, pAllocator);
    }
    """ + precallrecord_loop + """
        auto lock = intercept->write_lock();
        intercept->PreCallRecordDestroyInstance(instance, pAllocator);
    }

    layer_data->instance_dispatch_table.DestroyInstance(instance, pAllocator);

    """ + postcallrecord_loop + """
        auto lock = intercept->write_lock();
        intercept->PostCallRecordDestroyInstance(instance, pAllocator);
    }
    // Clean up logging callback, if any
    while (layer_data->logging_messenger.size() > 0) {
        VkDebugUtilsMessengerEXT messenger = layer_data->logging_messenger.back();
        layer_destroy_messenger_callback(layer_data->report_data, messenger, pAllocator);
        layer_data->logging_messenger.pop_back();
    }
    while (layer_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = layer_data->logging_callback.back();
        layer_destroy_report_callback(layer_data->report_data, callback, pAllocator);
        layer_data->logging_callback.pop_back();
    }

    layer_debug_utils_destroy_instance(layer_data->report_data);

    // The chassis owns the validation objects in the dispatch vector
    for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
        delete *item;
    }
    FreeLayerDataPtr(key, layer_data_map);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    auto instance_interceptor = GetLayerDataPtr(get_dispatch_key(gpu), layer_data_map);

    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(instance_interceptor->instance, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    // Get physical device limits for device
    VkPhysicalDeviceProperties device_properties = {};
    instance_interceptor->instance_dispatch_table.GetPhysicalDeviceProperties(gpu, &device_properties);

    // Setup the validation tables based on the application API version from the instance and the capabilities of the device driver
    uint32_t effective_api_version = std::min(device_properties.apiVersion, instance_interceptor->api_version);

    DeviceExtensions device_extensions = {};
    device_extensions.InitFromDeviceCreateInfo(&instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
    for (auto item : instance_interceptor->object_dispatch) {
        item->device_extensions = device_extensions;
    }

    // Deep-copy the create info so pre-call hooks may modify it before dispatch
    safe_VkDeviceCreateInfo modified_create_info(pCreateInfo);

    bool skip = false;
    for (auto intercept : instance_interceptor->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : instance_interceptor->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, &modified_create_info);
    }

    VkResult result = fpCreateDevice(gpu, reinterpret_cast<VkDeviceCreateInfo *>(&modified_create_info), pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    auto device_interceptor = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    device_interceptor->container_type = LayerObjectTypeDevice;

    // Save local info in device object
    device_interceptor->phys_dev_properties.properties = device_properties;
    // NOTE(review): InitFromDeviceCreateInfo is invoked a second time here with the same
    // arguments as the call into the local device_extensions above -- confirm the repeat
    // is intentional (it recomputes the same data to obtain the api_version return value).
    device_interceptor->api_version = device_interceptor->device_extensions.InitFromDeviceCreateInfo(
        &instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
    device_interceptor->device_extensions = device_extensions;

    layer_init_device_dispatch_table(*pDevice, &device_interceptor->device_dispatch_table, fpGetDeviceProcAddr);

    device_interceptor->device = *pDevice;
    device_interceptor->physical_device = gpu;
    device_interceptor->instance = instance_interceptor->instance;
    device_interceptor->report_data = layer_debug_utils_create_device(instance_interceptor->report_data, *pDevice);

    // Note that this defines the order in which the layer validation objects are called
#if BUILD_THREAD_SAFETY
    auto thread_safety = new ThreadSafety;
    thread_safety->container_type = LayerObjectTypeThreading;
    if (!instance_interceptor->disabled.thread_safety) {
        device_interceptor->object_dispatch.emplace_back(thread_safety);
    }
#endif
#if BUILD_PARAMETER_VALIDATION
    auto stateless_validation = new StatelessValidation;
    stateless_validation->container_type = LayerObjectTypeParameterValidation;
    if (!instance_interceptor->disabled.stateless_checks) {
        device_interceptor->object_dispatch.emplace_back(stateless_validation);
    }
#endif
#if BUILD_OBJECT_TRACKER
    auto object_tracker = new ObjectLifetimes;
    object_tracker->container_type = LayerObjectTypeObjectTracker;
    if (!instance_interceptor->disabled.object_tracking) {
        device_interceptor->object_dispatch.emplace_back(object_tracker);
    }
#endif
#if BUILD_CORE_VALIDATION
    auto core_checks = new CoreChecks;
    core_checks->container_type = LayerObjectTypeCoreValidation;
    // Link the device-level core checker back to the instance-level core checker's state
    core_checks->instance_state = reinterpret_cast<CoreChecks *>(
        core_checks->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeCoreValidation));
    if (!instance_interceptor->disabled.core_checks) {
        device_interceptor->object_dispatch.emplace_back(core_checks);
    }
#endif
#if BUILD_BEST_PRACTICES
    // Best-practices is opt-in (enabled flag), unlike the opt-out checkers above
    auto best_practices = new BestPractices;
    best_practices->container_type = LayerObjectTypeBestPractices;
    if (instance_interceptor->enabled.best_practices) {
        device_interceptor->object_dispatch.emplace_back(best_practices);
    }
#endif

    // Set per-intercept common data items
    for (auto dev_intercept : device_interceptor->object_dispatch) {
        dev_intercept->device = *pDevice;
        dev_intercept->physical_device = gpu;
        dev_intercept->instance = instance_interceptor->instance;
        dev_intercept->report_data = device_interceptor->report_data;
        dev_intercept->device_dispatch_table = device_interceptor->device_dispatch_table;
        dev_intercept->api_version = device_interceptor->api_version;
        dev_intercept->disabled = instance_interceptor->disabled;
        dev_intercept->enabled = instance_interceptor->enabled;
        dev_intercept->instance_dispatch_table = instance_interceptor->instance_dispatch_table;
        dev_intercept->instance_extensions = instance_interceptor->instance_extensions;
        dev_intercept->device_extensions = device_interceptor->device_extensions;
    }

    for (auto intercept : instance_interceptor->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
    }

    DeviceExtensionWhitelist(device_interceptor, pCreateInfo, *pDevice);

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(device);
    auto layer_data = GetLayerDataPtr(key, layer_data_map);
    """ + precallvalidate_loop + """
auto lock = intercept->write_lock();
        intercept->PreCallValidateDestroyDevice(device, pAllocator);
    }
    """ + precallrecord_loop + """
        auto lock = intercept->write_lock();
        intercept->PreCallRecordDestroyDevice(device, pAllocator);
    }
    layer_debug_utils_destroy_device(device);

    layer_data->device_dispatch_table.DestroyDevice(device, pAllocator);

    """ + postcallrecord_loop + """
        auto lock = intercept->write_lock();
        intercept->PostCallRecordDestroyDevice(device, pAllocator);
    }

    // The chassis owns the validation objects in the dispatch vector
    for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
        delete *item;
    }
    FreeLayerDataPtr(key, layer_data_map);
}


// Special-case APIs for which core_validation needs custom parameter lists and/or modifies parameters

VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
    VkDevice device,
    VkPipelineCache pipelineCache,
    uint32_t createInfoCount,
    const VkGraphicsPipelineCreateInfo* pCreateInfos,
    const VkAllocationCallbacks* pAllocator,
    VkPipeline* pPipelines) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

    // Extra per-call state threaded through the intercept hooks; without core
    // validation only the (possibly replaced) pCreateInfos pointer is carried.
#ifdef BUILD_CORE_VALIDATION
    create_graphics_pipeline_api_state cgpl_state{};
#else
    struct create_graphics_pipeline_api_state {
        const VkGraphicsPipelineCreateInfo* pCreateInfos;
    } cgpl_state;
#endif
    cgpl_state.pCreateInfos = pCreateInfos;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &cgpl_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &cgpl_state);
    }

    // Dispatch with cgpl_state.pCreateInfos: pre-call hooks may have substituted the array
    VkResult result = DispatchCreateGraphicsPipelines(device, pipelineCache, createInfoCount, cgpl_state.pCreateInfos, pAllocator, pPipelines);

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &cgpl_state);
    }
    return result;
}

// This API saves some core_validation pipeline state on the stack for performance purposes
VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
    VkDevice device,
    VkPipelineCache pipelineCache,
    uint32_t createInfoCount,
    const VkComputePipelineCreateInfo* pCreateInfos,
    const VkAllocationCallbacks* pAllocator,
    VkPipeline* pPipelines) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

#ifdef BUILD_CORE_VALIDATION
    create_compute_pipeline_api_state ccpl_state{};
#else
    struct create_compute_pipeline_api_state {
        const VkComputePipelineCreateInfo* pCreateInfos;
    } ccpl_state;
#endif
    ccpl_state.pCreateInfos = pCreateInfos;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &ccpl_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &ccpl_state);
    }
    VkResult result = DispatchCreateComputePipelines(device,
pipelineCache, createInfoCount, ccpl_state.pCreateInfos, pAllocator, pPipelines);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &ccpl_state);
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
    VkDevice device,
    VkPipelineCache pipelineCache,
    uint32_t createInfoCount,
    const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
    const VkAllocationCallbacks* pAllocator,
    VkPipeline* pPipelines) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

    // Extra per-call state threaded through the intercept hooks (see CreateGraphicsPipelines)
#ifdef BUILD_CORE_VALIDATION
    create_ray_tracing_pipeline_api_state crtpl_state{};
#else
    struct create_ray_tracing_pipeline_api_state {
        const VkRayTracingPipelineCreateInfoNV* pCreateInfos;
    } crtpl_state;
#endif
    crtpl_state.pCreateInfos = pCreateInfos;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos,
                                                                      pAllocator, pPipelines, &crtpl_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
                                                            pPipelines, &crtpl_state);
    }
    VkResult result = DispatchCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
                                                             pPipelines, result, &crtpl_state);
    }
    return result;
}

// This API needs the ability to modify a down-chain parameter
VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
    VkDevice device,
    const VkPipelineLayoutCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkPipelineLayout* pPipelineLayout) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

    // Holds a mutable copy of the create info that pre-record hooks may rewrite
#ifndef BUILD_CORE_VALIDATION
    struct create_pipeline_layout_api_state {
        VkPipelineLayoutCreateInfo modified_create_info;
    };
#endif
    create_pipeline_layout_api_state cpl_state{};
    cpl_state.modified_create_info = *pCreateInfo;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, &cpl_state);
    }
    // Dispatch the (possibly modified) copy, not the caller's original
    VkResult result = DispatchCreatePipelineLayout(device, &cpl_state.modified_create_info, pAllocator, pPipelineLayout);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
    }
    return result;
}

// This API needs some local stack data for performance reasons and also may modify a parameter
VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
    VkDevice device,
    const VkShaderModuleCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkShaderModule* pShaderModule) {
    auto layer_data =
GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

    // Holds the (possibly instrumented) copy of the create info dispatched down-chain
#ifndef BUILD_CORE_VALIDATION
    struct create_shader_module_api_state {
        VkShaderModuleCreateInfo instrumented_create_info;
    };
#endif
    create_shader_module_api_state csm_state{};
    csm_state.instrumented_create_info = *pCreateInfo;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
    }
    VkResult result = DispatchCreateShaderModule(device, &csm_state.instrumented_create_info, pAllocator, pShaderModule);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result, &csm_state);
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
    VkDevice device,
    const VkDescriptorSetAllocateInfo* pAllocateInfo,
    VkDescriptorSet* pDescriptorSets) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

    // Stack storage used by core validation between pre- and post-call; a dummy otherwise
#ifdef BUILD_CORE_VALIDATION
    cvdescriptorset::AllocateDescriptorSetsData ads_state(pAllocateInfo->descriptorSetCount);
#else
    struct ads_state {} ads_state;
#endif

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, &ads_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
    }
    VkResult result = DispatchAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result, &ads_state);
    }
    return result;
}




// ValidationCache APIs do not dispatch

VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
    VkDevice device,
    const VkValidationCacheCreateInfoEXT* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkValidationCacheEXT* pValidationCache) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    VkResult result = VK_SUCCESS;

    // Handled entirely by the core-validation object, if present; otherwise a silent success
    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
    if (validation_data) {
        auto lock = validation_data->write_lock();
        result = validation_data->CoreLayerCreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
    VkDevice device,
    VkValidationCacheEXT validationCache,
    const VkAllocationCallbacks* pAllocator) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);

    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
    if (validation_data) {
        auto lock = validation_data->write_lock();
        validation_data->CoreLayerDestroyValidationCacheEXT(device, validationCache, pAllocator);
    }
}

VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
    VkDevice device,
VkValidationCacheEXT dstCache,
    uint32_t srcCacheCount,
    const VkValidationCacheEXT* pSrcCaches) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    VkResult result = VK_SUCCESS;

    // Handled entirely by the core-validation object, if present; otherwise a silent success
    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
    if (validation_data) {
        auto lock = validation_data->write_lock();
        result = validation_data->CoreLayerMergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
    VkDevice device,
    VkValidationCacheEXT validationCache,
    size_t* pDataSize,
    void* pData) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    VkResult result = VK_SUCCESS;

    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
    if (validation_data) {
        auto lock = validation_data->write_lock();
        result = validation_data->CoreLayerGetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
    }
    return result;

}"""

    # C++ member declarations spliced into the generated ValidationObject class:
    # no-op / pass-through default implementations that subclasses may override.
    inline_custom_validation_class_definitions = """
        virtual VkResult CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) { return VK_SUCCESS; };
        virtual void CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) {};
        virtual VkResult CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches) { return VK_SUCCESS; };
        virtual VkResult CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) { return VK_SUCCESS; };

        // Allow additional state parameter for CreateGraphicsPipelines
        virtual bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
            return PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        };
        virtual void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
            PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        };
        virtual void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* cgpl_state) {
            PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
        };

        // Allow additional state parameter for CreateComputePipelines
        virtual bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) {
            return PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        };
        virtual void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
            PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        };
        virtual void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
            PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
        };

        // Allow additional state parameter for CreateRayTracingPipelinesNV
        virtual bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) {
            return PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        };
        virtual void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
            PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        };
        virtual void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
            PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
        };

        // Allow modification of a down-chain parameter for CreatePipelineLayout
        virtual void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout, void *cpl_state) {
            PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
        };

        // Enable the CreateShaderModule API to take an extra argument for state preservation and parameter modification
        virtual bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) {
            return PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
        };
        virtual void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) {
            PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
        };
        virtual void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result, void* csm_state) {
            PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result);
        };

        // Allow AllocateDescriptorSets to use some local stack storage for performance purposes
        virtual bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, void* ads_state) {
            return PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
        };
        virtual void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, VkResult result, void* ads_state) {
PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result); 1491 }; 1492 1493 // Modify a parameter to CreateDevice 1494 virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, safe_VkDeviceCreateInfo *modified_create_info) { 1495 PreCallRecordCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice); 1496 }; 1497""" 1498 1499 inline_custom_source_postamble = """ 1500// loader-layer interface v0, just wrappers since there is only a layer 1501 1502VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount, 1503 VkExtensionProperties *pProperties) { 1504 return vulkan_layer_chassis::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties); 1505} 1506 1507VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount, 1508 VkLayerProperties *pProperties) { 1509 return vulkan_layer_chassis::EnumerateInstanceLayerProperties(pCount, pProperties); 1510} 1511 1512VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount, 1513 VkLayerProperties *pProperties) { 1514 // the layer command handles VK_NULL_HANDLE just fine internally 1515 assert(physicalDevice == VK_NULL_HANDLE); 1516 return vulkan_layer_chassis::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties); 1517} 1518 1519VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, 1520 const char *pLayerName, uint32_t *pCount, 1521 VkExtensionProperties *pProperties) { 1522 // the layer command handles VK_NULL_HANDLE just fine internally 1523 assert(physicalDevice == VK_NULL_HANDLE); 1524 return vulkan_layer_chassis::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties); 1525} 1526 
1527VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) { 1528 return vulkan_layer_chassis::GetDeviceProcAddr(dev, funcName); 1529} 1530 1531VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) { 1532 return vulkan_layer_chassis::GetInstanceProcAddr(instance, funcName); 1533} 1534 1535VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) { 1536 assert(pVersionStruct != NULL); 1537 assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT); 1538 1539 // Fill in the function pointers if our version is at least capable of having the structure contain them. 1540 if (pVersionStruct->loaderLayerInterfaceVersion >= 2) { 1541 pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr; 1542 pVersionStruct->pfnGetDeviceProcAddr = vkGetDeviceProcAddr; 1543 pVersionStruct->pfnGetPhysicalDeviceProcAddr = nullptr; 1544 } 1545 1546 return VK_SUCCESS; 1547}""" 1548 1549 1550 def __init__(self, 1551 errFile = sys.stderr, 1552 warnFile = sys.stderr, 1553 diagFile = sys.stdout): 1554 OutputGenerator.__init__(self, errFile, warnFile, diagFile) 1555 # Internal state - accumulators for different inner block text 1556 self.sections = dict([(section, []) for section in self.ALL_SECTIONS]) 1557 self.intercepts = [] 1558 self.layer_factory = '' # String containing base layer factory class definition 1559 1560 # Check if the parameter passed in is a pointer to an array 1561 def paramIsArray(self, param): 1562 return param.attrib.get('len') is not None 1563 1564 # Check if the parameter passed in is a pointer 1565 def paramIsPointer(self, param): 1566 ispointer = False 1567 for elem in param: 1568 if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail: 1569 ispointer = True 1570 return ispointer 1571 1572 # 1573 # 1574 def beginFile(self, genOpts): 1575 
        """Emit the file prologue.

        Writes the copyright banner, a '#pragma once' guard when generating a
        header (filename ending in 'h'), then the header or source preamble,
        and seeds the accumulated layer-factory base-class definition text
        (flushed later in endFile()).
        """
        OutputGenerator.beginFile(self, genOpts)
        # Output Copyright
        write(self.inline_copyright_message, file=self.outFile)
        # Multiple inclusion protection
        # self.header is True when the output file is a header (name ends in 'h')
        self.header = False
        if (self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
            self.header = True
            write('#pragma once', file=self.outFile)
            self.newline()
        if self.header:
            write(self.inline_custom_header_preamble, file=self.outFile)
        else:
            write(self.inline_custom_source_preamble, file=self.outFile)
        # Start accumulating the ValidationObject base class definition text
        self.layer_factory += self.inline_custom_header_class_definition
    #
    #
    def endFile(self):
        """Emit the file epilogue and finish processing in the superclass.

        Source file: write the map of intercepted API names -> function data,
        close the vulkan_layer_chassis namespace, and append the loader-layer
        interface wrappers.  Header file: flush the accumulated layer-factory
        class definition (plus the custom validation-cache / extra-state
        virtuals) and the layer_data_map extern declaration.
        """
        # Finish C++ namespace and multiple inclusion protection
        self.newline()
        if not self.header:
            # Record intercepted procedures
            write('// Map of intercepted ApiName to its associated function data', file=self.outFile)
            write('const std::unordered_map<std::string, function_data> name_to_funcptr_map = {', file=self.outFile)
            write('\n'.join(self.intercepts), file=self.outFile)
            write('};\n', file=self.outFile)
            self.newline()
            write('} // namespace vulkan_layer_chassis', file=self.outFile)
        if self.header:
            self.newline()
            # Output Layer Factory Class Definitions
            self.layer_factory += self.inline_custom_validation_class_definitions
            self.layer_factory += '};\n\n'
            self.layer_factory += 'extern std::unordered_map<void*, ValidationObject*> layer_data_map;'
            write(self.layer_factory, file=self.outFile)
        else:
            write(self.inline_custom_source_postamble, file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFile(self)

    def beginFeature(self, interface, emit):
        """Begin a feature (core version or extension): reset per-feature state."""
        # Start processing in superclass
        OutputGenerator.beginFeature(self, interface, emit)
        # Get feature extra protect
        self.featureExtraProtect = GetFeatureProtect(interface)
        # Accumulate includes, defines, types, enums, function pointer typedefs, end function prototypes separately for this
        # feature. They're only printed in endFeature().
        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])

    def endFeature(self):
        """Write this feature's accumulated sections, wrapped in its platform #ifdef guard when one applies."""
        # Actually write the interface to the output file.
        if (self.emit):
            self.newline()
            # If type declarations are needed by other features based on this one, it may be necessary to suppress the ExtraProtect,
            # or move it below the 'for section...' loop.
            if (self.featureExtraProtect != None):
                write('#ifdef', self.featureExtraProtect, file=self.outFile)
            for section in self.TYPE_SECTIONS:
                contents = self.sections[section]
                if contents:
                    write('\n'.join(contents), file=self.outFile)
                    self.newline()
            if (self.sections['command']):
                write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
                self.newline()
            if (self.featureExtraProtect != None):
                write('#endif //', self.featureExtraProtect, file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFeature(self)
    #
    # Append a definition to the specified section
    def appendSection(self, section, text):
        self.sections[section].append(text)
    #
    # Type generation
    def genType(self, typeinfo, name, alias):
        # Intentionally a no-op: this generator emits only command wrappers (and structs).
        pass
    #
    # Struct (e.g. C "struct" type) generation. This is a special case of the <type> tag where the contents are
    # interpreted as a set of <member> tags instead of freeform C type declarations. The <member> tags are just like <param>
    # tags - they are a declaration of a struct or union member. Only simple member declarations are supported (no nested
    # structs etc.)
1656 def genStruct(self, typeinfo, typeName): 1657 OutputGenerator.genStruct(self, typeinfo, typeName) 1658 body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n' 1659 # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam) 1660 for member in typeinfo.elem.findall('.//member'): 1661 body += self.makeCParamDecl(member, self.genOpts.alignFuncParam) 1662 body += ';\n' 1663 body += '} ' + typeName + ';\n' 1664 self.appendSection('struct', body) 1665 # 1666 # Group (e.g. C "enum" type) generation. These are concatenated together with other types. 1667 def genGroup(self, groupinfo, groupName, alias): 1668 pass 1669 # Enumerant generation 1670 # <enum> tags may specify their values in several ways, but are usually just integers. 1671 def genEnum(self, enuminfo, name, alias): 1672 pass 1673 # 1674 # Customize Cdecl for layer factory base class 1675 def BaseClassCdecl(self, elem, name): 1676 raw = self.makeCDecls(elem)[1] 1677 1678 # Toss everything before the undecorated name 1679 prototype = raw.split("VKAPI_PTR *PFN_vk")[1] 1680 prototype = prototype.replace(")", "", 1) 1681 prototype = prototype.replace(";", " {};") 1682 1683 # Build up pre/post call virtual function declarations 1684 pre_call_validate = 'virtual bool PreCallValidate' + prototype 1685 pre_call_validate = pre_call_validate.replace("{}", " { return false; }") 1686 pre_call_record = 'virtual void PreCallRecord' + prototype 1687 post_call_record = 'virtual void PostCallRecord' + prototype 1688 resulttype = elem.find('proto/type') 1689 if resulttype.text == 'VkResult': 1690 post_call_record = post_call_record.replace(')', ', VkResult result)') 1691 return ' %s\n %s\n %s\n' % (pre_call_validate, pre_call_record, post_call_record) 1692 # 1693 # Command generation 1694 def genCmd(self, cmdinfo, name, alias): 1695 ignore_functions = [ 1696 'vkEnumerateInstanceVersion', 1697 ] 1698 1699 if name in ignore_functions: 1700 return 1701 1702 if self.header: # In the header 
declare all intercepts 1703 self.appendSection('command', '') 1704 self.appendSection('command', self.makeCDecls(cmdinfo.elem)[0]) 1705 if (self.featureExtraProtect != None): 1706 self.layer_factory += '#ifdef %s\n' % self.featureExtraProtect 1707 # Update base class with virtual function declarations 1708 if 'ValidationCache' not in name: 1709 self.layer_factory += self.BaseClassCdecl(cmdinfo.elem, name) 1710 if (self.featureExtraProtect != None): 1711 self.layer_factory += '#endif\n' 1712 return 1713 1714 is_instance = 'false' 1715 dispatchable_type = cmdinfo.elem.find('param/type').text 1716 if dispatchable_type in ["VkPhysicalDevice", "VkInstance"] or name == 'vkCreateInstance': 1717 is_instance = 'true' 1718 1719 if name in self.manual_functions: 1720 if 'ValidationCache' not in name: 1721 self.intercepts += [ ' {"%s", {%s, (void*)%s}},' % (name, is_instance, name[2:]) ] 1722 else: 1723 self.intercepts += [ '#ifdef BUILD_CORE_VALIDATION' ] 1724 1725 self.intercepts += [ ' {"%s", {%s, (void*)%s}},' % (name, is_instance, name[2:]) ] 1726 self.intercepts += [ '#endif' ] 1727 return 1728 # Record that the function will be intercepted 1729 if (self.featureExtraProtect != None): 1730 self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ] 1731 self.intercepts += [ ' {"%s", {%s, (void*)%s}},' % (name, is_instance, name[2:]) ] 1732 if (self.featureExtraProtect != None): 1733 self.intercepts += [ '#endif' ] 1734 OutputGenerator.genCmd(self, cmdinfo, name, alias) 1735 # 1736 decls = self.makeCDecls(cmdinfo.elem) 1737 self.appendSection('command', '') 1738 self.appendSection('command', '%s {' % decls[0][:-1]) 1739 # Setup common to call wrappers. 
First parameter is always dispatchable 1740 dispatchable_name = cmdinfo.elem.find('param/name').text 1741 self.appendSection('command', ' auto layer_data = GetLayerDataPtr(get_dispatch_key(%s), layer_data_map);' % (dispatchable_name)) 1742 api_function_name = cmdinfo.elem.attrib.get('name') 1743 params = cmdinfo.elem.findall('param/name') 1744 paramstext = ', '.join([str(param.text) for param in params]) 1745 API = api_function_name.replace('vk','Dispatch') + '(' 1746 1747 # Declare result variable, if any. 1748 return_map = { 1749 'PFN_vkVoidFunction': 'return nullptr;', 1750 'VkBool32': 'return VK_FALSE;', 1751 'VkDeviceAddress': 'return 0;', 1752 'VkResult': 'return VK_ERROR_VALIDATION_FAILED_EXT;', 1753 'void': 'return;', 1754 'uint32_t': 'return 0;' 1755 } 1756 resulttype = cmdinfo.elem.find('proto/type') 1757 assignresult = '' 1758 if (resulttype.text != 'void'): 1759 assignresult = resulttype.text + ' result = ' 1760 1761 # Set up skip and locking 1762 self.appendSection('command', ' bool skip = false;') 1763 1764 # Generate pre-call validation source code 1765 self.appendSection('command', ' %s' % self.precallvalidate_loop) 1766 self.appendSection('command', ' auto lock = intercept->write_lock();') 1767 self.appendSection('command', ' skip |= intercept->PreCallValidate%s(%s);' % (api_function_name[2:], paramstext)) 1768 self.appendSection('command', ' if (skip) %s' % return_map[resulttype.text]) 1769 self.appendSection('command', ' }') 1770 1771 # Generate pre-call state recording source code 1772 self.appendSection('command', ' %s' % self.precallrecord_loop) 1773 self.appendSection('command', ' auto lock = intercept->write_lock();') 1774 self.appendSection('command', ' intercept->PreCallRecord%s(%s);' % (api_function_name[2:], paramstext)) 1775 self.appendSection('command', ' }') 1776 1777 # Insert pre-dispatch debug utils function call 1778 if name in self.pre_dispatch_debug_utils_functions: 1779 self.appendSection('command', ' %s' % 
self.pre_dispatch_debug_utils_functions[name]) 1780 1781 # Output dispatch (down-chain) function call 1782 self.appendSection('command', ' ' + assignresult + API + paramstext + ');') 1783 1784 # Insert post-dispatch debug utils function call 1785 if name in self.post_dispatch_debug_utils_functions: 1786 self.appendSection('command', ' %s' % self.post_dispatch_debug_utils_functions[name]) 1787 1788 # Generate post-call object processing source code 1789 self.appendSection('command', ' %s' % self.postcallrecord_loop) 1790 returnparam = '' 1791 if (resulttype.text == 'VkResult'): 1792 returnparam = ', result' 1793 self.appendSection('command', ' auto lock = intercept->write_lock();') 1794 self.appendSection('command', ' intercept->PostCallRecord%s(%s%s);' % (api_function_name[2:], paramstext, returnparam)) 1795 self.appendSection('command', ' }') 1796 # Return result variable, if any. 1797 if (resulttype.text != 'void'): 1798 self.appendSection('command', ' return result;') 1799 self.appendSection('command', '}') 1800 # 1801 # Override makeProtoName to drop the "vk" prefix 1802 def makeProtoName(self, name, tail): 1803 return self.genOpts.apientry + name[2:] + tail 1804