#!/usr/bin/python3 -i
#
# Copyright (c) 2015-2019 Valve Corporation
# Copyright (c) 2015-2019 LunarG, Inc.
# Copyright (c) 2015-2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Tobin Ehlis <tobine@google.com>
# Author: Mark Lobodzinski <mark@lunarg.com>
#
# This script generates the dispatch portion of a factory layer which intercepts
# all Vulkan functions. The resultant factory layer allows rapid development of
# layers and interceptors.

import os, re, sys
from generator import *
from common_codegen import *

# LayerChassisGeneratorOptions - subclass of GeneratorOptions.
#
# Adds options used by LayerChassisOutputGenerator objects during layer
# chassis generation.
#
# Additional members
#   prefixText - list of strings to prefix generated header with
#     (usually a copyright statement + calling convention macros).
#   protectFile - True if multiple inclusion protection should be
#     generated (based on the filename) around the entire header.
#   protectFeature - True if #ifndef..#endif protection should be
#     generated around a feature interface in the header file.
#   genFuncPointers - True if function pointer typedefs should be
#     generated
#   protectProto - If conditional protection should be generated
#     around prototype declarations, set to either '#ifdef'
#     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
#     to require opt-out (#ifndef protectProtoStr). Otherwise
#     set to None.
#   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
#     declarations, if protectProto is set
#   apicall - string to use for the function declaration prefix,
#     such as APICALL on Windows.
#   apientry - string to use for the calling convention macro,
#     in typedefs, such as APIENTRY.
#   apientryp - string to use for the calling convention macro
#     in function pointer typedefs, such as APIENTRYP.
#   indentFuncProto - True if prototype declarations should put each
#     parameter on a separate line
#   indentFuncPointer - True if typedefed function pointers should put each
#     parameter on a separate line
#   alignFuncParam - if nonzero and parameters are being put on a
#     separate line, align parameter names at the specified column
class LayerChassisGeneratorOptions(GeneratorOptions):
    def __init__(self,
                 filename = None,
                 directory = '.',
                 apiname = None,
                 profile = None,
                 versions = '.*',
                 emitversions = '.*',
                 defaultExtensions = None,
                 addExtensions = None,
                 removeExtensions = None,
                 emitExtensions = None,
                 sortProcedure = regSortFeatures,
                 prefixText = "",
                 genFuncPointers = True,
                 protectFile = True,
                 protectFeature = True,
                 apicall = '',
                 apientry = '',
                 apientryp = '',
                 indentFuncProto = True,
                 indentFuncPointer = False,
                 alignFuncParam = 0,
                 helper_file_type = '',
                 expandEnumerants = True):
        GeneratorOptions.__init__(self, filename, directory, apiname, profile,
                                  versions, emitversions, defaultExtensions,
                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
        self.prefixText      = prefixText
        self.genFuncPointers = genFuncPointers
        self.protectFile     = protectFile
        self.protectFeature  = protectFeature
        self.apicall         = apicall
        self.apientry        = apientry
        self.apientryp       = apientryp
        self.indentFuncProto = indentFuncProto
        self.indentFuncPointer = indentFuncPointer
        self.alignFuncParam  = alignFuncParam

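# Illustrative only: the option values and driver names below are hypothetical, but they sketch
# how a registry-driven front end (a genvk-style script) typically consumes the options class above.
#
#   opts = LayerChassisGeneratorOptions(filename='chassis.cpp',
#                                       directory='.',
#                                       apiname='vulkan',
#                                       defaultExtensions='vulkan',
#                                       helper_file_type='layer_chassis_source')
#   # A reg.Registry-style driver would then load vk.xml and run this generator with opts.
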
# LayerChassisOutputGenerator - subclass of OutputGenerator.
# Generates a layer chassis that intercepts all API entrypoints.
#  This is intended to be used as a starting point for creating custom layers
#
# ---- methods ----
# LayerChassisOutputGenerator(errFile, warnFile, diagFile) - args as for
#   OutputGenerator. Defines additional internal state.
# ---- methods overriding base class ----
# beginFile(genOpts)
# endFile()
# beginFeature(interface, emit)
# endFeature()
# genType(typeinfo,name)
# genStruct(typeinfo,name)
# genGroup(groupinfo,name)
# genEnum(enuminfo, name)
# genCmd(cmdinfo)
class LayerChassisOutputGenerator(OutputGenerator):
    """Generate specified API interfaces in a specific style, such as a C header"""
    # This is an ordered list of sections in the header file.
    TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
                     'group', 'bitmask', 'funcpointer', 'struct']
    ALL_SECTIONS = TYPE_SECTIONS + ['command']

    manual_functions = [
        # Include functions here to be intercepted with manually implemented function bodies
        'vkGetDeviceProcAddr',
        'vkGetInstanceProcAddr',
        'vkGetPhysicalDeviceProcAddr',
        'vkCreateDevice',
        'vkDestroyDevice',
        'vkCreateInstance',
        'vkDestroyInstance',
        'vkEnumerateInstanceLayerProperties',
        'vkEnumerateInstanceExtensionProperties',
        'vkEnumerateDeviceLayerProperties',
        'vkEnumerateDeviceExtensionProperties',
        # Functions that are handled explicitly due to chassis architecture violations
        'vkCreateGraphicsPipelines',
        'vkCreateComputePipelines',
        'vkCreateRayTracingPipelinesNV',
        'vkCreatePipelineLayout',
        'vkCreateShaderModule',
        'vkAllocateDescriptorSets',
        # ValidationCache functions do not get dispatched
        'vkCreateValidationCacheEXT',
        'vkDestroyValidationCacheEXT',
        'vkMergeValidationCachesEXT',
        'vkGetValidationCacheDataEXT',
        ]

    alt_ret_codes = [
        # Include functions here which must tolerate VK_INCOMPLETE as a return code
        'vkEnumeratePhysicalDevices',
        'vkEnumeratePhysicalDeviceGroupsKHR',
        'vkGetValidationCacheDataEXT',
        'vkGetPipelineCacheData',
        'vkGetShaderInfoAMD',
        'vkGetPhysicalDeviceDisplayPropertiesKHR',
        'vkGetPhysicalDeviceDisplayProperties2KHR',
        'vkGetPhysicalDeviceDisplayPlanePropertiesKHR',
        'vkGetDisplayPlaneSupportedDisplaysKHR',
        'vkGetDisplayModePropertiesKHR',
        'vkGetDisplayModeProperties2KHR',
        'vkGetPhysicalDeviceSurfaceFormatsKHR',
        'vkGetPhysicalDeviceSurfacePresentModesKHR',
        'vkGetPhysicalDevicePresentRectanglesKHR',
        'vkGetPastPresentationTimingGOOGLE',
        'vkGetSwapchainImagesKHR',
        'vkEnumerateInstanceLayerProperties',
        'vkEnumerateDeviceLayerProperties',
        'vkEnumerateInstanceExtensionProperties',
        'vkEnumerateDeviceExtensionProperties',
        'vkGetPhysicalDeviceCalibrateableTimeDomainsEXT',
    ]

    pre_dispatch_debug_utils_functions = {
        'vkDebugMarkerSetObjectNameEXT' : 'layer_data->report_data->DebugReportSetMarkerObjectName(pNameInfo);',
        'vkSetDebugUtilsObjectNameEXT' : 'layer_data->report_data->DebugReportSetUtilsObjectName(pNameInfo);',
        'vkQueueBeginDebugUtilsLabelEXT' : 'BeginQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);',
        'vkQueueInsertDebugUtilsLabelEXT' : 'InsertQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);',
        'vkCmdBeginDebugUtilsLabelEXT' : 'BeginCmdDebugUtilsLabel(layer_data->report_data, commandBuffer, pLabelInfo);',
        'vkCmdInsertDebugUtilsLabelEXT' : 'InsertCmdDebugUtilsLabel(layer_data->report_data, commandBuffer, pLabelInfo);'
        }

    post_dispatch_debug_utils_functions = {
        'vkQueueEndDebugUtilsLabelEXT' : 'EndQueueDebugUtilsLabel(layer_data->report_data, queue);',
        'vkCmdEndDebugUtilsLabelEXT' : 'EndCmdDebugUtilsLabel(layer_data->report_data, commandBuffer);',
        'vkCmdInsertDebugUtilsLabelEXT' : 'InsertCmdDebugUtilsLabel(layer_data->report_data, commandBuffer, pLabelInfo);',
        'vkCreateDebugReportCallbackEXT' : 'layer_create_report_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pCallback);',
        'vkDestroyDebugReportCallbackEXT' : 'layer_destroy_report_callback(layer_data->report_data, callback, pAllocator);',
        'vkCreateDebugUtilsMessengerEXT' : 'layer_create_messenger_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pMessenger);',
        'vkDestroyDebugUtilsMessengerEXT' : 'layer_destroy_messenger_callback(layer_data->report_data, messenger, pAllocator);',
        }

    precallvalidate_loop = "for (auto intercept : layer_data->object_dispatch) {"
    precallrecord_loop = precallvalidate_loop
    postcallrecord_loop = "for (auto intercept : layer_data->object_dispatch) {"
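    # Note: the loop-header strings above are spliced verbatim (via string concatenation) into the
    # hand-written entry points in inline_custom_source_preamble below, so each installed validation
    # object gets its PreCallValidate*/PreCallRecord*/PostCallRecord* hook invoked in turn.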

    inline_custom_header_preamble = """
#define NOMINMAX
#include <mutex>
#include <cinttypes>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unordered_map>
#include <unordered_set>
#include <algorithm>
#include <memory>

#include "vk_loader_platform.h"
#include "vulkan/vulkan.h"
#include "vk_layer_config.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_object_types.h"
#include "vulkan/vk_layer.h"
#include "vk_enum_string_helper.h"
#include "vk_layer_extension_utils.h"
#include "vk_layer_utils.h"
#include "vulkan/vk_layer.h"
#include "vk_dispatch_table_helper.h"
#include "vk_validation_error_messages.h"
#include "vk_extension_helper.h"
#include "vk_safe_struct.h"
#include "vk_typemap_helper.h"


extern uint64_t global_unique_id;
extern std::unordered_map<uint64_t, uint64_t> unique_id_mapping;
"""

    inline_custom_header_class_definition = """

// Layer object type identifiers
enum LayerObjectTypeId {
    LayerObjectTypeInstance,                    // Container for an instance dispatch object
    LayerObjectTypeDevice,                      // Container for a device dispatch object
    LayerObjectTypeThreading,                   // Instance or device threading layer object
    LayerObjectTypeParameterValidation,         // Instance or device parameter validation layer object
    LayerObjectTypeObjectTracker,               // Instance or device object tracker layer object
    LayerObjectTypeCoreValidation,              // Instance or device core validation layer object
};

struct TEMPLATE_STATE {
    VkDescriptorUpdateTemplateKHR desc_update_template;
    safe_VkDescriptorUpdateTemplateCreateInfo create_info;

    TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfo *pCreateInfo)
        : desc_update_template(update_template), create_info(*pCreateInfo) {}
};

class LAYER_PHYS_DEV_PROPERTIES {
public:
    VkPhysicalDeviceProperties properties;
    std::vector<VkQueueFamilyProperties> queue_family_properties;
};

// CHECK_DISABLED struct is a container for bools that can block validation checks from being performed.
// The end goal is to have all checks guarded by a bool. The bools are all "false" by default meaning that all checks
// are enabled. At CreateInstance time, the user can use the VK_EXT_validation_flags extension to pass in enum values
// of VkValidationCheckEXT that will selectively disable checks.
// The VK_EXT_validation_features extension can also be used with the VkValidationFeaturesEXT structure to set
// disables in the CHECK_DISABLED struct and/or enables in the CHECK_ENABLED struct.
struct CHECK_DISABLED {
    bool command_buffer_state;
    bool create_descriptor_set_layout;
    bool destroy_buffer_view;       // Skip validation at DestroyBufferView time
    bool destroy_image_view;        // Skip validation at DestroyImageView time
    bool destroy_pipeline;          // Skip validation at DestroyPipeline time
    bool destroy_descriptor_pool;   // Skip validation at DestroyDescriptorPool time
    bool destroy_framebuffer;       // Skip validation at DestroyFramebuffer time
    bool destroy_renderpass;        // Skip validation at DestroyRenderpass time
    bool destroy_image;             // Skip validation at DestroyImage time
    bool destroy_sampler;           // Skip validation at DestroySampler time
    bool destroy_command_pool;      // Skip validation at DestroyCommandPool time
    bool destroy_event;             // Skip validation at DestroyEvent time
    bool free_memory;               // Skip validation at FreeMemory time
    bool object_in_use;             // Skip all object in_use checking
    bool idle_descriptor_set;       // Skip check to verify that descriptor set is not in-use
    bool push_constant_range;       // Skip push constant range checks
    bool free_descriptor_sets;      // Skip validation prior to vkFreeDescriptorSets()
    bool allocate_descriptor_sets;  // Skip validation prior to vkAllocateDescriptorSets()
    bool update_descriptor_sets;    // Skip validation prior to vkUpdateDescriptorSets()
    bool wait_for_fences;
    bool get_fence_state;
    bool queue_wait_idle;
    bool device_wait_idle;
    bool destroy_fence;
    bool destroy_semaphore;
    bool destroy_query_pool;
    bool get_query_pool_results;
    bool destroy_buffer;
    bool shader_validation;  // Skip validation for shaders

    void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); }
};

struct CHECK_ENABLED {
    bool gpu_validation;
    bool gpu_validation_reserve_binding_slot;

    void SetAll(bool value) { std::fill(&gpu_validation, &gpu_validation_reserve_binding_slot + 1, value); }
};

// Layer chassis validation object base class definition
class ValidationObject {
    public:
        uint32_t api_version;
        debug_report_data* report_data = nullptr;
        std::vector<VkDebugReportCallbackEXT> logging_callback;
        std::vector<VkDebugUtilsMessengerEXT> logging_messenger;

        VkLayerInstanceDispatchTable instance_dispatch_table;
        VkLayerDispatchTable device_dispatch_table;

        InstanceExtensions instance_extensions;
        DeviceExtensions device_extensions = {};
        CHECK_DISABLED disabled = {};
        CHECK_ENABLED enabled = {};

        VkInstance instance = VK_NULL_HANDLE;
        VkPhysicalDevice physical_device = VK_NULL_HANDLE;
        VkDevice device = VK_NULL_HANDLE;
        LAYER_PHYS_DEV_PROPERTIES phys_dev_properties = {};

        std::vector<ValidationObject*> object_dispatch;
        LayerObjectTypeId container_type;

        std::string layer_name = "CHASSIS";

        // Constructor
        ValidationObject(){};
        // Destructor
        virtual ~ValidationObject() {};

        std::mutex validation_object_mutex;
        virtual std::unique_lock<std::mutex> write_lock() {
            return std::unique_lock<std::mutex>(validation_object_mutex);
        }

        ValidationObject* GetValidationObject(std::vector<ValidationObject*>& object_dispatch, LayerObjectTypeId object_type) {
            for (auto validation_object : object_dispatch) {
                if (validation_object->container_type == object_type) {
                    return validation_object;
                }
            }
            return nullptr;
        };

        // Handle Wrapping Data
        // Reverse map display handles
        std::unordered_map<VkDisplayKHR, uint64_t> display_id_reverse_mapping;
        std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_map;
        struct SubpassesUsageStates {
            std::unordered_set<uint32_t> subpasses_using_color_attachment;
            std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
        };
        // Uses unwrapped handles
        std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states;
        // Map of wrapped swapchain handles to arrays of wrapped swapchain image IDs
        // Each swapchain has an immutable list of wrapped swapchain image IDs -- always return these IDs if they exist
        std::unordered_map<VkSwapchainKHR, std::vector<VkImage>> swapchain_wrapped_image_handle_map;
        // Map of wrapped descriptor pools to set of wrapped descriptor sets allocated from each pool
        std::unordered_map<VkDescriptorPool, std::unordered_set<VkDescriptorSet>> pool_descriptor_sets_map;


        // Unwrap a handle.  Must hold lock.
        template <typename HandleType>
        HandleType Unwrap(HandleType wrappedHandle) {
            // TODO: don't use operator[] here.
            return (HandleType)unique_id_mapping[reinterpret_cast<uint64_t const &>(wrappedHandle)];
        }

        // Wrap a newly created handle with a new unique ID, and return the new ID -- must hold lock.
        template <typename HandleType>
        HandleType WrapNew(HandleType newlyCreatedHandle) {
            auto unique_id = global_unique_id++;
            unique_id_mapping[unique_id] = reinterpret_cast<uint64_t const &>(newlyCreatedHandle);
            return (HandleType)unique_id;
        }
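
        // Illustrative use only (assumed pattern, not emitted by the generator at this point):
        // a create-style entry point would typically wrap the new handle while holding the layer
        // lock, e.g.
        //     auto lock = write_lock();
        //     if (result == VK_SUCCESS) *pHandle = WrapNew(*pHandle);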

        // Specialized handling for VkDisplayKHR. Adds an entry to enable reverse-lookup. Must hold lock.
        VkDisplayKHR WrapDisplay(VkDisplayKHR newlyCreatedHandle, ValidationObject *map_data) {
            auto unique_id = global_unique_id++;
            unique_id_mapping[unique_id] = reinterpret_cast<uint64_t const &>(newlyCreatedHandle);
            map_data->display_id_reverse_mapping[newlyCreatedHandle] = unique_id;
            return (VkDisplayKHR)unique_id;
        }

        // VkDisplayKHR objects don't have a single point of creation, so we need to see if one already exists in the map before
        // creating another. Must hold lock.
        VkDisplayKHR MaybeWrapDisplay(VkDisplayKHR handle, ValidationObject *map_data) {
            // See if this display is already known
            auto it = map_data->display_id_reverse_mapping.find(handle);
            if (it != map_data->display_id_reverse_mapping.end()) return (VkDisplayKHR)it->second;
            // Unknown, so wrap
            return WrapDisplay(handle, map_data);
        }

        // Pre/post hook point declarations
"""
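    # At the "Pre/post hook point declarations" marker above, the generated chassis header gains
    # virtual PreCallValidate* / PreCallRecord* / PostCallRecord* declarations for each intercepted
    # command (see the calls in the manually written bodies below); child validation objects
    # override only the hooks they need.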

    inline_copyright_message = """
// This file is ***GENERATED***.  Do Not Edit.
// See layer_chassis_generator.py for modifications.

/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (c) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */"""

    inline_custom_source_preamble = """

#include <string.h>
#include <mutex>

#define VALIDATION_ERROR_MAP_IMPL

#include "chassis.h"
#include "layer_chassis_dispatch.h"

std::unordered_map<void*, ValidationObject*> layer_data_map;

// Global unique object identifier.  All increments must be guarded by a lock.
uint64_t global_unique_id = 1;
// Map uniqueID to actual object handle
std::unordered_map<uint64_t, uint64_t> unique_id_mapping;

// TODO: This variable controls handle wrapping -- in the future it should be hooked
//       up to the new VALIDATION_FEATURES extension. Temporarily, control with a compile-time flag.
#if defined(LAYER_CHASSIS_CAN_WRAP_HANDLES)
bool wrap_handles = true;
#else
const bool wrap_handles = false;
#endif

// Include child object (layer) definitions
#if BUILD_OBJECT_TRACKER
#include "object_lifetime_validation.h"
#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_object_tracker"
#elif BUILD_THREAD_SAFETY
#include "thread_safety.h"
#define OBJECT_LAYER_NAME "VK_LAYER_GOOGLE_threading"
#elif BUILD_PARAMETER_VALIDATION
#include "stateless_validation.h"
#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_parameter_validation"
#elif BUILD_CORE_VALIDATION
#include "core_validation.h"
#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_core_validation"
#else
#define OBJECT_LAYER_NAME "VK_LAYER_GOOGLE_unique_objects"
#endif

namespace vulkan_layer_chassis {

using std::unordered_map;

static const VkLayerProperties global_layer = {
    OBJECT_LAYER_NAME, VK_LAYER_API_VERSION, 1, "LunarG validation Layer",
};

static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};

extern const std::unordered_map<std::string, void*> name_to_funcptr_map;


// Manually written functions

// Check enabled instance extensions against supported instance extension whitelist
static void InstanceExtensionWhitelist(ValidationObject *layer_data, const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        // Check for recognized instance extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) {
            log_msg(layer_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    kVUIDUndefined,
                    "Instance Extension %s is not supported by this layer.  Using this extension may adversely affect validation "
                    "results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

// Check enabled device extensions against supported device extension whitelist
static void DeviceExtensionWhitelist(ValidationObject *layer_data, const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        // Check for recognized device extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) {
            log_msg(layer_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    kVUIDUndefined,
                    "Device Extension %s is not supported by this layer.  Using this extension may adversely affect validation "
                    "results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

// For the given ValidationCheck enum, set all relevant instance disabled flags to true
void SetDisabledFlags(ValidationObject *instance_data, const VkValidationFlagsEXT *val_flags_struct) {
    for (uint32_t i = 0; i < val_flags_struct->disabledValidationCheckCount; ++i) {
        switch (val_flags_struct->pDisabledValidationChecks[i]) {
        case VK_VALIDATION_CHECK_SHADERS_EXT:
            instance_data->disabled.shader_validation = true;
            break;
        case VK_VALIDATION_CHECK_ALL_EXT:
            // Set all disabled flags to true
            instance_data->disabled.SetAll(true);
            break;
        default:
            break;
        }
    }
}

void SetValidationFeatures(ValidationObject *instance_data, const VkValidationFeaturesEXT *val_features_struct) {
    for (uint32_t i = 0; i < val_features_struct->disabledValidationFeatureCount; ++i) {
        switch (val_features_struct->pDisabledValidationFeatures[i]) {
        case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT:
            instance_data->disabled.shader_validation = true;
            break;
        case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT:
            // Set all disabled flags to true
            instance_data->disabled.SetAll(true);
            break;
        default:
            break;
        }
    }
    for (uint32_t i = 0; i < val_features_struct->enabledValidationFeatureCount; ++i) {
        switch (val_features_struct->pEnabledValidationFeatures[i]) {
        case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:
            instance_data->enabled.gpu_validation = true;
            break;
        case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT:
            instance_data->enabled.gpu_validation_reserve_binding_slot = true;
            break;
        default:
            break;
        }
    }
}
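
// Illustrative application-side usage (not part of this layer): an application opts into these
// features by chaining a VkValidationFeaturesEXT structure into VkInstanceCreateInfo::pNext, e.g.
//     VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
//     VkValidationFeaturesEXT features = {VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT};
//     features.enabledValidationFeatureCount = 1;
//     features.pEnabledValidationFeatures = enables;
//     instance_create_info.pNext = &features;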

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    if (!ApiParentExtensionEnabled(funcName, layer_data->device_extensions.device_extension_set)) {
        return nullptr;
    }
    const auto &item = name_to_funcptr_map.find(funcName);
    if (item != name_to_funcptr_map.end()) {
        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
    }
    auto &table = layer_data->device_dispatch_table;
    if (!table.GetDeviceProcAddr) return nullptr;
    return table.GetDeviceProcAddr(device, funcName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    const auto &item = name_to_funcptr_map.find(funcName);
    if (item != name_to_funcptr_map.end()) {
        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
    }
    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
    auto &table = layer_data->instance_dispatch_table;
    if (!table.GetInstanceProcAddr) return nullptr;
    return table.GetInstanceProcAddr(instance, funcName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
    auto &table = layer_data->instance_dispatch_table;
    if (!table.GetPhysicalDeviceProcAddr) return nullptr;
    return table.GetPhysicalDeviceProcAddr(instance, funcName);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                              VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                    VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
        return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);

    return VK_ERROR_LAYER_NOT_PRESENT;
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
                                                                  uint32_t *pCount, VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) return util_GetExtensionProperties(0, NULL, pCount, pProperties);
    assert(physicalDevice);
    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
    return layer_data->instance_dispatch_table.EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                                              VkInstance *pInstance) {
    VkLayerInstanceCreateInfo* chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) return VK_ERROR_INITIALIZATION_FAILED;
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
    uint32_t specified_version = (pCreateInfo->pApplicationInfo ? pCreateInfo->pApplicationInfo->apiVersion : VK_API_VERSION_1_0);
    uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;


    // Create temporary dispatch vector for pre-calls until instance is created
    std::vector<ValidationObject*> local_object_dispatch;
#if BUILD_OBJECT_TRACKER
    auto object_tracker = new ObjectLifetimes;
    local_object_dispatch.emplace_back(object_tracker);
    object_tracker->container_type = LayerObjectTypeObjectTracker;
    object_tracker->api_version = api_version;
#elif BUILD_THREAD_SAFETY
    auto thread_checker = new ThreadSafety;
    local_object_dispatch.emplace_back(thread_checker);
    thread_checker->container_type = LayerObjectTypeThreading;
    thread_checker->api_version = api_version;
#elif BUILD_PARAMETER_VALIDATION
    auto parameter_validation = new StatelessValidation;
    local_object_dispatch.emplace_back(parameter_validation);
    parameter_validation->container_type = LayerObjectTypeParameterValidation;
    parameter_validation->api_version = api_version;
#elif BUILD_CORE_VALIDATION
    auto core_checks = new CoreChecks;
    local_object_dispatch.emplace_back(core_checks);
    core_checks->container_type = LayerObjectTypeCoreValidation;
    core_checks->api_version = api_version;
#endif

    // Init dispatch array and call registration functions
    for (auto intercept : local_object_dispatch) {
        intercept->PreCallValidateCreateInstance(pCreateInfo, pAllocator, pInstance);
    }
    for (auto intercept : local_object_dispatch) {
        intercept->PreCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance);
    }

    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
    if (result != VK_SUCCESS) return result;

    auto framework = GetLayerDataPtr(get_dispatch_key(*pInstance), layer_data_map);

    framework->object_dispatch = local_object_dispatch;
    framework->container_type = LayerObjectTypeInstance;

    framework->instance = *pInstance;
    layer_init_instance_dispatch_table(*pInstance, &framework->instance_dispatch_table, fpGetInstanceProcAddr);
    framework->report_data = debug_utils_create_instance(&framework->instance_dispatch_table, *pInstance, pCreateInfo->enabledExtensionCount,
                                                         pCreateInfo->ppEnabledExtensionNames);
    framework->api_version = api_version;
    framework->instance_extensions.InitFromInstanceCreateInfo(specified_version, pCreateInfo);

    // Parse any pNext chains for validation features and flags
    const auto *validation_flags_ext = lvl_find_in_chain<VkValidationFlagsEXT>(pCreateInfo->pNext);
    if (validation_flags_ext) {
        SetDisabledFlags(framework, validation_flags_ext);
    }
    const auto *validation_features_ext = lvl_find_in_chain<VkValidationFeaturesEXT>(pCreateInfo->pNext);
    if (validation_features_ext) {
        SetValidationFeatures(framework, validation_features_ext);
    }

#if BUILD_OBJECT_TRACKER
    layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "lunarg_object_tracker");
    object_tracker->report_data = framework->report_data;
#elif BUILD_THREAD_SAFETY
    layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "google_thread_checker");
    thread_checker->report_data = framework->report_data;
#elif BUILD_PARAMETER_VALIDATION
    layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "lunarg_parameter_validation");
    parameter_validation->report_data = framework->report_data;
#elif BUILD_CORE_VALIDATION
    layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "lunarg_core_validation");
    core_checks->report_data = framework->report_data;
    core_checks->instance_dispatch_table = framework->instance_dispatch_table;
    core_checks->instance = *pInstance;
    core_checks->enabled = framework->enabled;
    core_checks->disabled = framework->disabled;
    core_checks->instance_state = core_checks;
#else
    layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "lunarg_unique_objects");
#endif

    for (auto intercept : framework->object_dispatch) {
        intercept->PostCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance, result);
    }

    InstanceExtensionWhitelist(framework, pCreateInfo, *pInstance);

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(instance);
    auto layer_data = GetLayerDataPtr(key, layer_data_map);
    """ + precallvalidate_loop + """
        auto lock = intercept->write_lock();
        intercept->PreCallValidateDestroyInstance(instance, pAllocator);
    }
    """ + precallrecord_loop + """
        auto lock = intercept->write_lock();
        intercept->PreCallRecordDestroyInstance(instance, pAllocator);
    }

    layer_data->instance_dispatch_table.DestroyInstance(instance, pAllocator);

    """ + postcallrecord_loop + """
        auto lock = intercept->write_lock();
        intercept->PostCallRecordDestroyInstance(instance, pAllocator);
    }
    // Clean up logging callback, if any
    while (layer_data->logging_messenger.size() > 0) {
        VkDebugUtilsMessengerEXT messenger = layer_data->logging_messenger.back();
        layer_destroy_messenger_callback(layer_data->report_data, messenger, pAllocator);
        layer_data->logging_messenger.pop_back();
    }
    while (layer_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = layer_data->logging_callback.back();
        layer_destroy_report_callback(layer_data->report_data, callback, pAllocator);
        layer_data->logging_callback.pop_back();
    }

    layer_debug_utils_destroy_instance(layer_data->report_data);

    for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
        delete *item;
    }
    FreeLayerDataPtr(key, layer_data_map);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    auto instance_interceptor = GetLayerDataPtr(get_dispatch_key(gpu), layer_data_map);

    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(instance_interceptor->instance, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    // Get physical device limits for device
    VkPhysicalDeviceProperties device_properties = {};
    instance_interceptor->instance_dispatch_table.GetPhysicalDeviceProperties(gpu, &device_properties);

    // Setup the validation tables based on the application API version from the instance and the capabilities of the device driver
    uint32_t effective_api_version = std::min(device_properties.apiVersion, instance_interceptor->api_version);

    DeviceExtensions device_extensions = {};
    device_extensions.InitFromDeviceCreateInfo(&instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
    for (auto item : instance_interceptor->object_dispatch) {
        item->device_extensions = device_extensions;
    }

    std::unique_ptr<safe_VkDeviceCreateInfo> modified_create_info(new safe_VkDeviceCreateInfo(pCreateInfo));

    bool skip = false;
    for (auto intercept : instance_interceptor->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : instance_interceptor->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, modified_create_info);
    }

    VkResult result = fpCreateDevice(gpu, reinterpret_cast<VkDeviceCreateInfo *>(modified_create_info.get()), pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    auto device_interceptor = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    device_interceptor->container_type = LayerObjectTypeDevice;

    // Save local info in device object
    device_interceptor->phys_dev_properties.properties = device_properties;
    device_interceptor->api_version = device_interceptor->device_extensions.InitFromDeviceCreateInfo(
        &instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
    device_interceptor->device_extensions = device_extensions;

    layer_init_device_dispatch_table(*pDevice, &device_interceptor->device_dispatch_table, fpGetDeviceProcAddr);

    device_interceptor->device = *pDevice;
    device_interceptor->physical_device = gpu;
    device_interceptor->instance = instance_interceptor->instance;
    device_interceptor->report_data = layer_debug_utils_create_device(instance_interceptor->report_data, *pDevice);

#if BUILD_OBJECT_TRACKER
    // Create child layer objects for this key and add to dispatch vector
    auto object_tracker = new ObjectLifetimes;
    // TODO:  Initialize child objects with parent info through a constructor taking a parent object
    object_tracker->container_type = LayerObjectTypeObjectTracker;
    object_tracker->physical_device = gpu;
    object_tracker->instance = instance_interceptor->instance;
    object_tracker->report_data = device_interceptor->report_data;
    object_tracker->device_dispatch_table = device_interceptor->device_dispatch_table;
    object_tracker->api_version = device_interceptor->api_version;
    device_interceptor->object_dispatch.emplace_back(object_tracker);
#elif BUILD_THREAD_SAFETY
    auto thread_safety = new ThreadSafety;
    // TODO:  Initialize child objects with parent info through a constructor taking a parent object
    thread_safety->container_type = LayerObjectTypeThreading;
    thread_safety->physical_device = gpu;
    thread_safety->instance = instance_interceptor->instance;
    thread_safety->report_data = device_interceptor->report_data;
    thread_safety->device_dispatch_table = device_interceptor->device_dispatch_table;
    thread_safety->api_version = device_interceptor->api_version;
    device_interceptor->object_dispatch.emplace_back(thread_safety);
#elif BUILD_PARAMETER_VALIDATION
    auto stateless_validation = new StatelessValidation;
    // TODO:  Initialize child objects with parent info through a constructor taking a parent object
    stateless_validation->container_type = LayerObjectTypeParameterValidation;
    stateless_validation->physical_device = gpu;
    stateless_validation->instance = instance_interceptor->instance;
    stateless_validation->report_data = device_interceptor->report_data;
    stateless_validation->device_dispatch_table = device_interceptor->device_dispatch_table;
    stateless_validation->api_version = device_interceptor->api_version;
    device_interceptor->object_dispatch.emplace_back(stateless_validation);
#elif BUILD_CORE_VALIDATION
    auto core_checks = new CoreChecks;
    // TODO:  Initialize child objects with parent info through a constructor taking a parent object
    core_checks->container_type = LayerObjectTypeCoreValidation;
    core_checks->physical_device = gpu;
    core_checks->instance = instance_interceptor->instance;
    core_checks->report_data = device_interceptor->report_data;
    core_checks->device_dispatch_table = device_interceptor->device_dispatch_table;
    core_checks->instance_dispatch_table = instance_interceptor->instance_dispatch_table;
    core_checks->api_version = device_interceptor->api_version;
    core_checks->instance_extensions = instance_interceptor->instance_extensions;
    core_checks->device_extensions = device_interceptor->device_extensions;
    core_checks->instance_state = reinterpret_cast<CoreChecks *>(
        core_checks->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeCoreValidation));
    core_checks->device = *pDevice;
    device_interceptor->object_dispatch.emplace_back(core_checks);
#endif

    for (auto intercept : instance_interceptor->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
    }

    DeviceExtensionWhitelist(device_interceptor, pCreateInfo, *pDevice);

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(device);
    auto layer_data = GetLayerDataPtr(key, layer_data_map);
    """ + precallvalidate_loop + """
        auto lock = intercept->write_lock();
        intercept->PreCallValidateDestroyDevice(device, pAllocator);
    }
    """ + precallrecord_loop + """
        auto lock = intercept->write_lock();
        intercept->PreCallRecordDestroyDevice(device, pAllocator);
    }
    layer_debug_utils_destroy_device(device);

    layer_data->device_dispatch_table.DestroyDevice(device, pAllocator);

    """ + postcallrecord_loop + """
        auto lock = intercept->write_lock();
        intercept->PostCallRecordDestroyDevice(device, pAllocator);
    }

    for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
        delete *item;
    }
    FreeLayerDataPtr(key, layer_data_map);
}


// Special-case APIs for which core_validation needs custom parameter lists and/or modifies parameters

VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
    VkDevice                                    device,
    VkPipelineCache                             pipelineCache,
    uint32_t                                    createInfoCount,
    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
    const VkAllocationCallbacks*                pAllocator,
    VkPipeline*                                 pPipelines) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

#ifdef BUILD_CORE_VALIDATION
    create_graphics_pipeline_api_state cgpl_state{};
#else
    struct create_graphics_pipeline_api_state {
        const VkGraphicsPipelineCreateInfo* pCreateInfos;
    } cgpl_state;
    cgpl_state.pCreateInfos = pCreateInfos;
#endif

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &cgpl_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &cgpl_state);
    }

    VkResult result = DispatchCreateGraphicsPipelines(layer_data, device, pipelineCache, createInfoCount, cgpl_state.pCreateInfos, pAllocator, pPipelines);

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &cgpl_state);
    }
    return result;
}

// This API saves some core_validation pipeline state on the stack for performance purposes
VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
    VkDevice                                    device,
    VkPipelineCache                             pipelineCache,
    uint32_t                                    createInfoCount,
    const VkComputePipelineCreateInfo*          pCreateInfos,
    const VkAllocationCallbacks*                pAllocator,
    VkPipeline*                                 pPipelines) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

#ifndef BUILD_CORE_VALIDATION
    struct PIPELINE_STATE {};
#endif

    std::vector<std::unique_ptr<PIPELINE_STATE>> pipe_state;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &pipe_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    }
    VkResult result = DispatchCreateComputePipelines(layer_data, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &pipe_state);
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
    VkDevice                                    device,
    VkPipelineCache                             pipelineCache,
    uint32_t                                    createInfoCount,
    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
    const VkAllocationCallbacks*                pAllocator,
    VkPipeline*                                 pPipelines) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

#ifndef BUILD_CORE_VALIDATION
    struct PIPELINE_STATE {};
#endif

    std::vector<std::unique_ptr<PIPELINE_STATE>> pipe_state;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &pipe_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    }
    VkResult result = DispatchCreateRayTracingPipelinesNV(layer_data, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &pipe_state);
    }
    return result;
}

// This API needs the ability to modify a down-chain parameter
VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
    VkDevice                                    device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

#ifndef BUILD_CORE_VALIDATION
    struct create_pipeline_layout_api_state {
        VkPipelineLayoutCreateInfo modified_create_info;
    };
#endif
    create_pipeline_layout_api_state cpl_state{};
    cpl_state.modified_create_info = *pCreateInfo;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, &cpl_state);
    }
    VkResult result = DispatchCreatePipelineLayout(layer_data, device, &cpl_state.modified_create_info, pAllocator, pPipelineLayout);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
    }
    return result;
}

// This API needs some local stack data for performance reasons and also may modify a parameter
VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
    VkDevice                                    device,
    const VkShaderModuleCreateInfo*             pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkShaderModule*                             pShaderModule) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

#ifndef BUILD_CORE_VALIDATION
    struct create_shader_module_api_state {
        VkShaderModuleCreateInfo instrumented_create_info;
    };
#endif
    create_shader_module_api_state csm_state{};
    csm_state.instrumented_create_info = *pCreateInfo;

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
    }
    VkResult result = DispatchCreateShaderModule(layer_data, device, &csm_state.instrumented_create_info, pAllocator, pShaderModule);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result, &csm_state);
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
    VkDevice                                    device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;

#ifdef BUILD_CORE_VALIDATION
    cvdescriptorset::AllocateDescriptorSetsData ads_state(pAllocateInfo->descriptorSetCount);
#else
    struct ads_state {} ads_state;
#endif

    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, &ads_state);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
    }
    VkResult result = DispatchAllocateDescriptorSets(layer_data, device, pAllocateInfo, pDescriptorSets);
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result, &ads_state);
    }
    return result;
}


1111// ValidationCache APIs do not dispatch
1112
1113VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
1114    VkDevice                                    device,
1115    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
1116    const VkAllocationCallbacks*                pAllocator,
1117    VkValidationCacheEXT*                       pValidationCache) {
1118    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
1119    VkResult result = VK_SUCCESS;
1120
1121    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
1122    if (validation_data) {
1123        auto lock = validation_data->write_lock();
1124        result = validation_data->CoreLayerCreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
1125    }
1126    return result;
1127}
1128
1129VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
1130    VkDevice                                    device,
1131    VkValidationCacheEXT                        validationCache,
1132    const VkAllocationCallbacks*                pAllocator) {
1133    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
1134
1135    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
1136    if (validation_data) {
1137        auto lock = validation_data->write_lock();
1138        validation_data->CoreLayerDestroyValidationCacheEXT(device, validationCache, pAllocator);
1139    }
1140}
1141
1142VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
1143    VkDevice                                    device,
1144    VkValidationCacheEXT                        dstCache,
1145    uint32_t                                    srcCacheCount,
1146    const VkValidationCacheEXT*                 pSrcCaches) {
1147    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
1148    VkResult result = VK_SUCCESS;
1149
1150    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
1151    if (validation_data) {
1152        auto lock = validation_data->write_lock();
1153        result = validation_data->CoreLayerMergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
1154    }
1155    return result;
1156}
1157
1158VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
1159    VkDevice                                    device,
1160    VkValidationCacheEXT                        validationCache,
1161    size_t*                                     pDataSize,
1162    void*                                       pData) {
1163    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
1164    VkResult result = VK_SUCCESS;
1165
1166    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
1167    if (validation_data) {
1168        auto lock = validation_data->write_lock();
1169        result = validation_data->CoreLayerGetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
1170    }
1171    return result;
1172
1173}"""
1174
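    # The hand-written wrappers above pass a per-call state object (cgpl_state, csm_state, ads_state, ...)
    # through the Pre/Post hooks so individual validation objects can preserve state across the call or
    # substitute the create info that is actually dispatched down the chain (CreateShaderModule, for
    # example, dispatches csm_state.instrumented_create_info rather than the application's pCreateInfo).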
1175    inline_custom_validation_class_definitions = """
1176        virtual VkResult CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) { return VK_SUCCESS; };
1177        virtual void CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) {};
1178        virtual VkResult CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches)  { return VK_SUCCESS; };
1179        virtual VkResult CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData)  { return VK_SUCCESS; };
1180
1181        // Allow additional state parameter for CreateGraphicsPipelines
1182        virtual bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
1183            return PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
1184        };
1185        virtual void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
1186            PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
1187        };
1188        virtual void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* cgpl_state) {
1189            PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
1190        };
1191
1192        // Allow additional state parameter for CreateComputePipelines
1193        virtual bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state)  {
1194            return PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
1195        };
1196        virtual void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
1197            PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
1198        };
1199
1200        // Allow additional state parameter for CreateRayTracingPipelinesNV
1201        virtual bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state)  {
1202            return PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
1203        };
1204        virtual void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
1205            PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
1206        };
1207
1208        // Allow modification of a down-chain parameter for CreatePipelineLayout
1209        virtual void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout, void *cpl_state) {
1210            PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
1211        };
1212
1213        // Enable the CreateShaderModule API to take an extra argument for state preservation and parameter modification
1214        virtual bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state)  {
1215            return PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
1216        };
1217        virtual void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) {
1218            PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
1219        };
1220        virtual void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result, void* csm_state) {
1221            PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result);
1222        };
1223
1224        // Allow AllocateDescriptorSets to use some local stack storage for performance purposes
1225        virtual bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, void* ads_state)  {
1226            return PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
1227        };
1228        virtual void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, VkResult result, void* ads_state)  {
1229            PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result);
1230        };
1231
1232        // Modify a parameter to CreateDevice
1233        virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, std::unique_ptr<safe_VkDeviceCreateInfo> &modified_create_info) {
1234            PreCallRecordCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
1235        };
1236"""
1237
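    # Example (illustrative): a validation object that needs the extra state overrides the void* variant
    # declared above; a hypothetical subclass might declare:
    #   class MyChecks : public ValidationObject {
    #       bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
    #                                              const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule,
    #                                              void* csm_state) override;
    #   };
    # Objects that ignore the extra state inherit the defaults above, which simply forward to the
    # plain four-parameter hooks.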
1238    inline_custom_source_postamble = """
1239// loader-layer interface v0, just wrappers since there is only a layer
1240
1241VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
1242                                                                                      VkExtensionProperties *pProperties) {
1243    return vulkan_layer_chassis::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
1244}
1245
1246VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
1247                                                                                  VkLayerProperties *pProperties) {
1248    return vulkan_layer_chassis::EnumerateInstanceLayerProperties(pCount, pProperties);
1249}
1250
1251VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
1252                                                                                VkLayerProperties *pProperties) {
1253    // the layer command handles VK_NULL_HANDLE just fine internally
1254    assert(physicalDevice == VK_NULL_HANDLE);
1255    return vulkan_layer_chassis::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
1256}
1257
1258VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
1259                                                                                    const char *pLayerName, uint32_t *pCount,
1260                                                                                    VkExtensionProperties *pProperties) {
1261    // the layer command handles VK_NULL_HANDLE just fine internally
1262    assert(physicalDevice == VK_NULL_HANDLE);
1263    return vulkan_layer_chassis::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
1264}
1265
1266VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
1267    return vulkan_layer_chassis::GetDeviceProcAddr(dev, funcName);
1268}
1269
1270VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
1271    return vulkan_layer_chassis::GetInstanceProcAddr(instance, funcName);
1272}
1273
1274VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_layerGetPhysicalDeviceProcAddr(VkInstance instance,
1275                                                                                           const char *funcName) {
1276    return vulkan_layer_chassis::GetPhysicalDeviceProcAddr(instance, funcName);
1277}
1278
1279VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
1280    assert(pVersionStruct != NULL);
1281    assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);
1282
1283    // Fill in the function pointers if our version is at least capable of having the structure contain them.
1284    if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
1285        pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
1286        pVersionStruct->pfnGetDeviceProcAddr = vkGetDeviceProcAddr;
1287        pVersionStruct->pfnGetPhysicalDeviceProcAddr = vk_layerGetPhysicalDeviceProcAddr;
1288    }
1289
1290    return VK_SUCCESS;
1291}"""
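    # The postamble above implements loader-layer interface version 2: the loader calls
    # vkNegotiateLoaderLayerInterfaceVersion first and, for interface versions >= 2, retrieves the layer's
    # GetInstanceProcAddr / GetDeviceProcAddr / GetPhysicalDeviceProcAddr entry points from the filled-in struct.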
1292
1293
1294    def __init__(self,
1295                 errFile = sys.stderr,
1296                 warnFile = sys.stderr,
1297                 diagFile = sys.stdout):
1298        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
1299        # Internal state - accumulators for different inner block text
1300        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1301        self.intercepts = []
1302        self.layer_factory = ''                     # String containing base layer factory class definition
1303
1304    # Check if the parameter passed in is a pointer to an array
1305    def paramIsArray(self, param):
1306        return param.attrib.get('len') is not None
1307
1308    # Check if the parameter passed in is a pointer
1309    def paramIsPointer(self, param):
1310        ispointer = False
1311        for elem in param:
1312            if (elem.tag == 'type') and (elem.tail is not None) and '*' in elem.tail:
1313                ispointer = True
1314        return ispointer
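    # Example (illustrative): for a registry <param> such as
    #   <param>const <type>VkInstanceCreateInfo</type>* <name>pCreateInfo</name></param>
    # the '*' appears in the tail text following the <type> child, so paramIsPointer() reports a pointer.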
1315
1316    # Check if an object is a non-dispatchable handle
1317    def isHandleTypeNonDispatchable(self, handletype):
1318        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
1319        if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
1320            return True
1321        else:
1322            return False
1323
1324    # Check if an object is a dispatchable handle
1325    def isHandleTypeDispatchable(self, handletype):
1326        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
1327        if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
1328            return True
1329        else:
1330            return False
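    # Example (illustrative): vk.xml declares handles along these lines:
    #   <type category="handle"><type>VK_DEFINE_HANDLE</type>(<name>VkDevice</name>)</type>
    #   <type category="handle"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkFence</name>)</type>
    # so the nested <type> text is what distinguishes dispatchable from non-dispatchable handles.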
1331    #
1332    #
1333    def beginFile(self, genOpts):
1334        OutputGenerator.beginFile(self, genOpts)
1335        # Output Copyright
1336        write(self.inline_copyright_message, file=self.outFile)
1337        # Multiple inclusion protection
1338        self.header = False
1339        if (self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
1340            self.header = True
1341            write('#pragma once', file=self.outFile)
1342            self.newline()
1343        if self.header:
1344            write(self.inline_custom_header_preamble, file=self.outFile)
1345        else:
1346            write(self.inline_custom_source_preamble, file=self.outFile)
1347        self.layer_factory += self.inline_custom_header_class_definition
1348    #
1349    #
1350    def endFile(self):
1351        # Finish C++ namespace and multiple inclusion protection
1352        self.newline()
1353        if not self.header:
1354            # Record intercepted procedures
1355            write('// Map of all APIs to be intercepted by this layer', file=self.outFile)
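            # Each map entry pairs a Vulkan entry point name with its chassis wrapper, e.g. (illustrative):
            #   {"vkDestroyFence", (void*)DestroyFence},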
1356            write('const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
1357            write('\n'.join(self.intercepts), file=self.outFile)
1358            write('};\n', file=self.outFile)
1359            self.newline()
1360            write('} // namespace vulkan_layer_chassis', file=self.outFile)
1361        if self.header:
1362            self.newline()
1363            # Output Layer Factory Class Definitions
1364            self.layer_factory += self.inline_custom_validation_class_definitions
1365            self.layer_factory += '};\n\n'
1366            self.layer_factory += 'extern std::unordered_map<void*, ValidationObject*> layer_data_map;'
1367            write(self.layer_factory, file=self.outFile)
1368        else:
1369            write(self.inline_custom_source_postamble, file=self.outFile)
1370        # Finish processing in superclass
1371        OutputGenerator.endFile(self)
1372
1373    def beginFeature(self, interface, emit):
1374        # Start processing in superclass
1375        OutputGenerator.beginFeature(self, interface, emit)
1376        # Get feature extra protect
1377        self.featureExtraProtect = GetFeatureProtect(interface)
1378        # Accumulate includes, defines, types, enums, function pointer typedefs, and function prototypes separately for this
1379        # feature. They're only printed in endFeature().
1380        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1381
1382    def endFeature(self):
1383        # Actually write the interface to the output file.
1384        if (self.emit):
1385            self.newline()
1386            # If type declarations are needed by other features based on this one, it may be necessary to suppress the ExtraProtect,
1387            # or move it below the 'for section...' loop.
1388            if (self.featureExtraProtect != None):
1389                write('#ifdef', self.featureExtraProtect, file=self.outFile)
1390            for section in self.TYPE_SECTIONS:
1391                contents = self.sections[section]
1392                if contents:
1393                    write('\n'.join(contents), file=self.outFile)
1394                    self.newline()
1395            if (self.sections['command']):
1396                write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
1397                self.newline()
1398            if (self.featureExtraProtect != None):
1399                write('#endif //', self.featureExtraProtect, file=self.outFile)
1400        # Finish processing in superclass
1401        OutputGenerator.endFeature(self)
1402    #
1403    # Append a definition to the specified section
1404    def appendSection(self, section, text):
1405        self.sections[section].append(text)
1406    #
1407    # Type generation
1408    def genType(self, typeinfo, name, alias):
1409        pass
1410    #
1411    # Struct (e.g. C "struct" type) generation. This is a special case of the <type> tag where the contents are
1412    # interpreted as a set of <member> tags instead of freeform C type declarations. The <member> tags are just like <param>
1413    # tags - they are a declaration of a struct or union member. Only simple member declarations are supported (no nested
1414    # structs etc.)
1415    def genStruct(self, typeinfo, typeName):
1416        OutputGenerator.genStruct(self, typeinfo, typeName)
1417        body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
1418        # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
1419        for member in typeinfo.elem.findall('.//member'):
1420            body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
1421            body += ';\n'
1422        body += '} ' + typeName + ';\n'
1423        self.appendSection('struct', body)
1424    #
1425    # Group (e.g. C "enum" type) generation. These are concatenated together with other types.
1426    def genGroup(self, groupinfo, groupName, alias):
1427        pass
1428    # Enumerant generation
1429    # <enum> tags may specify their values in several ways, but are usually just integers.
1430    def genEnum(self, enuminfo, name, alias):
1431        pass
1432    #
1433    # Customize Cdecl for layer factory base class
1434    def BaseClassCdecl(self, elem, name):
1435        raw = self.makeCDecls(elem)[1]
1436
1437        # Toss everything before the undecorated name
1438        prototype = raw.split("VKAPI_PTR *PFN_vk")[1]
1439        prototype = prototype.replace(")", "", 1)
1440        prototype = prototype.replace(";", " {};")
1441
1442        # Build up pre/post call virtual function declarations
1443        pre_call_validate = 'virtual bool PreCallValidate' + prototype
1444        pre_call_validate = pre_call_validate.replace("{}", " { return false; }")
1445        pre_call_record = 'virtual void PreCallRecord' + prototype
1446        post_call_record = 'virtual void PostCallRecord' + prototype
1447        resulttype = elem.find('proto/type')
1448        if resulttype.text == 'VkResult':
1449            post_call_record = post_call_record.replace(')', ', VkResult result)')
1450        return '        %s\n        %s\n        %s\n' % (pre_call_validate, pre_call_record, post_call_record)
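    # Example (illustrative): given the registry typedef
    #   typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
    # BaseClassCdecl produces base-class declarations of the form:
    #   virtual bool PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator)  { return false; };
    #   virtual void PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {};
    #   virtual void PostCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {};
    # Commands returning VkResult get an extra trailing 'VkResult result' parameter on the PostCallRecord* declaration.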
1451    #
1452    # Command generation
1453    def genCmd(self, cmdinfo, name, alias):
1454        ignore_functions = [
1455            'vkEnumerateInstanceVersion',
1456        ]
1457
1458        if name in ignore_functions:
1459            return
1460
1461        if self.header: # In the header declare all intercepts
1462            self.appendSection('command', '')
1463            self.appendSection('command', self.makeCDecls(cmdinfo.elem)[0])
1464            if (self.featureExtraProtect != None):
1465                self.layer_factory += '#ifdef %s\n' % self.featureExtraProtect
1466            # Update base class with virtual function declarations
1467            if 'ValidationCache' not in name:
1468                self.layer_factory += self.BaseClassCdecl(cmdinfo.elem, name)
1469            if (self.featureExtraProtect != None):
1470                self.layer_factory += '#endif\n'
1471            return
1472
1473        if name in self.manual_functions:
1474            if 'ValidationCache' not in name:
1475                self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1476            else:
1477                self.intercepts += [ '#ifdef BUILD_CORE_VALIDATION' ]
1478                self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1479                self.intercepts += [ '#endif' ]
1480            return
1481        # Record that the function will be intercepted
1482        if (self.featureExtraProtect != None):
1483            self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
1484        self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1485        if (self.featureExtraProtect != None):
1486            self.intercepts += [ '#endif' ]
1487        OutputGenerator.genCmd(self, cmdinfo, name, alias)
1488        #
1489        decls = self.makeCDecls(cmdinfo.elem)
1490        self.appendSection('command', '')
1491        self.appendSection('command', '%s {' % decls[0][:-1])
1492        # Setup common to call wrappers. First parameter is always dispatchable
1493        dispatchable_type = cmdinfo.elem.find('param/type').text
1494        dispatchable_name = cmdinfo.elem.find('param/name').text
1495        # Default to device
1496        device_or_instance = 'device'
1497        dispatch_table_name = 'VkLayerDispatchTable'
1498        # Set to instance as necessary
1499        if dispatchable_type in ["VkPhysicalDevice", "VkInstance"] or name == 'vkCreateInstance':
1500            device_or_instance = 'instance'
1501            dispatch_table_name = 'VkLayerInstanceDispatchTable'
1502        self.appendSection('command', '    auto layer_data = GetLayerDataPtr(get_dispatch_key(%s), layer_data_map);' % (dispatchable_name))
1503        api_function_name = cmdinfo.elem.attrib.get('name')
1504        params = cmdinfo.elem.findall('param/name')
1505        paramstext = ', '.join([str(param.text) for param in params])
1506        API = api_function_name.replace('vk','Dispatch') + '(layer_data, '
1507
1508        # Map each result type to the early-return used when validation fails, then declare the result variable, if any.
1509        return_map = {
1510            'PFN_vkVoidFunction': 'return nullptr;',
1511            'VkBool32': 'return VK_FALSE;',
1512            'VkDeviceAddress': 'return 0;',
1513            'VkResult': 'return VK_ERROR_VALIDATION_FAILED_EXT;',
1514            'void': 'return;',
1515            'uint32_t': 'return 0;'
1516            }
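        # Illustrative emitted line for a VkResult command when a validation hook requests a skip:
        #   if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;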
1517        resulttype = cmdinfo.elem.find('proto/type')
1518        assignresult = ''
1519        if (resulttype.text != 'void'):
1520            assignresult = resulttype.text + ' result = '
1521
1522        # Set up skip and locking
1523        self.appendSection('command', '    bool skip = false;')
1524
1525        # Generate pre-call validation source code
1526        self.appendSection('command', '    %s' % self.precallvalidate_loop)
1527        self.appendSection('command', '        auto lock = intercept->write_lock();')
1528        self.appendSection('command', '        skip |= intercept->PreCallValidate%s(%s);' % (api_function_name[2:], paramstext))
1529        self.appendSection('command', '        if (skip) %s' % return_map[resulttype.text])
1530        self.appendSection('command', '    }')
1531
1532        # Generate pre-call state recording source code
1533        self.appendSection('command', '    %s' % self.precallrecord_loop)
1534        self.appendSection('command', '        auto lock = intercept->write_lock();')
1535        self.appendSection('command', '        intercept->PreCallRecord%s(%s);' % (api_function_name[2:], paramstext))
1536        self.appendSection('command', '    }')
1537
1538        # Insert pre-dispatch debug utils function call
1539        if name in self.pre_dispatch_debug_utils_functions:
1540            self.appendSection('command', '    %s' % self.pre_dispatch_debug_utils_functions[name])
1541
1542        # Output dispatch (down-chain) function call
1543        self.appendSection('command', '    ' + assignresult + API + paramstext + ');')
1544
1545        # Insert post-dispatch debug utils function call
1546        if name in self.post_dispatch_debug_utils_functions:
1547            self.appendSection('command', '    %s' % self.post_dispatch_debug_utils_functions[name])
1548
1549        # Generate post-call object processing source code
1550        self.appendSection('command', '    %s' % self.postcallrecord_loop)
1551        returnparam = ''
1552        if (resulttype.text == 'VkResult'):
1553            returnparam = ', result'
1554        self.appendSection('command', '        auto lock = intercept->write_lock();')
1555        self.appendSection('command', '        intercept->PostCallRecord%s(%s%s);' % (api_function_name[2:], paramstext, returnparam))
1556        self.appendSection('command', '    }')
1557        # Return result variable, if any.
1558        if (resulttype.text != 'void'):
1559            self.appendSection('command', '    return result;')
1560        self.appendSection('command', '}')
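        # Example (illustrative): for vkDestroyFence the sections above assemble a wrapper along these lines:
        #   VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {
        #       auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
        #       bool skip = false;
        #       for (auto intercept : layer_data->object_dispatch) {
        #           auto lock = intercept->write_lock();
        #           skip |= intercept->PreCallValidateDestroyFence(device, fence, pAllocator);
        #           if (skip) return;
        #       }
        #       for (auto intercept : layer_data->object_dispatch) {
        #           auto lock = intercept->write_lock();
        #           intercept->PreCallRecordDestroyFence(device, fence, pAllocator);
        #       }
        #       DispatchDestroyFence(layer_data, device, fence, pAllocator);
        #       for (auto intercept : layer_data->object_dispatch) {
        #           auto lock = intercept->write_lock();
        #           intercept->PostCallRecordDestroyFence(device, fence, pAllocator);
        #       }
        #   }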
1561    #
1562    # Override makeProtoName to drop the "vk" prefix
1563    def makeProtoName(self, name, tail):
1564        return self.genOpts.apientry + name[2:] + tail
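    # Example (illustrative): if genOpts.apientry is 'VKAPI_CALL ', makeProtoName('vkDestroyFence', tail)
    # yields 'VKAPI_CALL DestroyFence' + tail, so the generated wrappers carry no 'vk' prefix.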
1565