1 // Copyright 2015-2024 The Khronos Group Inc. 2 // 3 // SPDX-License-Identifier: Apache-2.0 OR MIT 4 // 5 6 // This header is generated from the Khronos Vulkan XML API Registry. 7 8 #ifndef VULKAN_FUNCS_HPP 9 #define VULKAN_FUNCS_HPP 10 11 namespace VULKAN_HPP_NAMESPACE 12 { 13 14 //=========================== 15 //=== COMMAND Definitions === 16 //=========================== 17 18 //=== VK_VERSION_1_0 === 19 20 template <typename Dispatch> createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Instance * pInstance,Dispatch const & d)21 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, 22 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23 VULKAN_HPP_NAMESPACE::Instance * pInstance, 24 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 25 { 26 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 27 return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), 28 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 29 reinterpret_cast<VkInstance *>( pInstance ) ) ); 30 } 31 32 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 33 template <typename Dispatch> createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d)34 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( 35 const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) 36 { 37 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 38 39 VULKAN_HPP_NAMESPACE::Instance instance; 40 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 41 d.vkCreateInstance( 
reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 42 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 43 reinterpret_cast<VkInstance *>( &instance ) ) ); 44 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); 45 46 return createResultValueType( result, instance ); 47 } 48 49 # ifndef VULKAN_HPP_NO_SMART_HANDLE 50 template <typename Dispatch> createInstanceUnique(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d)51 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( 52 const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) 53 { 54 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 55 56 VULKAN_HPP_NAMESPACE::Instance instance; 57 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 58 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 59 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 60 reinterpret_cast<VkInstance *>( &instance ) ) ); 61 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); 62 63 return createResultValueType( result, 64 UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) ); 65 } 66 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 67 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 68 69 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const70 VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const 
& d ) const VULKAN_HPP_NOEXCEPT 71 { 72 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 73 d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 74 } 75 76 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 77 template <typename Dispatch> destroy(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const78 VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 79 { 80 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 81 82 d.vkDestroyInstance( m_instance, 83 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 84 } 85 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 86 87 template <typename Dispatch> enumeratePhysicalDevices(uint32_t * pPhysicalDeviceCount,VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,Dispatch const & d) const88 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, 89 VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, 90 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 91 { 92 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 93 return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) ); 94 } 95 96 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 97 template <typename PhysicalDeviceAllocator, typename Dispatch> 98 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(Dispatch const & d) const99 Instance::enumeratePhysicalDevices( Dispatch const & d ) const 100 { 101 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 102 103 
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices; 104 uint32_t physicalDeviceCount; 105 VULKAN_HPP_NAMESPACE::Result result; 106 do 107 { 108 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 109 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) 110 { 111 physicalDevices.resize( physicalDeviceCount ); 112 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 113 d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 114 } 115 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 116 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 117 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 118 if ( physicalDeviceCount < physicalDevices.size() ) 119 { 120 physicalDevices.resize( physicalDeviceCount ); 121 } 122 return createResultValueType( result, physicalDevices ); 123 } 124 125 template <typename PhysicalDeviceAllocator, 126 typename Dispatch, 127 typename B1, 128 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PhysicalDevice>::value, int>::type> 129 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(PhysicalDeviceAllocator & physicalDeviceAllocator,Dispatch const & d) const130 Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const 131 { 132 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 133 134 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator ); 135 uint32_t physicalDeviceCount; 136 VULKAN_HPP_NAMESPACE::Result result; 137 do 138 { 139 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 140 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) 141 { 142 physicalDevices.resize( physicalDeviceCount ); 143 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 144 d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 145 } 146 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 147 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 148 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 149 if ( physicalDeviceCount < physicalDevices.size() ) 150 { 151 physicalDevices.resize( physicalDeviceCount ); 152 } 153 return createResultValueType( result, physicalDevices ); 154 } 155 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 156 157 template <typename Dispatch> getFeatures(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,Dispatch const & d) const158 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 159 { 160 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 161 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) ); 162 } 163 164 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 165 template <typename Dispatch> 166 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const & d) const167 PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 168 { 169 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 170 171 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; 172 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) ); 173 174 return features; 175 } 176 
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 177 178 template <typename Dispatch> getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,Dispatch const & d) const179 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 180 VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, 181 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 182 { 183 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 184 d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) ); 185 } 186 187 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 188 template <typename Dispatch> 189 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const190 PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 191 { 192 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 193 194 VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; 195 d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) ); 196 197 return formatProperties; 198 } 199 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 200 201 template <typename Dispatch> getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,Dispatch const & d) const202 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 203 VULKAN_HPP_NAMESPACE::ImageType type, 204 
VULKAN_HPP_NAMESPACE::ImageTiling tiling, 205 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 206 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 207 VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, 208 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 209 { 210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 211 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, 212 static_cast<VkFormat>( format ), 213 static_cast<VkImageType>( type ), 214 static_cast<VkImageTiling>( tiling ), 215 static_cast<VkImageUsageFlags>( usage ), 216 static_cast<VkImageCreateFlags>( flags ), 217 reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) ); 218 } 219 220 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 221 template <typename Dispatch> 222 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,Dispatch const & d) const223 PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 224 VULKAN_HPP_NAMESPACE::ImageType type, 225 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 226 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 227 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 228 Dispatch const & d ) const 229 { 230 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 231 232 VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; 233 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 234 d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, 235 static_cast<VkFormat>( format ), 236 static_cast<VkImageType>( type ), 237 static_cast<VkImageTiling>( tiling ), 238 static_cast<VkImageUsageFlags>( usage ), 239 static_cast<VkImageCreateFlags>( flags ), 240 
reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) ); 241 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); 242 243 return createResultValueType( result, imageFormatProperties ); 244 } 245 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 246 247 template <typename Dispatch> getProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,Dispatch const & d) const248 VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, 249 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 250 { 251 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 252 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) ); 253 } 254 255 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 256 template <typename Dispatch> 257 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const & d) const258 PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 259 { 260 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 261 262 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; 263 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) ); 264 265 return properties; 266 } 267 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 268 269 template <typename Dispatch> getQueueFamilyProperties(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,Dispatch const & d) const270 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, 271 VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, 272 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 273 { 274 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 275 
d.vkGetPhysicalDeviceQueueFamilyProperties( 276 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) ); 277 } 278 279 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 280 template <typename QueueFamilyPropertiesAllocator, typename Dispatch> 281 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(Dispatch const & d) const282 PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const 283 { 284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 285 286 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties; 287 uint32_t queueFamilyPropertyCount; 288 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 289 queueFamilyProperties.resize( queueFamilyPropertyCount ); 290 d.vkGetPhysicalDeviceQueueFamilyProperties( 291 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 292 293 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 294 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 295 { 296 queueFamilyProperties.resize( queueFamilyPropertyCount ); 297 } 298 return queueFamilyProperties; 299 } 300 301 template <typename QueueFamilyPropertiesAllocator, 302 typename Dispatch, 303 typename B1, 304 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, int>::type> 305 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,Dispatch const & d) const306 PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & 
d ) const 307 { 308 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 309 310 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator ); 311 uint32_t queueFamilyPropertyCount; 312 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 313 queueFamilyProperties.resize( queueFamilyPropertyCount ); 314 d.vkGetPhysicalDeviceQueueFamilyProperties( 315 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 316 317 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 318 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 319 { 320 queueFamilyProperties.resize( queueFamilyPropertyCount ); 321 } 322 return queueFamilyProperties; 323 } 324 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 325 326 template <typename Dispatch> getMemoryProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,Dispatch const & d) const327 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, 328 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 329 { 330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 331 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) ); 332 } 333 334 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 335 template <typename Dispatch> 336 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const & d) const337 PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 338 { 339 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 340 341 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; 342 
d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) ); 343 344 return memoryProperties; 345 } 346 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 347 348 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const349 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 350 { 351 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 352 return d.vkGetInstanceProcAddr( m_instance, pName ); 353 } 354 355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 356 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const357 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 358 { 359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 360 361 PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() ); 362 363 return result; 364 } 365 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 366 367 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const368 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 369 { 370 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 371 return d.vkGetDeviceProcAddr( m_device, pName ); 372 } 373 374 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 375 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const376 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 377 { 378 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 379 380 PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); 381 382 return result; 383 } 384 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 385 386 
template <typename Dispatch> createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Device * pDevice,Dispatch const & d) const387 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, 388 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 389 VULKAN_HPP_NAMESPACE::Device * pDevice, 390 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 391 { 392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 393 return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, 394 reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), 395 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 396 reinterpret_cast<VkDevice *>( pDevice ) ) ); 397 } 398 399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 400 template <typename Dispatch> createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const401 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( 402 const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 403 { 404 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 405 406 VULKAN_HPP_NAMESPACE::Device device; 407 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 408 d.vkCreateDevice( m_physicalDevice, 409 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 410 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 411 reinterpret_cast<VkDevice *>( &device ) ) ); 412 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); 413 414 return 
createResultValueType( result, device ); 415 } 416 417 # ifndef VULKAN_HPP_NO_SMART_HANDLE 418 template <typename Dispatch> 419 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type createDeviceUnique(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const420 PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, 421 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 422 Dispatch const & d ) const 423 { 424 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 425 426 VULKAN_HPP_NAMESPACE::Device device; 427 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 428 d.vkCreateDevice( m_physicalDevice, 429 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 430 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 431 reinterpret_cast<VkDevice *>( &device ) ) ); 432 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); 433 434 return createResultValueType( result, UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) ); 435 } 436 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 437 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 438 439 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const440 VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 441 { 442 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 443 d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 444 } 445 446 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 447 
template <typename Dispatch> destroy(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const448 VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 449 { 450 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 451 452 d.vkDestroyDevice( m_device, 453 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 454 } 455 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 456 457 template <typename Dispatch> enumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d)458 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, 459 uint32_t * pPropertyCount, 460 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 461 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 462 { 463 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 464 return static_cast<Result>( 465 d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 466 } 467 468 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 469 template <typename ExtensionPropertiesAllocator, typename Dispatch> 470 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d)471 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) 472 { 473 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 474 475 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; 476 uint32_t propertyCount; 477 
VULKAN_HPP_NAMESPACE::Result result; 478 do 479 { 480 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 481 d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 482 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 483 { 484 properties.resize( propertyCount ); 485 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties( 486 layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 487 } 488 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 489 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); 490 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 491 if ( propertyCount < properties.size() ) 492 { 493 properties.resize( propertyCount ); 494 } 495 return createResultValueType( result, properties ); 496 } 497 498 template <typename ExtensionPropertiesAllocator, 499 typename Dispatch, 500 typename B1, 501 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type> 502 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,ExtensionPropertiesAllocator & extensionPropertiesAllocator,Dispatch const & d)503 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, 504 ExtensionPropertiesAllocator & extensionPropertiesAllocator, 505 Dispatch const & d ) 506 { 507 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 508 509 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); 510 uint32_t propertyCount; 511 VULKAN_HPP_NAMESPACE::Result result; 512 do 513 { 514 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 515 d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 516 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 517 { 518 properties.resize( propertyCount ); 519 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceExtensionProperties( 520 layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 521 } 522 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 523 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); 524 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 525 if ( propertyCount < properties.size() ) 526 { 527 properties.resize( propertyCount ); 528 } 529 return createResultValueType( result, properties ); 530 } 531 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 532 533 template <typename Dispatch> enumerateDeviceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d) const534 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, 535 uint32_t * pPropertyCount, 536 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 537 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 538 { 539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 540 return static_cast<Result>( 541 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 542 } 543 544 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 545 template <typename ExtensionPropertiesAllocator, typename Dispatch> 546 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type 
enumerateDeviceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d) const547 PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const 548 { 549 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 550 551 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; 552 uint32_t propertyCount; 553 VULKAN_HPP_NAMESPACE::Result result; 554 do 555 { 556 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 557 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 558 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 559 { 560 properties.resize( propertyCount ); 561 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties( 562 m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 563 } 564 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 565 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); 566 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 567 if ( propertyCount < properties.size() ) 568 { 569 properties.resize( propertyCount ); 570 } 571 return createResultValueType( result, properties ); 572 } 573 574 template <typename ExtensionPropertiesAllocator, 575 typename Dispatch, 576 typename B1, 577 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, int>::type> 578 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties(Optional<const std::string> layerName,ExtensionPropertiesAllocator & extensionPropertiesAllocator,Dispatch const & d) const579 
PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, 580 ExtensionPropertiesAllocator & extensionPropertiesAllocator, 581 Dispatch const & d ) const 582 { 583 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 584 585 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); 586 uint32_t propertyCount; 587 VULKAN_HPP_NAMESPACE::Result result; 588 do 589 { 590 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 591 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 592 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 593 { 594 properties.resize( propertyCount ); 595 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceExtensionProperties( 596 m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 597 } 598 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 599 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); 600 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 601 if ( propertyCount < properties.size() ) 602 { 603 properties.resize( propertyCount ); 604 } 605 return createResultValueType( result, properties ); 606 } 607 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 608 609 template <typename Dispatch> enumerateInstanceLayerProperties(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,Dispatch const & d)610 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, 611 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, 612 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 613 { 614 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 615 return static_cast<Result>( 
d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); 616 } 617 618 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 619 template <typename LayerPropertiesAllocator, typename Dispatch> 620 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties(Dispatch const & d)621 enumerateInstanceLayerProperties( Dispatch const & d ) 622 { 623 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 624 625 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties; 626 uint32_t propertyCount; 627 VULKAN_HPP_NAMESPACE::Result result; 628 do 629 { 630 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); 631 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 632 { 633 properties.resize( propertyCount ); 634 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 635 d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 636 } 637 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 638 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); 639 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 640 if ( propertyCount < properties.size() ) 641 { 642 properties.resize( propertyCount ); 643 } 644 return createResultValueType( result, properties ); 645 } 646 647 template <typename LayerPropertiesAllocator, 648 typename Dispatch, 649 typename B1, 650 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type> 651 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties(LayerPropertiesAllocator & 
layerPropertiesAllocator,Dispatch const & d)652 enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) 653 { 654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 655 656 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); 657 uint32_t propertyCount; 658 VULKAN_HPP_NAMESPACE::Result result; 659 do 660 { 661 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); 662 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 663 { 664 properties.resize( propertyCount ); 665 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 666 d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 667 } 668 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 669 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); 670 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 671 if ( propertyCount < properties.size() ) 672 { 673 properties.resize( propertyCount ); 674 } 675 return createResultValueType( result, properties ); 676 } 677 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 678 679 template <typename Dispatch> enumerateDeviceLayerProperties(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,Dispatch const & d) const680 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, 681 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, 682 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 683 { 684 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 685 return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); 686 } 687 688 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
689 template <typename LayerPropertiesAllocator, typename Dispatch> 690 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties(Dispatch const & d) const691 PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const 692 { 693 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 694 695 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties; 696 uint32_t propertyCount; 697 VULKAN_HPP_NAMESPACE::Result result; 698 do 699 { 700 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); 701 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 702 { 703 properties.resize( propertyCount ); 704 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 705 d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 706 } 707 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 708 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); 709 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 710 if ( propertyCount < properties.size() ) 711 { 712 properties.resize( propertyCount ); 713 } 714 return createResultValueType( result, properties ); 715 } 716 717 template <typename LayerPropertiesAllocator, 718 typename Dispatch, 719 typename B1, 720 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::LayerProperties>::value, int>::type> 721 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties(LayerPropertiesAllocator & layerPropertiesAllocator,Dispatch const & d) const722 
PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const 723 { 724 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 725 726 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); 727 uint32_t propertyCount; 728 VULKAN_HPP_NAMESPACE::Result result; 729 do 730 { 731 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); 732 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 733 { 734 properties.resize( propertyCount ); 735 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 736 d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 737 } 738 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 739 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); 740 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 741 if ( propertyCount < properties.size() ) 742 { 743 properties.resize( propertyCount ); 744 } 745 return createResultValueType( result, properties ); 746 } 747 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 748 749 template <typename Dispatch> 750 VULKAN_HPP_INLINE void getQueue(uint32_t queueFamilyIndex,uint32_t queueIndex,VULKAN_HPP_NAMESPACE::Queue * pQueue,Dispatch const & d) const751 Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 752 { 753 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 754 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) ); 755 } 756 757 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 758 template <typename Dispatch> 759 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue 
getQueue(uint32_t queueFamilyIndex,uint32_t queueIndex,Dispatch const & d) const760 Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 761 { 762 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 763 764 VULKAN_HPP_NAMESPACE::Queue queue; 765 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) ); 766 767 return queue; 768 } 769 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 770 771 template <typename Dispatch> submit(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const772 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, 773 const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, 774 VULKAN_HPP_NAMESPACE::Fence fence, 775 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 776 { 777 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 778 return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 779 } 780 781 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 782 template <typename Dispatch> submit(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const783 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( 784 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 785 { 786 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 787 788 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 789 d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 790 resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); 791 792 return createResultValueType( result ); 793 } 794 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 795 796 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 797 template <typename Dispatch> waitIdle(Dispatch const & d) const798 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 799 { 800 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 801 return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) ); 802 } 803 #else 804 template <typename Dispatch> waitIdle(Dispatch const & d) const805 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const 806 { 807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 808 809 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueueWaitIdle( m_queue ) ); 810 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); 811 812 return createResultValueType( result ); 813 } 814 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 815 816 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 817 template <typename Dispatch> waitIdle(Dispatch const & d) const818 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 819 { 820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 821 return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) ); 822 } 823 #else 824 template <typename Dispatch> waitIdle(Dispatch const & d) const825 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const 826 { 827 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 828 829 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeviceWaitIdle( m_device ) ); 830 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); 
831 832 return createResultValueType( result ); 833 } 834 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 835 836 template <typename Dispatch> allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,Dispatch const & d) const837 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, 838 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 839 VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, 840 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 841 { 842 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 843 return static_cast<Result>( d.vkAllocateMemory( m_device, 844 reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), 845 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 846 reinterpret_cast<VkDeviceMemory *>( pMemory ) ) ); 847 } 848 849 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 850 template <typename Dispatch> 851 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const852 Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, 853 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 854 Dispatch const & d ) const 855 { 856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 857 858 VULKAN_HPP_NAMESPACE::DeviceMemory memory; 859 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 860 d.vkAllocateMemory( m_device, 861 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), 862 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 863 
reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); 864 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); 865 866 return createResultValueType( result, memory ); 867 } 868 869 # ifndef VULKAN_HPP_NO_SMART_HANDLE 870 template <typename Dispatch> 871 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type allocateMemoryUnique(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const872 Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, 873 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 874 Dispatch const & d ) const 875 { 876 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 877 878 VULKAN_HPP_NAMESPACE::DeviceMemory memory; 879 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 880 d.vkAllocateMemory( m_device, 881 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), 882 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 883 reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); 884 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); 885 886 return createResultValueType( result, 887 UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) ); 888 } 889 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 890 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 891 892 template <typename Dispatch> freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const893 VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 894 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 895 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 896 { 897 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 898 d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 899 } 900 901 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 902 template <typename Dispatch> freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const903 VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 904 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 905 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 906 { 907 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 908 909 d.vkFreeMemory( m_device, 910 static_cast<VkDeviceMemory>( memory ), 911 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 912 } 913 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 914 915 template <typename Dispatch> 916 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 917 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 918 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 919 { 920 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 921 d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 922 } 923 924 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 925 template <typename Dispatch> 926 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 927 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 928 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 929 { 930 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 931 932 d.vkFreeMemory( m_device, 933 static_cast<VkDeviceMemory>( memory ), 934 reinterpret_cast<const VkAllocationCallbacks *>( 
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 935 } 936 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 937 938 template <typename Dispatch> mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,void ** ppData,Dispatch const & d) const939 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 940 VULKAN_HPP_NAMESPACE::DeviceSize offset, 941 VULKAN_HPP_NAMESPACE::DeviceSize size, 942 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, 943 void ** ppData, 944 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 945 { 946 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 947 return static_cast<Result>( d.vkMapMemory( m_device, 948 static_cast<VkDeviceMemory>( memory ), 949 static_cast<VkDeviceSize>( offset ), 950 static_cast<VkDeviceSize>( size ), 951 static_cast<VkMemoryMapFlags>( flags ), 952 ppData ) ); 953 } 954 955 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 956 template <typename Dispatch> mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,Dispatch const & d) const957 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 958 VULKAN_HPP_NAMESPACE::DeviceSize offset, 959 VULKAN_HPP_NAMESPACE::DeviceSize size, 960 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, 961 Dispatch const & d ) const 962 { 963 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 964 965 void * pData; 966 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory( m_device, 967 static_cast<VkDeviceMemory>( memory ), 968 static_cast<VkDeviceSize>( offset ), 969 static_cast<VkDeviceSize>( size ), 970 static_cast<VkMemoryMapFlags>( flags ), 971 
&pData ) ); 972 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); 973 974 return createResultValueType( result, pData ); 975 } 976 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 977 978 template <typename Dispatch> unmapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const979 VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 980 { 981 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 982 d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) ); 983 } 984 985 template <typename Dispatch> flushMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const986 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, 987 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, 988 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 989 { 990 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 991 return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); 992 } 993 994 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 995 template <typename Dispatch> 996 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type flushMappedMemoryRanges(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const997 Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, 998 Dispatch const & d ) const 999 { 1000 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1001 1002 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1003 d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), 
reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 1004 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); 1005 1006 return createResultValueType( result ); 1007 } 1008 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1009 1010 template <typename Dispatch> invalidateMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const1011 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, 1012 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, 1013 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1014 { 1015 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1016 return static_cast<Result>( 1017 d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); 1018 } 1019 1020 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1021 template <typename Dispatch> 1022 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type invalidateMappedMemoryRanges(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const1023 Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, 1024 Dispatch const & d ) const 1025 { 1026 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1027 1028 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1029 d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 1030 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); 1031 1032 return createResultValueType( result ); 1033 } 1034 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1035 1036 template 
<typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,Dispatch const & d) const1037 VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1038 VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, 1039 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1040 { 1041 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1042 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) ); 1043 } 1044 1045 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1046 template <typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const1047 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1048 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1049 { 1050 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1051 1052 VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; 1053 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) ); 1054 1055 return committedMemoryInBytes; 1056 } 1057 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1058 1059 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1060 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1061 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, 1062 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1063 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1065 { 1066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1067 return 
static_cast<Result>( 1068 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1069 } 1070 #else 1071 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1072 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( 1073 VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const 1074 { 1075 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1076 1077 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1078 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1079 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); 1080 1081 return createResultValueType( result ); 1082 } 1083 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1084 1085 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1086 template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1087 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, 1088 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1089 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1090 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1091 { 1092 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1093 return static_cast<Result>( 1094 d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1095 } 1096 #else 1097 
template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1098 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( 1099 VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const 1100 { 1101 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1102 1103 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1104 d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); 1105 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); 1106 1107 return createResultValueType( result ); 1108 } 1109 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1110 1111 template <typename Dispatch> getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1112 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, 1113 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1114 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1115 { 1116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1117 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1118 } 1119 1120 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1121 template <typename Dispatch> 1122 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,Dispatch const & d) const1123 Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 1124 { 1125 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1126 1127 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1128 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1129 1130 return memoryRequirements; 1131 } 1132 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1133 1134 template <typename Dispatch> getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1135 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1136 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1137 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1138 { 1139 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1140 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1141 } 1142 1143 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1144 template <typename Dispatch> 1145 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1146 Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1147 { 1148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1149 1150 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1151 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1152 1153 return memoryRequirements; 1154 } 1155 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1156 1157 template <typename Dispatch> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,uint32_t * 
pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,Dispatch const & d) const1158 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1159 uint32_t * pSparseMemoryRequirementCount, 1160 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, 1161 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1162 { 1163 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1164 d.vkGetImageSparseMemoryRequirements( m_device, 1165 static_cast<VkImage>( image ), 1166 pSparseMemoryRequirementCount, 1167 reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) ); 1168 } 1169 1170 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1171 template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch> 1172 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1173 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const 1174 { 1175 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1176 1177 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements; 1178 uint32_t sparseMemoryRequirementCount; 1179 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1180 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1181 d.vkGetImageSparseMemoryRequirements( m_device, 1182 static_cast<VkImage>( image ), 1183 &sparseMemoryRequirementCount, 1184 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1185 1186 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1187 if ( 
sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 1188 { 1189 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1190 } 1191 return sparseMemoryRequirements; 1192 } 1193 1194 template <typename SparseImageMemoryRequirementsAllocator, 1195 typename Dispatch, 1196 typename B1, 1197 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, int>::type> 1198 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,Dispatch const & d) const1199 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1200 SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, 1201 Dispatch const & d ) const 1202 { 1203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1204 1205 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( 1206 sparseImageMemoryRequirementsAllocator ); 1207 uint32_t sparseMemoryRequirementCount; 1208 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1209 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1210 d.vkGetImageSparseMemoryRequirements( m_device, 1211 static_cast<VkImage>( image ), 1212 &sparseMemoryRequirementCount, 1213 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1214 1215 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1216 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 1217 { 1218 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1219 } 1220 return sparseMemoryRequirements; 1221 } 1222 
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1223 1224 template <typename Dispatch> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,Dispatch const & d) const1225 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1226 VULKAN_HPP_NAMESPACE::ImageType type, 1227 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1228 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1229 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1230 uint32_t * pPropertyCount, 1231 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, 1232 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1233 { 1234 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1235 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1236 static_cast<VkFormat>( format ), 1237 static_cast<VkImageType>( type ), 1238 static_cast<VkSampleCountFlagBits>( samples ), 1239 static_cast<VkImageUsageFlags>( usage ), 1240 static_cast<VkImageTiling>( tiling ), 1241 pPropertyCount, 1242 reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) ); 1243 } 1244 1245 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1246 template <typename SparseImageFormatPropertiesAllocator, typename Dispatch> 1247 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,Dispatch const & d) const1248 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format 
format, 1249 VULKAN_HPP_NAMESPACE::ImageType type, 1250 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1251 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1252 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1253 Dispatch const & d ) const 1254 { 1255 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1256 1257 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties; 1258 uint32_t propertyCount; 1259 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1260 static_cast<VkFormat>( format ), 1261 static_cast<VkImageType>( type ), 1262 static_cast<VkSampleCountFlagBits>( samples ), 1263 static_cast<VkImageUsageFlags>( usage ), 1264 static_cast<VkImageTiling>( tiling ), 1265 &propertyCount, 1266 nullptr ); 1267 properties.resize( propertyCount ); 1268 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1269 static_cast<VkFormat>( format ), 1270 static_cast<VkImageType>( type ), 1271 static_cast<VkSampleCountFlagBits>( samples ), 1272 static_cast<VkImageUsageFlags>( usage ), 1273 static_cast<VkImageTiling>( tiling ), 1274 &propertyCount, 1275 reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); 1276 1277 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1278 if ( propertyCount < properties.size() ) 1279 { 1280 properties.resize( propertyCount ); 1281 } 1282 return properties; 1283 } 1284 1285 template <typename SparseImageFormatPropertiesAllocator, 1286 typename Dispatch, 1287 typename B1, 1288 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, int>::type> 1289 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits 
samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,Dispatch const & d) const1290 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1291 VULKAN_HPP_NAMESPACE::ImageType type, 1292 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1293 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1294 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1295 SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, 1296 Dispatch const & d ) const 1297 { 1298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1299 1300 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator ); 1301 uint32_t propertyCount; 1302 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1303 static_cast<VkFormat>( format ), 1304 static_cast<VkImageType>( type ), 1305 static_cast<VkSampleCountFlagBits>( samples ), 1306 static_cast<VkImageUsageFlags>( usage ), 1307 static_cast<VkImageTiling>( tiling ), 1308 &propertyCount, 1309 nullptr ); 1310 properties.resize( propertyCount ); 1311 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1312 static_cast<VkFormat>( format ), 1313 static_cast<VkImageType>( type ), 1314 static_cast<VkSampleCountFlagBits>( samples ), 1315 static_cast<VkImageUsageFlags>( usage ), 1316 static_cast<VkImageTiling>( tiling ), 1317 &propertyCount, 1318 reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); 1319 1320 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1321 if ( propertyCount < properties.size() ) 1322 { 1323 properties.resize( propertyCount ); 1324 } 1325 return properties; 1326 } 1327 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1328 1329 template <typename Dispatch> bindSparse(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindSparseInfo * 
pBindInfo,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1330 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, 1331 const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, 1332 VULKAN_HPP_NAMESPACE::Fence fence, 1333 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1334 { 1335 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1336 return static_cast<Result>( 1337 d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) ); 1338 } 1339 1340 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1341 template <typename Dispatch> bindSparse(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1342 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( 1343 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 1344 { 1345 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1346 1347 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1348 d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ) ); 1349 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); 1350 1351 return createResultValueType( result ); 1352 } 1353 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1354 1355 template <typename Dispatch> createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const1356 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, 1357 const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1358 VULKAN_HPP_NAMESPACE::Fence * pFence, 1359 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1360 { 1361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1362 return static_cast<Result>( d.vkCreateFence( m_device, 1363 reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), 1364 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1365 reinterpret_cast<VkFence *>( pFence ) ) ); 1366 } 1367 1368 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1369 template <typename Dispatch> createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1370 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( 1371 const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1372 { 1373 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1374 1375 VULKAN_HPP_NAMESPACE::Fence fence; 1376 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1377 d.vkCreateFence( m_device, 1378 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), 1379 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1380 reinterpret_cast<VkFence *>( &fence ) ) ); 1381 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); 1382 1383 return createResultValueType( result, fence ); 1384 } 1385 1386 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1387 template <typename Dispatch> createFenceUnique(const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1388 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, 
Dispatch>>::type Device::createFenceUnique( 1389 const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 1390 { 1391 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1392 1393 VULKAN_HPP_NAMESPACE::Fence fence; 1394 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1395 d.vkCreateFence( m_device, 1396 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), 1397 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1398 reinterpret_cast<VkFence *>( &fence ) ) ); 1399 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); 1400 1401 return createResultValueType( result, 1402 UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1403 } 1404 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1405 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1406 1407 template <typename Dispatch> destroyFence(VULKAN_HPP_NAMESPACE::Fence fence,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1408 VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, 1409 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1410 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1411 { 1412 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1413 d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1414 } 1415 1416 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1417 template <typename Dispatch> destroyFence(VULKAN_HPP_NAMESPACE::Fence fence,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1418 VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, 1419 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1420 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1421 { 1422 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1423 1424 d.vkDestroyFence( m_device, 1425 static_cast<VkFence>( fence ), 1426 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1427 } 1428 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1429 1430 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Fence fence,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1431 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, 1432 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1433 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1434 { 1435 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1436 d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1437 } 1438 1439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1440 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Fence fence,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1441 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, 1442 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1443 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1444 { 1445 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1446 1447 d.vkDestroyFence( m_device, 1448 static_cast<VkFence>( fence ), 1449 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1450 } 1451 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1452 1453 template <typename Dispatch> resetFences(uint32_t fenceCount,const VULKAN_HPP_NAMESPACE::Fence * pFences,Dispatch const & d) const1454 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::resetFences( uint32_t fenceCount, 1455 const VULKAN_HPP_NAMESPACE::Fence * pFences, 1456 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1457 { 1458 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1459 return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) ); 1460 } 1461 1462 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1463 template <typename Dispatch> 1464 VULKAN_HPP_INLINE typename ResultValueType<void>::type resetFences(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,Dispatch const & d) const1465 Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const 1466 { 1467 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1468 1469 VULKAN_HPP_NAMESPACE::Result result = 1470 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) ); 1471 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); 1472 1473 return createResultValueType( result ); 1474 } 1475 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1476 1477 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1478 template <typename Dispatch> getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1479 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1480 { 1481 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1482 return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); 1483 } 1484 #else 1485 template <typename Dispatch> getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const1486 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 1487 { 
1488 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1489 1490 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); 1491 resultCheck( 1492 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 1493 1494 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1495 } 1496 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1497 1498 template <typename Dispatch> waitForFences(uint32_t fenceCount,const VULKAN_HPP_NAMESPACE::Fence * pFences,VULKAN_HPP_NAMESPACE::Bool32 waitAll,uint64_t timeout,Dispatch const & d) const1499 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, 1500 const VULKAN_HPP_NAMESPACE::Fence * pFences, 1501 VULKAN_HPP_NAMESPACE::Bool32 waitAll, 1502 uint64_t timeout, 1503 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1504 { 1505 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1506 return static_cast<Result>( 1507 d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) ); 1508 } 1509 1510 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1511 template <typename Dispatch> 1512 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitForFences(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,VULKAN_HPP_NAMESPACE::Bool32 waitAll,uint64_t timeout,Dispatch const & d) const1513 Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, 1514 VULKAN_HPP_NAMESPACE::Bool32 waitAll, 1515 uint64_t timeout, 1516 Dispatch const & d ) const 1517 { 1518 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1519 1520 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1521 d.vkWaitForFences( m_device, 
fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) ); 1522 resultCheck( 1523 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 1524 1525 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 1526 } 1527 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1528 1529 template <typename Dispatch> createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,Dispatch const & d) const1530 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, 1531 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1532 VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, 1533 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1534 { 1535 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1536 return static_cast<Result>( d.vkCreateSemaphore( m_device, 1537 reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), 1538 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1539 reinterpret_cast<VkSemaphore *>( pSemaphore ) ) ); 1540 } 1541 1542 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1543 template <typename Dispatch> 1544 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1545 Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, 1546 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1547 Dispatch const & d ) const 1548 { 1549 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1550 1551 VULKAN_HPP_NAMESPACE::Semaphore 
semaphore; 1552 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1553 d.vkCreateSemaphore( m_device, 1554 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1555 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1556 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1557 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); 1558 1559 return createResultValueType( result, semaphore ); 1560 } 1561 1562 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1563 template <typename Dispatch> 1564 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type createSemaphoreUnique(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1565 Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, 1566 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1567 Dispatch const & d ) const 1568 { 1569 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1570 1571 VULKAN_HPP_NAMESPACE::Semaphore semaphore; 1572 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 1573 d.vkCreateSemaphore( m_device, 1574 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1575 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1576 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1577 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); 1578 1579 return createResultValueType( 1580 result, UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 1581 } 1582 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 1583 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1584 1585 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1586 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1587 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1588 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1589 { 1590 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1591 d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1592 } 1593 1594 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1595 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1596 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1597 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 1598 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1599 { 1600 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1601 1602 d.vkDestroySemaphore( m_device, 1603 static_cast<VkSemaphore>( semaphore ), 1604 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1605 } 1606 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 1607 1608 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1609 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1610 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1611 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1612 { 1613 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 1614 d.vkDestroySemaphore( m_device, 
                          static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy( Semaphore ) overload: unwraps the Optional allocator and forwards to vkDestroySemaphore.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // All wrappers assert that the dispatcher was initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroySemaphore( m_device,
                          static_cast<VkSemaphore>( semaphore ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkCreateEvent: raw pointers in/out, VkResult passed through unchanged.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo *     pCreateInfo,
                                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                     VULKAN_HPP_NAMESPACE::Event *                     pEvent,
                                                                     Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateEvent( m_device,
                                                 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkEvent *>( pEvent ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced createEvent: returns the created handle; resultCheck reports failure per the
  // configured error-handling mode (exceptions or result value).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent(
    const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Event  event;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateEvent( m_device,
                       reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkEvent *>( &event ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );

    return createResultValueType( result, event );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the new Event in a UniqueHandle that destroys it via ObjectDestroy<Device>.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique(
    const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Event  event;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateEvent( m_device,
                       reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkEvent *>( &event ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );

    return createResultValueType( result,
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkDestroyEvent (raw allocator pointer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event                       event,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroyEvent: Optional allocator wrapper instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event                               event,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                               Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyEvent( m_device,
                      static_cast<VkEvent>( event ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // destroy( Event ) overload — same behavior as destroyEvent, provided for the generic destroy() interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
                                          const
                                          VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy( Event ) overload: unwraps the Optional allocator and forwards to vkDestroyEvent.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event                               event,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyEvent( m_device,
                      static_cast<VkEvent>( event ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Basic-mode getEventStatus: returns the raw Result (eEventSet / eEventReset) without checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  // Enhanced getEventStatus: eEventSet and eEventReset are both success codes here, so they are
  // passed to resultCheck as allowed results and the Result itself is returned to the caller.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
    resultCheck(
      result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Basic-mode setEvent: raw Result pass-through.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  // Enhanced setEvent: void on success; errors surface through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                                                                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );

    return createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Basic-mode resetEvent: raw Result pass-through.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return
      static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  // Enhanced resetEvent: void on success; errors surface through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );

    return createResultValueType( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // C-style wrapper around vkCreateQueryPool: raw pointers in/out, VkResult passed through.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
                                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                         VULKAN_HPP_NAMESPACE::QueryPool *                 pQueryPool,
                                                                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateQueryPool( m_device,
                                                     reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced createQueryPool: returns the created handle; failures reported via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
    Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo &         createInfo,
                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
    VULKAN_HPP_NAMESPACE::Result    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateQueryPool( m_device,
                           reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );

    return createResultValueType( result, queryPool );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the new QueryPool in a UniqueHandle that destroys it via ObjectDestroy<Device>.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
    Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo &         createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
    VULKAN_HPP_NAMESPACE::Result    result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateQueryPool( m_device,
                           reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkDestroyQueryPool (raw allocator pointer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool                   queryPool,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroyQueryPool: Optional allocator wrapper instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool                           queryPool,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyQueryPool( m_device,
                          static_cast<VkQueryPool>( queryPool ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  // destroy( QueryPool ) overload — same behavior as destroyQueryPool, for the generic destroy() interface.
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool                   queryPool,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy( QueryPool ) overload: unwraps the Optional allocator and forwards to vkDestroyQueryPool.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool                           queryPool,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyQueryPool( m_device,
                          static_cast<VkQueryPool>( queryPool ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkGetQueryPoolResults: caller supplies the destination buffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                                                             uint32_t                               firstQuery,
                                                                             uint32_t                               queryCount,
                                                                             size_t                                 dataSize,
                                                                             void *                                 pData,
                                                                             VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                                                             VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                                             Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
                                                         static_cast<VkQueryPool>( queryPool ),
                                                         firstQuery,
                                                         queryCount,
                                                         dataSize,
                                                         pData,
                                                         static_cast<VkDeviceSize>( stride ),
                                                         static_cast<VkQueryResultFlags>( flags ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced getQueryPoolResults: returns the results in a std::vector<DataType> sized from dataSize.
  // dataSize must be a multiple of sizeof( DataType ). eNotReady is an allowed (non-error) result,
  // so the Result is returned alongside the data in a ResultValue.
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>>
    Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                 uint32_t                               firstQuery,
                                 uint32_t                               queryCount,
                                 size_t                                 dataSize,
                                 VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                 VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                 Dispatch const &                       d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device,
                                                                                                             static_cast<VkQueryPool>( queryPool ),
                                                                                                             firstQuery,
                                                                                                             queryCount,
                                                                                                             data.size() * sizeof( DataType ),
                                                                                                             reinterpret_cast<void *>( data.data() ),
                                                                                                             static_cast<VkDeviceSize>( stride ),
                                                                                                             static_cast<VkQueryResultFlags>( flags ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );

    return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }

  // Single-value convenience overload: reads exactly one DataType worth of query results.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                                                                           uint32_t                               firstQuery,
                                                                                           uint32_t                               queryCount,
                                                                                           VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                                                                           VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                                                           Dispatch const &                       d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetQueryPoolResults( m_device,
                                                                                                              static_cast<VkQueryPool>( queryPool ),
                                                                                                              firstQuery,
                                                                                                              queryCount,
                                                                                                              sizeof( DataType ),
                                                                                                              reinterpret_cast<void *>( &data ),
                                                                                                              static_cast<VkDeviceSize>( stride ),
                                                                                                              static_cast<VkQueryResultFlags>( flags ) ) );
    resultCheck(
      result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );

    return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkCreateBuffer: raw pointers in/out, VkResult passed through.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo *    pCreateInfo,
                                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                      VULKAN_HPP_NAMESPACE::Buffer *                    pBuffer,
                                                                      Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateBuffer( m_device,
                                                  reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                  reinterpret_cast<VkBuffer *>( pBuffer ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced createBuffer: returns the created handle; failures reported via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer(
    const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Buffer buffer;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateBuffer( m_device,
                        reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkBuffer *>( &buffer ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );

    return createResultValueType( result, buffer );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the new Buffer in a UniqueHandle that destroys it via ObjectDestroy<Device>.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique(
    const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Buffer buffer;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateBuffer( m_device,
                        reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkBuffer *>( &buffer ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );

    return createResultValueType( result,
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkDestroyBuffer (raw allocator pointer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroyBuffer: Optional allocator wrapper instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer                              buffer,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyBuffer( m_device,
                       static_cast<VkBuffer>( buffer ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // destroy( Buffer ) overload — same behavior as destroyBuffer, for the generic destroy() interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy( Buffer ) overload: unwraps the Optional allocator and forwards to vkDestroyBuffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer                              buffer,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyBuffer( m_device,
                       static_cast<VkBuffer>( buffer ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkCreateBufferView.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
                                                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *
                                                                            pAllocator,
                                                                          VULKAN_HPP_NAMESPACE::BufferView * pView,
                                                                          Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateBufferView( m_device,
                                                      reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkBufferView *>( pView ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced createBufferView: returns the created handle; failures reported via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
    Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo &        createInfo,
                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                              Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::BufferView view;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateBufferView( m_device,
                            reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkBufferView *>( &view ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );

    return createResultValueType( result, view );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the new BufferView in a UniqueHandle that destroys it via ObjectDestroy<Device>.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
    Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo &        createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::BufferView view;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateBufferView( m_device,
                            reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkBufferView *>( &view ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );

    return createResultValueType( result,
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkDestroyBufferView (raw allocator pointer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                    Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroyBufferView: Optional allocator wrapper instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView                          bufferView,
                                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                    Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyBufferView( m_device,
                           static_cast<VkBufferView>( bufferView ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // destroy( BufferView ) overload — same behavior as destroyBufferView, for the generic destroy() interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy( BufferView ) overload: unwraps the Optional allocator and forwards to vkDestroyBufferView.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView                          bufferView,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyBufferView( m_device,
                           static_cast<VkBufferView>( bufferView ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const
                             VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkCreateImage: raw pointers in/out, VkResult passed through.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo *     pCreateInfo,
                                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                     VULKAN_HPP_NAMESPACE::Image *                     pImage,
                                                                     Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateImage( m_device,
                                                 reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkImage *>( pImage ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced createImage: returns the created handle; failures reported via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage(
    const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Image  image;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateImage( m_device,
                       reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkImage *>( &image ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );

    return createResultValueType( result, image );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the new Image in a UniqueHandle that destroys it via ObjectDestroy<Device>.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique(
    const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Image  image;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateImage( m_device,
                       reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkImage *>( &image ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );

    return createResultValueType( result,
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style wrapper around vkDestroyImage (raw allocator pointer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image                       image,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroyImage: Optional allocator wrapper instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image                               image,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                               Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyImage( m_device,
                      static_cast<VkImage>( image ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // destroy( Image ) overload — same behavior as destroyImage, for the generic destroy() interface.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image                       image,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy( Image ) overload: unwraps the Optional allocator and forwards to vkDestroyImage.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image                               image,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion()
== VK_HEADER_VERSION ); 2261 2262 d.vkDestroyImage( m_device, 2263 static_cast<VkImage>( image ), 2264 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2265 } 2266 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2267 2268 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,Dispatch const & d) const2269 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, 2270 const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, 2271 VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, 2272 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2273 { 2274 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2275 d.vkGetImageSubresourceLayout( m_device, 2276 static_cast<VkImage>( image ), 2277 reinterpret_cast<const VkImageSubresource *>( pSubresource ), 2278 reinterpret_cast<VkSubresourceLayout *>( pLayout ) ); 2279 } 2280 2281 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2282 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource,Dispatch const & d) const2283 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( 2284 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2285 { 2286 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2287 2288 VULKAN_HPP_NAMESPACE::SubresourceLayout layout; 2289 d.vkGetImageSubresourceLayout( m_device, 2290 static_cast<VkImage>( image ), 2291 reinterpret_cast<const VkImageSubresource *>( &subresource ), 2292 reinterpret_cast<VkSubresourceLayout *>( &layout ) ); 2293 2294 return layout; 2295 } 2296 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2297 2298 template <typename Dispatch> createImageView(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ImageView * pView,Dispatch const & d) const2299 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, 2300 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2301 VULKAN_HPP_NAMESPACE::ImageView * pView, 2302 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2303 { 2304 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2305 return static_cast<Result>( d.vkCreateImageView( m_device, 2306 reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), 2307 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2308 reinterpret_cast<VkImageView *>( pView ) ) ); 2309 } 2310 2311 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2312 template <typename Dispatch> 2313 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2314 Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, 2315 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2316 Dispatch const & d ) const 2317 { 2318 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2319 2320 VULKAN_HPP_NAMESPACE::ImageView view; 2321 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2322 d.vkCreateImageView( m_device, 2323 reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), 2324 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2325 reinterpret_cast<VkImageView *>( &view ) ) ); 2326 resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); 2327 2328 return createResultValueType( result, view ); 2329 } 2330 2331 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2332 template <typename Dispatch> 2333 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type createImageViewUnique(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2334 Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, 2335 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2336 Dispatch const & d ) const 2337 { 2338 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2339 2340 VULKAN_HPP_NAMESPACE::ImageView view; 2341 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2342 d.vkCreateImageView( m_device, 2343 reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), 2344 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2345 reinterpret_cast<VkImageView *>( &view ) ) ); 2346 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); 2347 2348 return createResultValueType( result, 2349 UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2350 } 2351 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2352 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2353 2354 template <typename Dispatch> destroyImageView(VULKAN_HPP_NAMESPACE::ImageView imageView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2355 VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, 2356 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2357 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2358 
{ 2359 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2360 d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2361 } 2362 2363 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2364 template <typename Dispatch> destroyImageView(VULKAN_HPP_NAMESPACE::ImageView imageView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2365 VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, 2366 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2367 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2368 { 2369 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2370 2371 d.vkDestroyImageView( m_device, 2372 static_cast<VkImageView>( imageView ), 2373 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2374 } 2375 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2376 2377 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ImageView imageView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2378 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, 2379 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2380 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2381 { 2382 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2383 d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2384 } 2385 2386 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2387 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ImageView imageView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2388 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, 2389 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2390 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2391 { 2392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2393 2394 d.vkDestroyImageView( m_device, 2395 static_cast<VkImageView>( imageView ), 2396 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2397 } 2398 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2399 2400 template <typename Dispatch> createShaderModule(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,Dispatch const & d) const2401 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, 2402 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2403 VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, 2404 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2405 { 2406 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2407 return static_cast<Result>( d.vkCreateShaderModule( m_device, 2408 reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), 2409 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2410 reinterpret_cast<VkShaderModule *>( pShaderModule ) ) ); 2411 } 2412 2413 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2414 template <typename Dispatch> 2415 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2416 Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, 2417 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2418 Dispatch const & d ) const 2419 { 2420 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2421 2422 VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; 2423 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2424 d.vkCreateShaderModule( m_device, 2425 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), 2426 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2427 reinterpret_cast<VkShaderModule *>( &shaderModule ) ) ); 2428 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); 2429 2430 return createResultValueType( result, shaderModule ); 2431 } 2432 2433 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2434 template <typename Dispatch> 2435 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type createShaderModuleUnique(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2436 Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, 2437 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2438 Dispatch const & d ) const 2439 { 2440 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2441 2442 VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; 2443 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2444 d.vkCreateShaderModule( m_device, 2445 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), 2446 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2447 reinterpret_cast<VkShaderModule *>( &shaderModule ) ) ); 2448 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); 2449 2450 return createResultValueType( 2451 result, 
UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2452 } 2453 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2454 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2455 2456 template <typename Dispatch> destroyShaderModule(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2457 VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2458 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2459 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2460 { 2461 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2462 d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2463 } 2464 2465 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2466 template <typename Dispatch> destroyShaderModule(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2467 VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2468 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2469 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2470 { 2471 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2472 2473 d.vkDestroyShaderModule( m_device, 2474 static_cast<VkShaderModule>( shaderModule ), 2475 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2476 } 2477 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2478 2479 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2480 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule 
shaderModule, 2481 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2482 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2483 { 2484 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2485 d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2486 } 2487 2488 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2489 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2490 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2491 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2492 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2493 { 2494 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2495 2496 d.vkDestroyShaderModule( m_device, 2497 static_cast<VkShaderModule>( shaderModule ), 2498 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2499 } 2500 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2501 2502 template <typename Dispatch> createPipelineCache(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,Dispatch const & d) const2503 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, 2504 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2505 VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, 2506 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2507 { 2508 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2509 return static_cast<Result>( d.vkCreatePipelineCache( m_device, 2510 reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), 
2511 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2512 reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) ); 2513 } 2514 2515 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2516 template <typename Dispatch> 2517 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2518 Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, 2519 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2520 Dispatch const & d ) const 2521 { 2522 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2523 2524 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; 2525 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2526 d.vkCreatePipelineCache( m_device, 2527 reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), 2528 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2529 reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) ); 2530 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); 2531 2532 return createResultValueType( result, pipelineCache ); 2533 } 2534 2535 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2536 template <typename Dispatch> 2537 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type createPipelineCacheUnique(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2538 Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, 2539 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 
2540 Dispatch const & d ) const 2541 { 2542 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2543 2544 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; 2545 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2546 d.vkCreatePipelineCache( m_device, 2547 reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), 2548 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2549 reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) ); 2550 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); 2551 2552 return createResultValueType( 2553 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2554 } 2555 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2556 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2557 2558 template <typename Dispatch> destroyPipelineCache(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2559 VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2560 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2561 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2562 { 2563 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2564 d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2565 } 2566 2567 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2568 template <typename Dispatch> destroyPipelineCache(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2569 VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2570 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2571 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2572 { 2573 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2574 2575 d.vkDestroyPipelineCache( m_device, 2576 static_cast<VkPipelineCache>( pipelineCache ), 2577 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2578 } 2579 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2580 2581 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2582 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2583 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2584 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2585 { 2586 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2587 d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2588 } 2589 2590 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2591 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2592 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2593 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2594 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2595 { 2596 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2597 2598 d.vkDestroyPipelineCache( m_device, 2599 static_cast<VkPipelineCache>( pipelineCache ), 2600 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2601 } 2602 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2603 2604 template <typename Dispatch> 
getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,size_t * pDataSize,void * pData,Dispatch const & d) const2605 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2606 size_t * pDataSize, 2607 void * pData, 2608 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2609 { 2610 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2611 return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) ); 2612 } 2613 2614 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2615 template <typename Uint8_tAllocator, typename Dispatch> 2616 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Dispatch const & d) const2617 Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const 2618 { 2619 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2620 2621 std::vector<uint8_t, Uint8_tAllocator> data; 2622 size_t dataSize; 2623 VULKAN_HPP_NAMESPACE::Result result; 2624 do 2625 { 2626 result = 2627 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) ); 2628 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 2629 { 2630 data.resize( dataSize ); 2631 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2632 d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 2633 } 2634 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 2635 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); 2636 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 2637 if ( dataSize < data.size() ) 2638 { 2639 data.resize( dataSize 
); 2640 } 2641 return createResultValueType( result, data ); 2642 } 2643 2644 template <typename Uint8_tAllocator, 2645 typename Dispatch, 2646 typename B1, 2647 typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type> 2648 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const2649 Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 2650 { 2651 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2652 2653 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 2654 size_t dataSize; 2655 VULKAN_HPP_NAMESPACE::Result result; 2656 do 2657 { 2658 result = 2659 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) ); 2660 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 2661 { 2662 data.resize( dataSize ); 2663 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 2664 d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 2665 } 2666 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 2667 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); 2668 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 2669 if ( dataSize < data.size() ) 2670 { 2671 data.resize( dataSize ); 2672 } 2673 return createResultValueType( result, data ); 2674 } 2675 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2676 2677 template <typename Dispatch> mergePipelineCaches(VULKAN_HPP_NAMESPACE::PipelineCache dstCache,uint32_t srcCacheCount,const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,Dispatch const & d) const2678 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, 2679 uint32_t srcCacheCount, 2680 const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, 2681 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2682 { 2683 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2684 return static_cast<Result>( 2685 d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) ); 2686 } 2687 2688 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2689 template <typename Dispatch> 2690 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type mergePipelineCaches(VULKAN_HPP_NAMESPACE::PipelineCache dstCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,Dispatch const & d) const2691 Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, 2692 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, 2693 Dispatch const & d ) const 2694 { 2695 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2696 2697 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergePipelineCaches( 2698 m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) ); 2699 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); 2700 2701 return createResultValueType( result ); 2702 } 2703 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2704 2705 template <typename Dispatch> createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const2706 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2707 uint32_t createInfoCount, 2708 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, 2709 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2710 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 2711 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2712 { 2713 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2714 return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, 2715 static_cast<VkPipelineCache>( pipelineCache ), 2716 createInfoCount, 2717 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), 2718 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2719 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 2720 } 2721 2722 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2723 template <typename PipelineAllocator, typename Dispatch> 2724 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2725 Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2726 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 2727 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2728 Dispatch const & d ) const 2729 { 2730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2731 2732 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 2733 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 2734 m_device, 2735 
static_cast<VkPipelineCache>( pipelineCache ), 2736 createInfos.size(), 2737 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 2738 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2739 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2740 resultCheck( result, 2741 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", 2742 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2743 2744 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines ); 2745 } 2746 2747 template <typename PipelineAllocator, 2748 typename Dispatch, 2749 typename B0, 2750 typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 2751 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const2752 Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2753 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 2754 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2755 PipelineAllocator & pipelineAllocator, 2756 Dispatch const & d ) const 2757 { 2758 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2759 2760 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 2761 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 2762 m_device, 2763 static_cast<VkPipelineCache>( pipelineCache ), 2764 createInfos.size(), 2765 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 2766 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2767 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2768 resultCheck( result, 2769 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", 2770 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2771 2772 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines ); 2773 } 2774 2775 template <typename Dispatch> 2776 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createGraphicsPipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2777 Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2778 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, 2779 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2780 Dispatch const & d ) const 2781 { 2782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2783 2784 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 2785 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 2786 m_device, 2787 static_cast<VkPipelineCache>( pipelineCache ), 2788 1, 2789 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), 2790 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2791 
reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 2792 resultCheck( result, 2793 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", 2794 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2795 2796 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline ); 2797 } 2798 2799 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2800 template <typename Dispatch, typename PipelineAllocator> 2801 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2802 Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2803 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 2804 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2805 Dispatch const & d ) const 2806 { 2807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2808 2809 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 2810 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 2811 m_device, 2812 static_cast<VkPipelineCache>( pipelineCache ), 2813 createInfos.size(), 2814 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 2815 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2816 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2817 resultCheck( result, 2818 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", 2819 { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2820 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 2821 uniquePipelines.reserve( createInfos.size() ); 2822 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2823 for ( auto const & pipeline : pipelines ) 2824 { 2825 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 2826 } 2827 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( 2828 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); 2829 } 2830 2831 template <typename Dispatch, 2832 typename PipelineAllocator, 2833 typename B0, 2834 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 2835 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const2836 Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2837 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 2838 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2839 PipelineAllocator & pipelineAllocator, 2840 Dispatch const & d ) const 2841 { 2842 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2843 2844 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 2845 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 2846 m_device, 2847 static_cast<VkPipelineCache>( pipelineCache ), 2848 createInfos.size(), 2849 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 2850 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2851 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2852 resultCheck( result, 2853 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", 2854 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2855 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 2856 uniquePipelines.reserve( createInfos.size() ); 2857 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2858 for ( auto const & pipeline : pipelines ) 2859 { 2860 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 2861 } 2862 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( 2863 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); 2864 } 2865 2866 template <typename Dispatch> 2867 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createGraphicsPipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2868 Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2869 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, 2870 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2871 Dispatch const & d ) const 2872 { 2873 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 2874 2875 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 2876 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateGraphicsPipelines( 2877 m_device, 2878 static_cast<VkPipelineCache>( pipelineCache ), 2879 1, 2880 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), 2881 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2882 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 2883 resultCheck( result, 2884 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", 2885 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2886 2887 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 2888 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), 2889 UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 2890 } 2891 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 2892 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 2893 2894 template <typename Dispatch> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const2895 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2896 uint32_t createInfoCount, 2897 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, 2898 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2899 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 2900 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2901 { 2902 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2903 return static_cast<Result>( d.vkCreateComputePipelines( 
m_device, 2904 static_cast<VkPipelineCache>( pipelineCache ), 2905 createInfoCount, 2906 reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), 2907 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2908 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 2909 } 2910 2911 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2912 template <typename PipelineAllocator, typename Dispatch> 2913 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2914 Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2915 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 2916 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2917 Dispatch const & d ) const 2918 { 2919 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2920 2921 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 2922 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 2923 m_device, 2924 static_cast<VkPipelineCache>( pipelineCache ), 2925 createInfos.size(), 2926 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 2927 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2928 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2929 resultCheck( result, 2930 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", 2931 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2932 2933 return 
ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines ); 2934 } 2935 2936 template <typename PipelineAllocator, 2937 typename Dispatch, 2938 typename B0, 2939 typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type> 2940 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const2941 Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2942 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 2943 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2944 PipelineAllocator & pipelineAllocator, 2945 Dispatch const & d ) const 2946 { 2947 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2948 2949 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 2950 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 2951 m_device, 2952 static_cast<VkPipelineCache>( pipelineCache ), 2953 createInfos.size(), 2954 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 2955 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2956 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2957 resultCheck( result, 2958 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", 2959 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2960 2961 return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines ); 2962 } 2963 2964 template <typename Dispatch> 2965 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createComputePipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2966 Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2967 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 2968 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2969 Dispatch const & d ) const 2970 { 2971 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2972 2973 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 2974 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 2975 m_device, 2976 static_cast<VkPipelineCache>( pipelineCache ), 2977 1, 2978 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), 2979 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2980 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 2981 resultCheck( result, 2982 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", 2983 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2984 2985 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline ); 2986 } 2987 2988 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2989 template <typename Dispatch, typename PipelineAllocator> 2990 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, 
Dispatch>, PipelineAllocator>> createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const2991 Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2992 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 2993 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 2994 Dispatch const & d ) const 2995 { 2996 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 2997 2998 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 2999 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3000 m_device, 3001 static_cast<VkPipelineCache>( pipelineCache ), 3002 createInfos.size(), 3003 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3004 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3005 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3006 resultCheck( result, 3007 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", 3008 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3009 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 3010 uniquePipelines.reserve( createInfos.size() ); 3011 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3012 for ( auto const & pipeline : pipelines ) 3013 { 3014 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3015 } 3016 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( 3017 
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); 3018 } 3019 3020 template <typename Dispatch, 3021 typename PipelineAllocator, 3022 typename B0, 3023 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 3024 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const3025 Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3026 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 3027 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3028 PipelineAllocator & pipelineAllocator, 3029 Dispatch const & d ) const 3030 { 3031 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3032 3033 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 3034 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3035 m_device, 3036 static_cast<VkPipelineCache>( pipelineCache ), 3037 createInfos.size(), 3038 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 3039 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3040 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 3041 resultCheck( result, 3042 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", 3043 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } 
); 3044 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 3045 uniquePipelines.reserve( createInfos.size() ); 3046 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3047 for ( auto const & pipeline : pipelines ) 3048 { 3049 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 3050 } 3051 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( 3052 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); 3053 } 3054 3055 template <typename Dispatch> 3056 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createComputePipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3057 Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 3058 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 3059 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3060 Dispatch const & d ) const 3061 { 3062 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3063 3064 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 3065 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateComputePipelines( 3066 m_device, 3067 static_cast<VkPipelineCache>( pipelineCache ), 3068 1, 3069 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), 3070 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3071 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3072 resultCheck( result, 3073 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", 3074 { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 3075 3076 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 3077 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), 3078 UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3079 } 3080 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3081 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3082 3083 template <typename Dispatch> destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3084 VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3085 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3086 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3087 { 3088 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3089 d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3090 } 3091 3092 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3093 template <typename Dispatch> destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3094 VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3095 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3096 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3097 { 3098 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3099 3100 d.vkDestroyPipeline( m_device, 3101 static_cast<VkPipeline>( pipeline ), 3102 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3103 } 3104 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3105 3106 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Pipeline pipeline,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3107 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3108 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3109 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3110 { 3111 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3112 d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3113 } 3114 3115 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3116 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Pipeline pipeline,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3117 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3118 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3119 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3120 { 3121 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3122 3123 d.vkDestroyPipeline( m_device, 3124 static_cast<VkPipeline>( pipeline ), 3125 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3126 } 3127 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3128 3129 template <typename Dispatch> createPipelineLayout(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,Dispatch const & d) const3130 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, 3131 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3132 VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, 3133 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3134 { 3135 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3136 return 
static_cast<Result>( d.vkCreatePipelineLayout( m_device, 3137 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), 3138 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3139 reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) ); 3140 } 3141 3142 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3143 template <typename Dispatch> 3144 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3145 Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, 3146 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3147 Dispatch const & d ) const 3148 { 3149 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3150 3151 VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; 3152 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3153 d.vkCreatePipelineLayout( m_device, 3154 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), 3155 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3156 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); 3157 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); 3158 3159 return createResultValueType( result, pipelineLayout ); 3160 } 3161 3162 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3163 template <typename Dispatch> 3164 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type createPipelineLayoutUnique(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3165 
Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, 3166 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3167 Dispatch const & d ) const 3168 { 3169 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3170 3171 VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; 3172 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3173 d.vkCreatePipelineLayout( m_device, 3174 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), 3175 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3176 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); 3177 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); 3178 3179 return createResultValueType( 3180 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3181 } 3182 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3183 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3184 3185 template <typename Dispatch> destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3186 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3187 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3188 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3189 { 3190 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3191 d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3192 } 3193 3194 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3195 template <typename Dispatch> destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3196 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3197 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3198 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3199 { 3200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3201 3202 d.vkDestroyPipelineLayout( m_device, 3203 static_cast<VkPipelineLayout>( pipelineLayout ), 3204 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3205 } 3206 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3207 3208 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3209 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3210 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3211 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3212 { 3213 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3214 d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3215 } 3216 3217 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3218 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3219 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3220 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3221 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3222 { 3223 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3224 3225 d.vkDestroyPipelineLayout( m_device, 3226 static_cast<VkPipelineLayout>( pipelineLayout ), 3227 reinterpret_cast<const 
VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3228 } 3229 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3230 3231 template <typename Dispatch> createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Sampler * pSampler,Dispatch const & d) const3232 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, 3233 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3234 VULKAN_HPP_NAMESPACE::Sampler * pSampler, 3235 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3236 { 3237 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3238 return static_cast<Result>( d.vkCreateSampler( m_device, 3239 reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), 3240 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3241 reinterpret_cast<VkSampler *>( pSampler ) ) ); 3242 } 3243 3244 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3245 template <typename Dispatch> createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3246 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( 3247 const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 3248 { 3249 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3250 3251 VULKAN_HPP_NAMESPACE::Sampler sampler; 3252 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3253 d.vkCreateSampler( m_device, 3254 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3255 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3256 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3257 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); 3258 3259 return createResultValueType( result, sampler ); 3260 } 3261 3262 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3263 template <typename Dispatch> createSamplerUnique(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3264 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( 3265 const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 3266 { 3267 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3268 3269 VULKAN_HPP_NAMESPACE::Sampler sampler; 3270 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3271 d.vkCreateSampler( m_device, 3272 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3273 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3274 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3275 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); 3276 3277 return createResultValueType( result, 3278 UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3279 } 3280 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3281 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3282 3283 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3284 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3285 const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3286 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3287 { 3288 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3289 d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3290 } 3291 3292 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3293 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3294 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3295 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3296 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3297 { 3298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3299 3300 d.vkDestroySampler( m_device, 3301 static_cast<VkSampler>( sampler ), 3302 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3303 } 3304 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3305 3306 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3307 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3308 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3309 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3310 { 3311 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3312 d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3313 } 3314 3315 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3316 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3317 VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::Sampler sampler, 3318 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3319 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3320 { 3321 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3322 3323 d.vkDestroySampler( m_device, 3324 static_cast<VkSampler>( sampler ), 3325 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3326 } 3327 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3328 3329 template <typename Dispatch> createDescriptorSetLayout(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,Dispatch const & d) const3330 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 3331 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3332 VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, 3333 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3334 { 3335 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3336 return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, 3337 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), 3338 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3339 reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) ); 3340 } 3341 3342 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3343 template <typename Dispatch> 3344 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3345 Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & 
createInfo, 3346 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3347 Dispatch const & d ) const 3348 { 3349 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3350 3351 VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; 3352 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout( 3353 m_device, 3354 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), 3355 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3356 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); 3357 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); 3358 3359 return createResultValueType( result, setLayout ); 3360 } 3361 3362 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3363 template <typename Dispatch> 3364 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type createDescriptorSetLayoutUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3365 Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 3366 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3367 Dispatch const & d ) const 3368 { 3369 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3370 3371 VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; 3372 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorSetLayout( 3373 m_device, 3374 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), 3375 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3376 
reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); 3377 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); 3378 3379 return createResultValueType( 3380 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3381 } 3382 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3383 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3384 3385 template <typename Dispatch> destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3386 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3387 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3388 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3389 { 3390 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3391 d.vkDestroyDescriptorSetLayout( 3392 m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3393 } 3394 3395 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3396 template <typename Dispatch> destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3397 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3398 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3399 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3400 { 3401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3402 3403 d.vkDestroyDescriptorSetLayout( 3404 m_device, 3405 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3406 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3407 } 3408 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3409 3410 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3411 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3412 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3413 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3414 { 3415 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3416 d.vkDestroyDescriptorSetLayout( 3417 m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3418 } 3419 3420 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3421 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3422 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, 3423 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3424 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3425 { 3426 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3427 3428 d.vkDestroyDescriptorSetLayout( 3429 m_device, 3430 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), 3431 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3432 } 3433 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3434 3435 template <typename Dispatch> createDescriptorPool(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,Dispatch const & d) const3436 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, 3437 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3438 VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, 3439 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3440 { 3441 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3442 return static_cast<Result>( d.vkCreateDescriptorPool( m_device, 3443 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), 3444 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3445 reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) ); 3446 } 3447 3448 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3449 template <typename Dispatch> 3450 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3451 Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, 3452 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3453 Dispatch const & d ) const 3454 { 3455 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3456 3457 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3458 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3459 d.vkCreateDescriptorPool( m_device, 3460 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3461 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3462 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3463 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); 3464 3465 return createResultValueType( result, descriptorPool ); 3466 } 3467 3468 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3469 template <typename 
Dispatch> 3470 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3471 Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, 3472 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3473 Dispatch const & d ) const 3474 { 3475 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3476 3477 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3478 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 3479 d.vkCreateDescriptorPool( m_device, 3480 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3481 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3482 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3483 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); 3484 3485 return createResultValueType( 3486 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 3487 } 3488 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3489 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3490 3491 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3492 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3493 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3494 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3495 { 3496 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3497 
d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3498 } 3499 3500 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3501 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3502 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3503 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3504 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3505 { 3506 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3507 3508 d.vkDestroyDescriptorPool( m_device, 3509 static_cast<VkDescriptorPool>( descriptorPool ), 3510 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3511 } 3512 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3513 3514 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3515 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3516 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3517 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3518 { 3519 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3520 d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3521 } 3522 3523 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3524 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const3525 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 
3526 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 3527 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3528 { 3529 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3530 3531 d.vkDestroyDescriptorPool( m_device, 3532 static_cast<VkDescriptorPool>( descriptorPool ), 3533 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3534 } 3535 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3536 3537 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 3538 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3539 VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3540 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3541 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3542 { 3543 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3544 return static_cast<Result>( 3545 d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); 3546 } 3547 #else 3548 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3549 VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3550 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3551 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3552 { 3553 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3554 3555 d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ); 3556 } 3557 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3558 3559 template <typename Dispatch> allocateDescriptorSets(const 
VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const3560 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, 3561 VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 3562 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3563 { 3564 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3565 return static_cast<Result>( d.vkAllocateDescriptorSets( 3566 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) ); 3567 } 3568 3569 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3570 template <typename DescriptorSetAllocator, typename Dispatch> 3571 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const3572 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 3573 { 3574 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3575 3576 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount ); 3577 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 3578 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3579 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 3580 3581 return createResultValueType( result, descriptorSets ); 3582 } 3583 3584 template <typename DescriptorSetAllocator, 3585 typename Dispatch, 3586 typename B0, 3587 typename 
std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::DescriptorSet>::value, int>::type> 3588 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const3589 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, 3590 DescriptorSetAllocator & descriptorSetAllocator, 3591 Dispatch const & d ) const 3592 { 3593 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3594 3595 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); 3596 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 3597 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3598 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 3599 3600 return createResultValueType( result, descriptorSets ); 3601 } 3602 3603 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3604 template <typename Dispatch, typename DescriptorSetAllocator> 3605 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 3606 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const3607 Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 3608 { 3609 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3610 3611 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( 
allocateInfo.descriptorSetCount ); 3612 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 3613 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3614 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 3615 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets; 3616 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 3617 PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 3618 for ( auto const & descriptorSet : descriptorSets ) 3619 { 3620 uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); 3621 } 3622 return createResultValueType( result, std::move( uniqueDescriptorSets ) ); 3623 } 3624 3625 template <typename Dispatch, 3626 typename DescriptorSetAllocator, 3627 typename B0, 3628 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>::value, int>::type> 3629 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 3630 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const3631 Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, 3632 DescriptorSetAllocator & descriptorSetAllocator, 3633 Dispatch const & d ) const 3634 { 3635 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3636 3637 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 3638 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateDescriptorSets( 3639 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3640 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 3641 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); 3642 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 3643 PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 3644 for ( auto const & descriptorSet : descriptorSets ) 3645 { 3646 uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); 3647 } 3648 return createResultValueType( result, std::move( uniqueDescriptorSets ) ); 3649 } 3650 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 3651 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3652 3653 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,uint32_t descriptorSetCount,const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const3654 VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3655 uint32_t descriptorSetCount, 3656 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 3657 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3658 { 3659 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3660 return static_cast<Result>( d.vkFreeDescriptorSets( 3661 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 3662 } 3663 3664 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3665 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,Dispatch const & d) const3666 VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3667 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 3668 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3669 { 3670 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3671 3672 d.vkFreeDescriptorSets( 3673 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); 3674 } 3675 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3676 3677 template <typename Dispatch> Result(Device::free)3678 VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3679 uint32_t descriptorSetCount, 3680 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 3681 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3682 { 3683 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3684 return static_cast<Result>( d.vkFreeDescriptorSets( 3685 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 3686 } 3687 3688 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3689 template <typename Dispatch> 3690 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3691 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 3692 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3693 { 3694 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 3695 3696 d.vkFreeDescriptorSets( 3697 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); 3698 } 3699 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 3700 3701 template <typename Dispatch> 
  // Raw overload: writes/copies descriptor data; counts and pointers are passed through untouched.
  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount,
                                                       const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                                                       uint32_t descriptorCopyCount,
                                                       const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkUpdateDescriptorSets( m_device,
                              descriptorWriteCount,
                              reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
                              descriptorCopyCount,
                              reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: sizes/pointers are derived from the two ArrayProxy ranges.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
                                  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkUpdateDescriptorSets( m_device,
                              descriptorWrites.size(),
                              reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
                              descriptorCopies.size(),
                              reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: creates a framebuffer; out-parameter receives the handle, VkResult is returned as-is.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                           VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateFramebuffer( m_device,
                                                       reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates a framebuffer and returns the handle; resultCheck() converts failure
  // VkResults into the configured error-handling path (exception or assert, per library configuration).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
    Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateFramebuffer( m_device,
                             reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );

    return createResultValueType( result, framebuffer );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the created framebuffer in a UniqueHandle whose ObjectDestroy deleter
  // captures this device, the allocator and the dispatcher for the eventual vkDestroyFramebuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
    Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateFramebuffer( m_device,
                             reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: destroys a framebuffer with optional allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional<> allocator converts to a nullable AllocationCallbacks pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
                                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyFramebuffer( m_device,
                            static_cast<VkFramebuffer>( framebuffer ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic Device::destroy overload selected by the Framebuffer handle type; same call as destroyFramebuffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Generic Device::destroy overload for Framebuffer (enhanced, Optional<> allocator).
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyFramebuffer( m_device,
                            static_cast<VkFramebuffer>( framebuffer ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: creates a render pass; out-parameter receives the handle, VkResult is returned as-is.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
                                                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                          VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRenderPass( m_device,
                                                      reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates a render pass and returns the handle; failures go through resultCheck().
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
    Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateRenderPass( m_device,
                            reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );

    return createResultValueType( result, renderPass );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: returned UniqueHandle destroys the render pass via ObjectDestroy on release.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
    Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateRenderPass( m_device,
                            reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: destroys a render pass with optional allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional<> allocator converts to a nullable AllocationCallbacks pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyRenderPass( m_device,
                           static_cast<VkRenderPass>( renderPass ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic Device::destroy overload selected by the RenderPass handle type.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic Device::destroy overload for RenderPass (enhanced, Optional<> allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyRenderPass( m_device,
                           static_cast<VkRenderPass>( renderPass ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: queries the render-area granularity of a render pass into an out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                                           VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    // Enhanced overload: returns the granularity by value (query cannot fail, hence noexcept).
    VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Extent2D granularity;
    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );

    return granularity;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: creates a command pool; out-parameter receives the handle, VkResult is returned as-is.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                           VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateCommandPool( m_device,
                                                       reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates a command pool and returns the handle; failures go through resultCheck().
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
    Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCommandPool( m_device,
                             reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );

    return createResultValueType( result, commandPool );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: returned UniqueHandle destroys the command pool via ObjectDestroy on release.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
    Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCommandPool( m_device,
                             reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: destroys a command pool with optional allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional<> allocator converts to a nullable AllocationCallbacks pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyCommandPool( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic Device::destroy overload selected by the CommandPool handle type.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic Device::destroy overload for CommandPool (enhanced, Optional<> allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyCommandPool( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced build: resets a command pool and returns the raw VkResult.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                                                          VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,Dispatch const & d) const4074 Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const 4075 { 4076 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4077 4078 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 4079 d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); 4080 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); 4081 4082 return createResultValueType( result ); 4083 } 4084 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4085 4086 template <typename Dispatch> allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const4087 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, 4088 VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 4089 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4090 { 4091 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4092 return static_cast<Result>( d.vkAllocateCommandBuffers( 4093 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) ); 4094 } 4095 4096 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4097 template <typename CommandBufferAllocator, typename Dispatch> 4098 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,Dispatch const & d) const4099 Device::allocateCommandBuffers( const 
VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const 4100 { 4101 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4102 4103 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount ); 4104 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4105 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4106 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); 4107 4108 return createResultValueType( result, commandBuffers ); 4109 } 4110 4111 template <typename CommandBufferAllocator, 4112 typename Dispatch, 4113 typename B0, 4114 typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::CommandBuffer>::value, int>::type> 4115 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,CommandBufferAllocator & commandBufferAllocator,Dispatch const & d) const4116 Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, 4117 CommandBufferAllocator & commandBufferAllocator, 4118 Dispatch const & d ) const 4119 { 4120 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4121 4122 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator ); 4123 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4124 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4125 resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); 4126 4127 return createResultValueType( result, commandBuffers ); 4128 } 4129 4130 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4131 template <typename Dispatch, typename CommandBufferAllocator> 4132 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 4133 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,Dispatch const & d) const4134 Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const 4135 { 4136 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4137 4138 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); 4139 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4140 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4141 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); 4142 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers; 4143 uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); 4144 PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); 4145 for ( auto const & commandBuffer : commandBuffers ) 4146 { 4147 uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) ); 4148 } 4149 return createResultValueType( result, std::move( uniqueCommandBuffers ) ); 4150 } 4151 4152 template <typename Dispatch, 4153 typename CommandBufferAllocator, 4154 typename B0, 4155 typename std::enable_if<std::is_same<typename B0::value_type, 
UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>::value, int>::type> 4156 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 4157 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,CommandBufferAllocator & commandBufferAllocator,Dispatch const & d) const4158 Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, 4159 CommandBufferAllocator & commandBufferAllocator, 4160 Dispatch const & d ) const 4161 { 4162 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4163 4164 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); 4165 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAllocateCommandBuffers( 4166 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4167 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); 4168 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); 4169 uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); 4170 PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); 4171 for ( auto const & commandBuffer : commandBuffers ) 4172 { 4173 uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) ); 4174 } 4175 return createResultValueType( result, std::move( uniqueCommandBuffers ) ); 4176 } 4177 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 4178 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4179 4180 template <typename Dispatch> freeCommandBuffers(VULKAN_HPP_NAMESPACE::CommandPool commandPool,uint32_t commandBufferCount,const 
  // Raw overload: returns command buffers to their pool, taking an explicit count/pointer pair.
  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                                     uint32_t commandBufferCount,
                                                     const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the buffer list is supplied as an ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload of the generic Device::free for command buffers (name parenthesized —
  // presumably to defeat a function-like `free` macro; confirm against generator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                          uint32_t commandBufferCount,
                                          const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4216 template <typename Dispatch> 4217 VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4218 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, 4219 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4220 { 4221 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4222 4223 d.vkFreeCommandBuffers( 4224 m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); 4225 } 4226 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4227 4228 template <typename Dispatch> begin(const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,Dispatch const & d) const4229 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, 4230 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4231 { 4232 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4233 return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) ); 4234 } 4235 4236 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4237 template <typename Dispatch> 4238 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type begin(const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo,Dispatch const & d) const4239 CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const 4240 { 4241 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4242 4243 VULKAN_HPP_NAMESPACE::Result result = 4244 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) ); 4245 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); 4246 4247 return 
createResultValueType( result ); 4248 } 4249 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4250 4251 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 4252 template <typename Dispatch> end(Dispatch const & d) const4253 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4254 { 4255 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4256 return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) ); 4257 } 4258 #else 4259 template <typename Dispatch> end(Dispatch const & d) const4260 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const 4261 { 4262 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4263 4264 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEndCommandBuffer( m_commandBuffer ) ); 4265 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); 4266 4267 return createResultValueType( result ); 4268 } 4269 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4270 4271 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 4272 template <typename Dispatch> reset(VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,Dispatch const & d) const4273 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, 4274 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4275 { 4276 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4277 return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) ); 4278 } 4279 #else 4280 template <typename Dispatch> reset(VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,Dispatch const & d) const4281 VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const 4282 { 4283 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4284 4285 VULKAN_HPP_NAMESPACE::Result result = 4286 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) ); 4287 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); 4288 4289 return createResultValueType( result ); 4290 } 4291 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4292 4293 template <typename Dispatch> bindPipeline(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,Dispatch const & d) const4294 VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 4295 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 4296 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4297 { 4298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4299 d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); 4300 } 4301 4302 template <typename Dispatch> setViewport(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const4303 VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, 4304 uint32_t viewportCount, 4305 const VULKAN_HPP_NAMESPACE::Viewport * pViewports, 4306 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4307 { 4308 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4309 d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 4310 } 4311 4312 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4313 template <typename Dispatch> setViewport(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const4314 VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, 4315 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 4316 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4317 { 4318 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4319 4320 d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 4321 } 4322 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4323 4324 template <typename Dispatch> setScissor(uint32_t firstScissor,uint32_t scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const4325 VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, 4326 uint32_t scissorCount, 4327 const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, 4328 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4329 { 4330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4331 d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 4332 } 4333 4334 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4335 template <typename Dispatch> setScissor(uint32_t firstScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,Dispatch const & d) const4336 VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, 4337 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 4338 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4339 { 4340 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4341 4342 d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 4343 } 4344 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4345 4346 template <typename Dispatch> setLineWidth(float lineWidth,Dispatch const & d) const4347 VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4348 { 4349 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4350 d.vkCmdSetLineWidth( 
m_commandBuffer, lineWidth ); 4351 } 4352 4353 template <typename Dispatch> 4354 VULKAN_HPP_INLINE void setDepthBias(float depthBiasConstantFactor,float depthBiasClamp,float depthBiasSlopeFactor,Dispatch const & d) const4355 CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4356 { 4357 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4358 d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); 4359 } 4360 4361 template <typename Dispatch> setBlendConstants(const float blendConstants[4],Dispatch const & d) const4362 VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4363 { 4364 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4365 d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); 4366 } 4367 4368 template <typename Dispatch> setDepthBounds(float minDepthBounds,float maxDepthBounds,Dispatch const & d) const4369 VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4370 { 4371 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4372 d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); 4373 } 4374 4375 template <typename Dispatch> 4376 VULKAN_HPP_INLINE void setStencilCompareMask(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,uint32_t compareMask,Dispatch const & d) const4377 CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4378 { 4379 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4380 d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask ); 4381 } 4382 4383 template <typename Dispatch> 4384 
VULKAN_HPP_INLINE void setStencilWriteMask(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,uint32_t writeMask,Dispatch const & d) const4385 CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4386 { 4387 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4388 d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask ); 4389 } 4390 4391 template <typename Dispatch> 4392 VULKAN_HPP_INLINE void setStencilReference(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,uint32_t reference,Dispatch const & d) const4393 CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4394 { 4395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4396 d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference ); 4397 } 4398 4399 template <typename Dispatch> bindDescriptorSets(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,uint32_t descriptorSetCount,const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets,Dispatch const & d) const4400 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 4401 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 4402 uint32_t firstSet, 4403 uint32_t descriptorSetCount, 4404 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 4405 uint32_t dynamicOffsetCount, 4406 const uint32_t * pDynamicOffsets, 4407 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4408 { 4409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4410 d.vkCmdBindDescriptorSets( m_commandBuffer, 4411 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 4412 
static_cast<VkPipelineLayout>( layout ), 4413 firstSet, 4414 descriptorSetCount, 4415 reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), 4416 dynamicOffsetCount, 4417 pDynamicOffsets ); 4418 } 4419 4420 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4421 template <typename Dispatch> bindDescriptorSets(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,Dispatch const & d) const4422 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 4423 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 4424 uint32_t firstSet, 4425 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 4426 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets, 4427 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4428 { 4429 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4430 4431 d.vkCmdBindDescriptorSets( m_commandBuffer, 4432 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 4433 static_cast<VkPipelineLayout>( layout ), 4434 firstSet, 4435 descriptorSets.size(), 4436 reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), 4437 dynamicOffsets.size(), 4438 dynamicOffsets.data() ); 4439 } 4440 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4441 4442 template <typename Dispatch> bindIndexBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::IndexType indexType,Dispatch const & d) const4443 VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 4444 VULKAN_HPP_NAMESPACE::DeviceSize offset, 4445 VULKAN_HPP_NAMESPACE::IndexType indexType, 4446 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4447 { 4448 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4449 d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) ); 4450 } 4451 4452 template <typename Dispatch> bindVertexBuffers(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,Dispatch const & d) const4453 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, 4454 uint32_t bindingCount, 4455 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 4456 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 4457 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4458 { 4459 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4460 d.vkCmdBindVertexBuffers( 4461 m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); 4462 } 4463 4464 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4465 template <typename Dispatch> bindVertexBuffers(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,Dispatch const & d) const4466 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, 4467 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 4468 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 4469 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 4470 { 4471 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4472 # ifdef VULKAN_HPP_NO_EXCEPTIONS 4473 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 4474 # else 4475 if ( buffers.size() != offsets.size() ) 4476 { 4477 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != 
offsets.size()" ); 4478 } 4479 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 4480 4481 d.vkCmdBindVertexBuffers( m_commandBuffer, 4482 firstBinding, 4483 buffers.size(), 4484 reinterpret_cast<const VkBuffer *>( buffers.data() ), 4485 reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); 4486 } 4487 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4488 4489 template <typename Dispatch> draw(uint32_t vertexCount,uint32_t instanceCount,uint32_t firstVertex,uint32_t firstInstance,Dispatch const & d) const4490 VULKAN_HPP_INLINE void CommandBuffer::draw( 4491 uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4492 { 4493 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4494 d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); 4495 } 4496 4497 template <typename Dispatch> drawIndexed(uint32_t indexCount,uint32_t instanceCount,uint32_t firstIndex,int32_t vertexOffset,uint32_t firstInstance,Dispatch const & d) const4498 VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, 4499 uint32_t instanceCount, 4500 uint32_t firstIndex, 4501 int32_t vertexOffset, 4502 uint32_t firstInstance, 4503 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4504 { 4505 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4506 d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); 4507 } 4508 4509 template <typename Dispatch> drawIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const4510 VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, 4511 VULKAN_HPP_NAMESPACE::DeviceSize offset, 4512 uint32_t drawCount, 4513 uint32_t stride, 4514 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4515 { 4516 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
4517 d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 4518 } 4519 4520 template <typename Dispatch> drawIndexedIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const4521 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, 4522 VULKAN_HPP_NAMESPACE::DeviceSize offset, 4523 uint32_t drawCount, 4524 uint32_t stride, 4525 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4526 { 4527 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4528 d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 4529 } 4530 4531 template <typename Dispatch> 4532 VULKAN_HPP_INLINE void dispatch(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const4533 CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4534 { 4535 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4536 d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); 4537 } 4538 4539 template <typename Dispatch> dispatchIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,Dispatch const & d) const4540 VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, 4541 VULKAN_HPP_NAMESPACE::DeviceSize offset, 4542 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4543 { 4544 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4545 d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); 4546 } 4547 4548 template <typename Dispatch> copyBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,uint32_t regionCount,const 
VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,Dispatch const & d) const4549 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 4550 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 4551 uint32_t regionCount, 4552 const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, 4553 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4554 { 4555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4556 d.vkCmdCopyBuffer( m_commandBuffer, 4557 static_cast<VkBuffer>( srcBuffer ), 4558 static_cast<VkBuffer>( dstBuffer ), 4559 regionCount, 4560 reinterpret_cast<const VkBufferCopy *>( pRegions ) ); 4561 } 4562 4563 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4564 template <typename Dispatch> copyBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,Dispatch const & d) const4565 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 4566 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 4567 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, 4568 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4569 { 4570 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4571 4572 d.vkCmdCopyBuffer( m_commandBuffer, 4573 static_cast<VkBuffer>( srcBuffer ), 4574 static_cast<VkBuffer>( dstBuffer ), 4575 regions.size(), 4576 reinterpret_cast<const VkBufferCopy *>( regions.data() ) ); 4577 } 4578 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4579 4580 template <typename Dispatch> copyImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,Dispatch const & d) const4581 VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, 4582 
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 4583 VULKAN_HPP_NAMESPACE::Image dstImage, 4584 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 4585 uint32_t regionCount, 4586 const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, 4587 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4588 { 4589 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4590 d.vkCmdCopyImage( m_commandBuffer, 4591 static_cast<VkImage>( srcImage ), 4592 static_cast<VkImageLayout>( srcImageLayout ), 4593 static_cast<VkImage>( dstImage ), 4594 static_cast<VkImageLayout>( dstImageLayout ), 4595 regionCount, 4596 reinterpret_cast<const VkImageCopy *>( pRegions ) ); 4597 } 4598 4599 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4600 template <typename Dispatch> copyImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,Dispatch const & d) const4601 VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, 4602 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 4603 VULKAN_HPP_NAMESPACE::Image dstImage, 4604 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 4605 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, 4606 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4607 { 4608 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4609 4610 d.vkCmdCopyImage( m_commandBuffer, 4611 static_cast<VkImage>( srcImage ), 4612 static_cast<VkImageLayout>( srcImageLayout ), 4613 static_cast<VkImage>( dstImage ), 4614 static_cast<VkImageLayout>( dstImageLayout ), 4615 regions.size(), 4616 reinterpret_cast<const VkImageCopy *>( regions.data() ) ); 4617 } 4618 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4619 4620 template <typename Dispatch> blitImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout 
srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,VULKAN_HPP_NAMESPACE::Filter filter,Dispatch const & d) const4621 VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, 4622 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 4623 VULKAN_HPP_NAMESPACE::Image dstImage, 4624 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 4625 uint32_t regionCount, 4626 const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, 4627 VULKAN_HPP_NAMESPACE::Filter filter, 4628 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4629 { 4630 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4631 d.vkCmdBlitImage( m_commandBuffer, 4632 static_cast<VkImage>( srcImage ), 4633 static_cast<VkImageLayout>( srcImageLayout ), 4634 static_cast<VkImage>( dstImage ), 4635 static_cast<VkImageLayout>( dstImageLayout ), 4636 regionCount, 4637 reinterpret_cast<const VkImageBlit *>( pRegions ), 4638 static_cast<VkFilter>( filter ) ); 4639 } 4640 4641 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4642 template <typename Dispatch> blitImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,VULKAN_HPP_NAMESPACE::Filter filter,Dispatch const & d) const4643 VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, 4644 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 4645 VULKAN_HPP_NAMESPACE::Image dstImage, 4646 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 4647 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, 4648 VULKAN_HPP_NAMESPACE::Filter filter, 4649 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4650 { 4651 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
4652 4653 d.vkCmdBlitImage( m_commandBuffer, 4654 static_cast<VkImage>( srcImage ), 4655 static_cast<VkImageLayout>( srcImageLayout ), 4656 static_cast<VkImage>( dstImage ), 4657 static_cast<VkImageLayout>( dstImageLayout ), 4658 regions.size(), 4659 reinterpret_cast<const VkImageBlit *>( regions.data() ), 4660 static_cast<VkFilter>( filter ) ); 4661 } 4662 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4663 4664 template <typename Dispatch> copyBufferToImage(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,Dispatch const & d) const4665 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 4666 VULKAN_HPP_NAMESPACE::Image dstImage, 4667 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 4668 uint32_t regionCount, 4669 const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, 4670 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4671 { 4672 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4673 d.vkCmdCopyBufferToImage( m_commandBuffer, 4674 static_cast<VkBuffer>( srcBuffer ), 4675 static_cast<VkImage>( dstImage ), 4676 static_cast<VkImageLayout>( dstImageLayout ), 4677 regionCount, 4678 reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); 4679 } 4680 4681 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4682 template <typename Dispatch> copyBufferToImage(VULKAN_HPP_NAMESPACE::Buffer srcBuffer,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,Dispatch const & d) const4683 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, 4684 VULKAN_HPP_NAMESPACE::Image dstImage, 4685 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 4686 VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, 4687 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4688 { 4689 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4690 4691 d.vkCmdCopyBufferToImage( m_commandBuffer, 4692 static_cast<VkBuffer>( srcBuffer ), 4693 static_cast<VkImage>( dstImage ), 4694 static_cast<VkImageLayout>( dstImageLayout ), 4695 regions.size(), 4696 reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); 4697 } 4698 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4699 4700 template <typename Dispatch> copyImageToBuffer(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,Dispatch const & d) const4701 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, 4702 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 4703 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 4704 uint32_t regionCount, 4705 const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, 4706 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4707 { 4708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4709 d.vkCmdCopyImageToBuffer( m_commandBuffer, 4710 static_cast<VkImage>( srcImage ), 4711 static_cast<VkImageLayout>( srcImageLayout ), 4712 static_cast<VkBuffer>( dstBuffer ), 4713 regionCount, 4714 reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); 4715 } 4716 4717 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4718 template <typename Dispatch> copyImageToBuffer(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,Dispatch const & d) const4719 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, 4720 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 4721 
VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 4722 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, 4723 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4724 { 4725 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4726 4727 d.vkCmdCopyImageToBuffer( m_commandBuffer, 4728 static_cast<VkImage>( srcImage ), 4729 static_cast<VkImageLayout>( srcImageLayout ), 4730 static_cast<VkBuffer>( dstBuffer ), 4731 regions.size(), 4732 reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); 4733 } 4734 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4735 4736 template <typename Dispatch> updateBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize dataSize,const void * pData,Dispatch const & d) const4737 VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 4738 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 4739 VULKAN_HPP_NAMESPACE::DeviceSize dataSize, 4740 const void * pData, 4741 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4742 { 4743 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4744 d.vkCmdUpdateBuffer( 4745 m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData ); 4746 } 4747 4748 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4749 template <typename DataType, typename Dispatch> updateBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data,Dispatch const & d) const4750 VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 4751 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 4752 VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, 4753 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4754 { 4755 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4756 4757 
d.vkCmdUpdateBuffer( m_commandBuffer, 4758 static_cast<VkBuffer>( dstBuffer ), 4759 static_cast<VkDeviceSize>( dstOffset ), 4760 data.size() * sizeof( DataType ), 4761 reinterpret_cast<const void *>( data.data() ) ); 4762 } 4763 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4764 4765 template <typename Dispatch> fillBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize size,uint32_t data,Dispatch const & d) const4766 VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 4767 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 4768 VULKAN_HPP_NAMESPACE::DeviceSize size, 4769 uint32_t data, 4770 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4771 { 4772 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4773 d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data ); 4774 } 4775 4776 template <typename Dispatch> clearColorImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,uint32_t rangeCount,const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,Dispatch const & d) const4777 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, 4778 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 4779 const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, 4780 uint32_t rangeCount, 4781 const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, 4782 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4783 { 4784 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4785 d.vkCmdClearColorImage( m_commandBuffer, 4786 static_cast<VkImage>( image ), 4787 static_cast<VkImageLayout>( imageLayout ), 4788 reinterpret_cast<const VkClearColorValue *>( pColor ), 4789 rangeCount, 4790 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); 4791 } 4792 
4793 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4794 template <typename Dispatch> clearColorImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearColorValue & color,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,Dispatch const & d) const4795 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, 4796 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 4797 const VULKAN_HPP_NAMESPACE::ClearColorValue & color, 4798 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, 4799 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4800 { 4801 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4802 4803 d.vkCmdClearColorImage( m_commandBuffer, 4804 static_cast<VkImage>( image ), 4805 static_cast<VkImageLayout>( imageLayout ), 4806 reinterpret_cast<const VkClearColorValue *>( &color ), 4807 ranges.size(), 4808 reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); 4809 } 4810 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4811 4812 template <typename Dispatch> clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,uint32_t rangeCount,const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,Dispatch const & d) const4813 VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, 4814 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 4815 const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, 4816 uint32_t rangeCount, 4817 const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, 4818 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4819 { 4820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4821 d.vkCmdClearDepthStencilImage( m_commandBuffer, 4822 static_cast<VkImage>( image ), 4823 
static_cast<VkImageLayout>( imageLayout ), 4824 reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), 4825 rangeCount, 4826 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); 4827 } 4828 4829 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4830 template <typename Dispatch> 4831 VULKAN_HPP_INLINE void clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,Dispatch const & d) const4832 CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, 4833 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 4834 const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, 4835 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, 4836 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4837 { 4838 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4839 4840 d.vkCmdClearDepthStencilImage( m_commandBuffer, 4841 static_cast<VkImage>( image ), 4842 static_cast<VkImageLayout>( imageLayout ), 4843 reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), 4844 ranges.size(), 4845 reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); 4846 } 4847 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4848 4849 template <typename Dispatch> clearAttachments(uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,uint32_t rectCount,const VULKAN_HPP_NAMESPACE::ClearRect * pRects,Dispatch const & d) const4850 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, 4851 const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, 4852 uint32_t rectCount, 4853 const VULKAN_HPP_NAMESPACE::ClearRect * pRects, 4854 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4855 { 4856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 4857 d.vkCmdClearAttachments( m_commandBuffer, 4858 attachmentCount, 4859 reinterpret_cast<const VkClearAttachment *>( pAttachments ), 4860 rectCount, 4861 reinterpret_cast<const VkClearRect *>( pRects ) ); 4862 } 4863 4864 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4865 template <typename Dispatch> clearAttachments(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,Dispatch const & d) const4866 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, 4867 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, 4868 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4869 { 4870 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4871 4872 d.vkCmdClearAttachments( m_commandBuffer, 4873 attachments.size(), 4874 reinterpret_cast<const VkClearAttachment *>( attachments.data() ), 4875 rects.size(), 4876 reinterpret_cast<const VkClearRect *>( rects.data() ) ); 4877 } 4878 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4879 4880 template <typename Dispatch> resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,uint32_t regionCount,const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,Dispatch const & d) const4881 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 4882 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 4883 VULKAN_HPP_NAMESPACE::Image dstImage, 4884 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 4885 uint32_t regionCount, 4886 const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, 4887 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4888 { 4889 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); 4890 d.vkCmdResolveImage( m_commandBuffer, 4891 static_cast<VkImage>( srcImage ), 4892 static_cast<VkImageLayout>( srcImageLayout ), 4893 static_cast<VkImage>( dstImage ), 4894 static_cast<VkImageLayout>( dstImageLayout ), 4895 regionCount, 4896 reinterpret_cast<const VkImageResolve *>( pRegions ) ); 4897 } 4898 4899 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4900 template <typename Dispatch> resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage,VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,Dispatch const & d) const4901 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, 4902 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, 4903 VULKAN_HPP_NAMESPACE::Image dstImage, 4904 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 4905 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, 4906 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4907 { 4908 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4909 4910 d.vkCmdResolveImage( m_commandBuffer, 4911 static_cast<VkImage>( srcImage ), 4912 static_cast<VkImageLayout>( srcImageLayout ), 4913 static_cast<VkImage>( dstImage ), 4914 static_cast<VkImageLayout>( dstImageLayout ), 4915 regions.size(), 4916 reinterpret_cast<const VkImageResolve *>( regions.data() ) ); 4917 } 4918 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4919 4920 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,Dispatch const & d) const4921 VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, 4922 VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, 4923 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4924 { 4925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4926 
d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); 4927 } 4928 4929 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,Dispatch const & d) const4930 VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, 4931 VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, 4932 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4933 { 4934 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4935 d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); 4936 } 4937 4938 template <typename Dispatch> waitEvents(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,Dispatch const & d) const4939 VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, 4940 const VULKAN_HPP_NAMESPACE::Event * pEvents, 4941 VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 4942 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 4943 uint32_t memoryBarrierCount, 4944 const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, 4945 uint32_t bufferMemoryBarrierCount, 4946 const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, 4947 uint32_t imageMemoryBarrierCount, 4948 const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, 4949 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4950 { 4951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4952 d.vkCmdWaitEvents( m_commandBuffer, 4953 eventCount, 
4954 reinterpret_cast<const VkEvent *>( pEvents ), 4955 static_cast<VkPipelineStageFlags>( srcStageMask ), 4956 static_cast<VkPipelineStageFlags>( dstStageMask ), 4957 memoryBarrierCount, 4958 reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), 4959 bufferMemoryBarrierCount, 4960 reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), 4961 imageMemoryBarrierCount, 4962 reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); 4963 } 4964 4965 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4966 template <typename Dispatch> 4967 VULKAN_HPP_INLINE void waitEvents(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,Dispatch const & d) const4968 CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 4969 VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 4970 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 4971 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, 4972 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, 4973 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, 4974 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4975 { 4976 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 4977 4978 d.vkCmdWaitEvents( m_commandBuffer, 4979 events.size(), 4980 reinterpret_cast<const VkEvent *>( events.data() ), 4981 
static_cast<VkPipelineStageFlags>( srcStageMask ), 4982 static_cast<VkPipelineStageFlags>( dstStageMask ), 4983 memoryBarriers.size(), 4984 reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), 4985 bufferMemoryBarriers.size(), 4986 reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), 4987 imageMemoryBarriers.size(), 4988 reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); 4989 } 4990 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 4991 4992 template <typename Dispatch> pipelineBarrier(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,Dispatch const & d) const4993 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 4994 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 4995 VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, 4996 uint32_t memoryBarrierCount, 4997 const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, 4998 uint32_t bufferMemoryBarrierCount, 4999 const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, 5000 uint32_t imageMemoryBarrierCount, 5001 const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, 5002 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5003 { 5004 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5005 d.vkCmdPipelineBarrier( m_commandBuffer, 5006 static_cast<VkPipelineStageFlags>( srcStageMask ), 5007 static_cast<VkPipelineStageFlags>( dstStageMask ), 5008 static_cast<VkDependencyFlags>( dependencyFlags ), 5009 memoryBarrierCount, 5010 
reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), 5011 bufferMemoryBarrierCount, 5012 reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), 5013 imageMemoryBarrierCount, 5014 reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); 5015 } 5016 5017 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5018 template <typename Dispatch> 5019 VULKAN_HPP_INLINE void pipelineBarrier(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,Dispatch const & d) const5020 CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 5021 VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, 5022 VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, 5023 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, 5024 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, 5025 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, 5026 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5027 { 5028 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5029 5030 d.vkCmdPipelineBarrier( m_commandBuffer, 5031 static_cast<VkPipelineStageFlags>( srcStageMask ), 5032 static_cast<VkPipelineStageFlags>( dstStageMask ), 5033 static_cast<VkDependencyFlags>( dependencyFlags ), 5034 memoryBarriers.size(), 5035 reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), 5036 bufferMemoryBarriers.size(), 5037 reinterpret_cast<const 
VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), 5038 imageMemoryBarriers.size(), 5039 reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); 5040 } 5041 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5042 5043 template <typename Dispatch> beginQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,VULKAN_HPP_NAMESPACE::QueryControlFlags flags,Dispatch const & d) const5044 VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5045 uint32_t query, 5046 VULKAN_HPP_NAMESPACE::QueryControlFlags flags, 5047 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5048 { 5049 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5050 d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) ); 5051 } 5052 5053 template <typename Dispatch> endQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const5054 VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5055 { 5056 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5057 d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query ); 5058 } 5059 5060 template <typename Dispatch> resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const5061 VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5062 uint32_t firstQuery, 5063 uint32_t queryCount, 5064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5065 { 5066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5067 d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 5068 } 5069 5070 template <typename Dispatch> writeTimestamp(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits 
pipelineStage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const5071 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, 5072 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5073 uint32_t query, 5074 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5075 { 5076 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5077 d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query ); 5078 } 5079 5080 template <typename Dispatch> copyQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const5081 VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 5082 uint32_t firstQuery, 5083 uint32_t queryCount, 5084 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 5085 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 5086 VULKAN_HPP_NAMESPACE::DeviceSize stride, 5087 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 5088 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5089 { 5090 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5091 d.vkCmdCopyQueryPoolResults( m_commandBuffer, 5092 static_cast<VkQueryPool>( queryPool ), 5093 firstQuery, 5094 queryCount, 5095 static_cast<VkBuffer>( dstBuffer ), 5096 static_cast<VkDeviceSize>( dstOffset ), 5097 static_cast<VkDeviceSize>( stride ), 5098 static_cast<VkQueryResultFlags>( flags ) ); 5099 } 5100 5101 template <typename Dispatch> pushConstants(VULKAN_HPP_NAMESPACE::PipelineLayout layout,VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,uint32_t offset,uint32_t size,const void * pValues,Dispatch const & d) const5102 VULKAN_HPP_INLINE void CommandBuffer::pushConstants( 
VULKAN_HPP_NAMESPACE::PipelineLayout layout, 5103 VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, 5104 uint32_t offset, 5105 uint32_t size, 5106 const void * pValues, 5107 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5108 { 5109 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5110 d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues ); 5111 } 5112 5113 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5114 template <typename ValuesType, typename Dispatch> pushConstants(VULKAN_HPP_NAMESPACE::PipelineLayout layout,VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,uint32_t offset,VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values,Dispatch const & d) const5115 VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, 5116 VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, 5117 uint32_t offset, 5118 VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, 5119 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5120 { 5121 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5122 5123 d.vkCmdPushConstants( m_commandBuffer, 5124 static_cast<VkPipelineLayout>( layout ), 5125 static_cast<VkShaderStageFlags>( stageFlags ), 5126 offset, 5127 values.size() * sizeof( ValuesType ), 5128 reinterpret_cast<const void *>( values.data() ) ); 5129 } 5130 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5131 5132 template <typename Dispatch> beginRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5133 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 5134 VULKAN_HPP_NAMESPACE::SubpassContents contents, 5135 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5136 { 5137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5138 
d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) ); 5139 } 5140 5141 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5142 template <typename Dispatch> beginRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5143 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 5144 VULKAN_HPP_NAMESPACE::SubpassContents contents, 5145 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5146 { 5147 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5148 5149 d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) ); 5150 } 5151 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5152 5153 template <typename Dispatch> nextSubpass(VULKAN_HPP_NAMESPACE::SubpassContents contents,Dispatch const & d) const5154 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5155 { 5156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5157 d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) ); 5158 } 5159 5160 template <typename Dispatch> endRenderPass(Dispatch const & d) const5161 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5162 { 5163 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5164 d.vkCmdEndRenderPass( m_commandBuffer ); 5165 } 5166 5167 template <typename Dispatch> executeCommands(uint32_t commandBufferCount,const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const5168 VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, 5169 const 
VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 5170 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5171 { 5172 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5173 d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); 5174 } 5175 5176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5177 template <typename Dispatch> executeCommands(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,Dispatch const & d) const5178 VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, 5179 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5180 { 5181 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5182 5183 d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); 5184 } 5185 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5186 5187 //=== VK_VERSION_1_1 === 5188 5189 template <typename Dispatch> enumerateInstanceVersion(uint32_t * pApiVersion,Dispatch const & d)5190 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT 5191 { 5192 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5193 return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) ); 5194 } 5195 5196 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5197 template <typename Dispatch> enumerateInstanceVersion(Dispatch const & d)5198 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d ) 5199 { 5200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5201 5202 uint32_t apiVersion; 5203 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumerateInstanceVersion( &apiVersion ) ); 5204 
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); 5205 5206 return createResultValueType( result, apiVersion ); 5207 } 5208 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5209 5210 template <typename Dispatch> bindBufferMemory2(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,Dispatch const & d) const5211 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, 5212 const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, 5213 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5214 { 5215 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5216 return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); 5217 } 5218 5219 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5220 template <typename Dispatch> 5221 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindBufferMemory2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,Dispatch const & d) const5222 Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const 5223 { 5224 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5225 5226 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 5227 d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) ); 5228 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); 5229 5230 return createResultValueType( result ); 5231 } 5232 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5233 5234 template <typename Dispatch> bindImageMemory2(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,Dispatch const & d) const5235 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, 5236 const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, 5237 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5238 { 5239 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5240 return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); 5241 } 5242 5243 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5244 template <typename Dispatch> 5245 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindImageMemory2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,Dispatch const & d) const5246 Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const 5247 { 5248 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5249 5250 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 5251 d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) ); 5252 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); 5253 5254 return createResultValueType( result ); 5255 } 5256 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5257 5258 template <typename Dispatch> getGroupPeerMemoryFeatures(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,Dispatch const & d) const5259 VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, 5260 uint32_t localDeviceIndex, 5261 uint32_t remoteDeviceIndex, 5262 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, 5263 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5264 { 5265 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5266 
d.vkGetDeviceGroupPeerMemoryFeatures( 5267 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); 5268 } 5269 5270 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5271 template <typename Dispatch> getGroupPeerMemoryFeatures(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,Dispatch const & d) const5272 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( 5273 uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5274 { 5275 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5276 5277 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; 5278 d.vkGetDeviceGroupPeerMemoryFeatures( 5279 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); 5280 5281 return peerMemoryFeatures; 5282 } 5283 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5284 5285 template <typename Dispatch> setDeviceMask(uint32_t deviceMask,Dispatch const & d) const5286 VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5287 { 5288 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5289 d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); 5290 } 5291 5292 template <typename Dispatch> dispatchBase(uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const5293 VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, 5294 uint32_t baseGroupY, 5295 uint32_t baseGroupZ, 5296 uint32_t groupCountX, 5297 uint32_t groupCountY, 5298 uint32_t groupCountZ, 5299 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5300 { 5301 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5302 d.vkCmdDispatchBase( 
m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); 5303 } 5304 5305 template <typename Dispatch> 5306 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumeratePhysicalDeviceGroups(uint32_t * pPhysicalDeviceGroupCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,Dispatch const & d) const5307 Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, 5308 VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, 5309 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5310 { 5311 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5312 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( 5313 m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); 5314 } 5315 5316 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5317 template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> 5318 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 5319 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups(Dispatch const & d) const5320 Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const 5321 { 5322 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5323 5324 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; 5325 uint32_t physicalDeviceGroupCount; 5326 VULKAN_HPP_NAMESPACE::Result result; 5327 do 5328 { 5329 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); 5330 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 5331 { 5332 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5333 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( 5334 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 5335 } 5336 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 5337 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); 5338 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 5339 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 5340 { 5341 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5342 } 5343 return createResultValueType( result, physicalDeviceGroupProperties ); 5344 } 5345 5346 template <typename PhysicalDeviceGroupPropertiesAllocator, 5347 typename Dispatch, 5348 typename B1, 5349 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, int>::type> 5350 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 5351 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups(PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,Dispatch const & d) const5352 Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const 5353 { 5354 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5355 5356 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( 5357 physicalDeviceGroupPropertiesAllocator ); 5358 uint32_t physicalDeviceGroupCount; 5359 VULKAN_HPP_NAMESPACE::Result result; 5360 do 5361 { 5362 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); 5363 if ( ( result == 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 5364 { 5365 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5366 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( 5367 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 5368 } 5369 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 5370 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); 5371 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 5372 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 5373 { 5374 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 5375 } 5376 return createResultValueType( result, physicalDeviceGroupProperties ); 5377 } 5378 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5379 5380 template <typename Dispatch> getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const5381 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, 5382 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 5383 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5384 { 5385 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5386 d.vkGetImageMemoryRequirements2( 5387 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 5388 } 5389 5390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5391 template <typename Dispatch> 5392 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const5393 
Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5394 { 5395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5396 5397 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 5398 d.vkGetImageMemoryRequirements2( 5399 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 5400 5401 return memoryRequirements; 5402 } 5403 5404 template <typename X, typename Y, typename... Z, typename Dispatch> 5405 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const5406 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5407 { 5408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5409 5410 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 5411 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 5412 d.vkGetImageMemoryRequirements2( 5413 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 5414 5415 return structureChain; 5416 } 5417 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5418 5419 template <typename Dispatch> getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const5420 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, 5421 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 5422 Dispatch const & d 
) const VULKAN_HPP_NOEXCEPT 5423 { 5424 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5425 d.vkGetBufferMemoryRequirements2( 5426 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 5427 } 5428 5429 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5430 template <typename Dispatch> 5431 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const5432 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5433 { 5434 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5435 5436 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 5437 d.vkGetBufferMemoryRequirements2( 5438 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 5439 5440 return memoryRequirements; 5441 } 5442 5443 template <typename X, typename Y, typename... 
Z, typename Dispatch> 5444 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const5445 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5446 { 5447 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5448 5449 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 5450 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 5451 d.vkGetBufferMemoryRequirements2( 5452 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 5453 5454 return structureChain; 5455 } 5456 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5457 5458 template <typename Dispatch> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const5459 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, 5460 uint32_t * pSparseMemoryRequirementCount, 5461 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 5462 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5463 { 5464 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5465 d.vkGetImageSparseMemoryRequirements2( m_device, 5466 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), 5467 pSparseMemoryRequirementCount, 5468 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 5469 } 5470 5471 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5472 template 
<typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 5473 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,Dispatch const & d) const5474 Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const 5475 { 5476 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5477 5478 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 5479 uint32_t sparseMemoryRequirementCount; 5480 d.vkGetImageSparseMemoryRequirements2( 5481 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 5482 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 5483 d.vkGetImageSparseMemoryRequirements2( m_device, 5484 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 5485 &sparseMemoryRequirementCount, 5486 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 5487 5488 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 5489 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 5490 { 5491 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 5492 } 5493 return sparseMemoryRequirements; 5494 } 5495 5496 template <typename SparseImageMemoryRequirements2Allocator, 5497 typename Dispatch, 5498 typename B1, 5499 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, int>::type> 5500 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> 
getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const5501 Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 5502 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 5503 Dispatch const & d ) const 5504 { 5505 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5506 5507 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 5508 sparseImageMemoryRequirements2Allocator ); 5509 uint32_t sparseMemoryRequirementCount; 5510 d.vkGetImageSparseMemoryRequirements2( 5511 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 5512 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 5513 d.vkGetImageSparseMemoryRequirements2( m_device, 5514 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 5515 &sparseMemoryRequirementCount, 5516 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 5517 5518 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 5519 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 5520 { 5521 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 5522 } 5523 return sparseMemoryRequirements; 5524 } 5525 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5526 5527 template <typename Dispatch> getFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,Dispatch const & d) const5528 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5529 { 5530 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
5531 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); 5532 } 5533 5534 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5535 template <typename Dispatch> 5536 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const & d) const5537 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5538 { 5539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5540 5541 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; 5542 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 5543 5544 return features; 5545 } 5546 5547 template <typename X, typename Y, typename... Z, typename Dispatch> 5548 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2(Dispatch const & d) const5549 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5550 { 5551 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5552 5553 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 5554 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); 5555 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); 5556 5557 return structureChain; 5558 } 5559 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5560 5561 template <typename Dispatch> getProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,Dispatch const & d) const5562 VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, 5563 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5564 { 5565 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5566 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, 
reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); 5567 } 5568 5569 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5570 template <typename Dispatch> 5571 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const & d) const5572 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5573 { 5574 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5575 5576 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; 5577 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 5578 5579 return properties; 5580 } 5581 5582 template <typename X, typename Y, typename... Z, typename Dispatch> 5583 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2(Dispatch const & d) const5584 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5585 { 5586 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5587 5588 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 5589 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); 5590 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 5591 5592 return structureChain; 5593 } 5594 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5595 5596 template <typename Dispatch> getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,Dispatch const & d) const5597 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, 5598 VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, 5599 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5600 { 5601 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5602 
d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); 5603 } 5604 5605 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5606 template <typename Dispatch> 5607 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const5608 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5609 { 5610 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5611 5612 VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; 5613 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 5614 5615 return formatProperties; 5616 } 5617 5618 template <typename X, typename Y, typename... Z, typename Dispatch> 5619 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const5620 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5621 { 5622 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5623 5624 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 5625 VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); 5626 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 5627 5628 return structureChain; 5629 } 5630 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5631 5632 template <typename Dispatch> 5633 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * 
pImageFormatInfo,VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,Dispatch const & d) const5634 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, 5635 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, 5636 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5637 { 5638 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5639 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 5640 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), 5641 reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); 5642 } 5643 5644 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5645 template <typename Dispatch> 5646 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const5647 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 5648 { 5649 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5650 5651 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; 5652 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 5653 d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 5654 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 5655 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 5656 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); 5657 5658 return createResultValueType( result, imageFormatProperties ); 5659 } 5660 5661 template <typename X, typename Y, typename... 
Z, typename Dispatch> 5662 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const5663 PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const 5664 { 5665 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5666 5667 StructureChain<X, Y, Z...> structureChain; 5668 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); 5669 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 5670 d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, 5671 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 5672 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 5673 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); 5674 5675 return createResultValueType( result, structureChain ); 5676 } 5677 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5678 5679 template <typename Dispatch> getQueueFamilyProperties2(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,Dispatch const & d) const5680 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, 5681 VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, 5682 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5683 { 5684 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5685 d.vkGetPhysicalDeviceQueueFamilyProperties2( 5686 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); 5687 } 5688 5689 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 5690 template <typename QueueFamilyProperties2Allocator, typename Dispatch> 5691 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2(Dispatch const & d) const5692 PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const 5693 { 5694 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5695 5696 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; 5697 uint32_t queueFamilyPropertyCount; 5698 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 5699 queueFamilyProperties.resize( queueFamilyPropertyCount ); 5700 d.vkGetPhysicalDeviceQueueFamilyProperties2( 5701 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 5702 5703 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 5704 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 5705 { 5706 queueFamilyProperties.resize( queueFamilyPropertyCount ); 5707 } 5708 return queueFamilyProperties; 5709 } 5710 5711 template <typename QueueFamilyProperties2Allocator, 5712 typename Dispatch, 5713 typename B1, 5714 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type> 5715 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2(QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,Dispatch const & d) const5716 PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const 5717 { 5718 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5719 5720 
std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); 5721 uint32_t queueFamilyPropertyCount; 5722 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 5723 queueFamilyProperties.resize( queueFamilyPropertyCount ); 5724 d.vkGetPhysicalDeviceQueueFamilyProperties2( 5725 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 5726 5727 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 5728 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 5729 { 5730 queueFamilyProperties.resize( queueFamilyPropertyCount ); 5731 } 5732 return queueFamilyProperties; 5733 } 5734 5735 template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> 5736 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2(Dispatch const & d) const5737 PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const 5738 { 5739 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5740 5741 std::vector<StructureChain, StructureChainAllocator> structureChains; 5742 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 5743 uint32_t queueFamilyPropertyCount; 5744 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 5745 structureChains.resize( queueFamilyPropertyCount ); 5746 queueFamilyProperties.resize( queueFamilyPropertyCount ); 5747 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 5748 { 5749 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 5750 } 5751 d.vkGetPhysicalDeviceQueueFamilyProperties2( 5752 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( 
queueFamilyProperties.data() ) ); 5753 5754 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 5755 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 5756 { 5757 structureChains.resize( queueFamilyPropertyCount ); 5758 } 5759 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 5760 { 5761 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 5762 } 5763 return structureChains; 5764 } 5765 5766 template <typename StructureChain, 5767 typename StructureChainAllocator, 5768 typename Dispatch, 5769 typename B1, 5770 typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type> 5771 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2(StructureChainAllocator & structureChainAllocator,Dispatch const & d) const5772 PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const 5773 { 5774 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5775 5776 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 5777 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; 5778 uint32_t queueFamilyPropertyCount; 5779 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 5780 structureChains.resize( queueFamilyPropertyCount ); 5781 queueFamilyProperties.resize( queueFamilyPropertyCount ); 5782 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 5783 { 5784 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; 5785 } 5786 d.vkGetPhysicalDeviceQueueFamilyProperties2( 5787 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 5788 5789 VULKAN_HPP_ASSERT( 
queueFamilyPropertyCount <= queueFamilyProperties.size() ); 5790 if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) 5791 { 5792 structureChains.resize( queueFamilyPropertyCount ); 5793 } 5794 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) 5795 { 5796 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; 5797 } 5798 return structureChains; 5799 } 5800 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5801 5802 template <typename Dispatch> getMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,Dispatch const & d) const5803 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, 5804 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5805 { 5806 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5807 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); 5808 } 5809 5810 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5811 template <typename Dispatch> 5812 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const & d) const5813 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5814 { 5815 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5816 5817 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; 5818 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 5819 5820 return memoryProperties; 5821 } 5822 5823 template <typename X, typename Y, typename... 
Z, typename Dispatch> 5824 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const & d) const5825 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5826 { 5827 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5828 5829 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 5830 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = 5831 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); 5832 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 5833 5834 return structureChain; 5835 } 5836 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5837 5838 template <typename Dispatch> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,Dispatch const & d) const5839 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, 5840 uint32_t * pPropertyCount, 5841 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, 5842 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5843 { 5844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5845 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, 5846 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), 5847 pPropertyCount, 5848 reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); 5849 } 5850 5851 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5852 template <typename SparseImageFormatProperties2Allocator, typename Dispatch> 5853 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, 
// Enhanced getSparseImageFormatProperties2 returning std::vector: standard Vulkan
// two-call enumeration — first call with nullptr to get the count, resize, second
// call to fill, then shrink if the driver wrote fewer entries than first reported.
// (No VkResult here: this physical-device query cannot fail.) The second template
// below is the overload taking a caller-supplied vector allocator, constrained via
// enable_if on B1::value_type.
SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,Dispatch const & d) const5854 PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const 5855 { 5856 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5857 5858 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; 5859 uint32_t propertyCount; 5860 d.vkGetPhysicalDeviceSparseImageFormatProperties2( 5861 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 5862 properties.resize( propertyCount ); 5863 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, 5864 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 5865 &propertyCount, 5866 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 5867 5868 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 5869 if ( propertyCount < properties.size() ) 5870 { 5871 properties.resize( propertyCount ); 5872 } 5873 return properties; 5874 } 5875 5876 template <typename SparseImageFormatProperties2Allocator, 5877 typename Dispatch, 5878 typename B1, 5879 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, int>::type> 5880 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,Dispatch const & d) const5881 PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
// Allocator-taking getSparseImageFormatProperties2 body (vector constructed from the
// caller's allocator, otherwise the same two-call enumeration as above), then
// Device::trimCommandPool — a void wrapper over vkTrimCommandPool — and the start of
// Device::getQueue2's raw-pointer overload over vkGetDeviceQueue2.
5882 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, 5883 Dispatch const & d ) const 5884 { 5885 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5886 5887 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); 5888 uint32_t propertyCount; 5889 d.vkGetPhysicalDeviceSparseImageFormatProperties2( 5890 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); 5891 properties.resize( propertyCount ); 5892 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, 5893 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 5894 &propertyCount, 5895 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 5896 5897 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 5898 if ( propertyCount < properties.size() ) 5899 { 5900 properties.resize( propertyCount ); 5901 } 5902 return properties; 5903 } 5904 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5905 5906 template <typename Dispatch> trimCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,Dispatch const & d) const5907 VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 5908 VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, 5909 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5910 { 5911 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5912 d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); 5913 } 5914 5915 template <typename Dispatch> getQueue2(const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,VULKAN_HPP_NAMESPACE::Queue * pQueue,Dispatch const & d) const5916 VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
// getQueue2 raw-pointer body plus the value-returning enhanced overload (returns the
// Queue handle by value; vkGetDeviceQueue2 cannot fail). Then the start of
// Device::createSamplerYcbcrConversion — raw-pointer overload returning the Result
// from vkCreateSamplerYcbcrConversion unmodified.
5917 VULKAN_HPP_NAMESPACE::Queue * pQueue, 5918 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5919 { 5920 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5921 d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) ); 5922 } 5923 5924 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5925 template <typename Dispatch> getQueue2(const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,Dispatch const & d) const5926 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, 5927 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5928 { 5929 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5930 5931 VULKAN_HPP_NAMESPACE::Queue queue; 5932 d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) ); 5933 5934 return queue; 5935 } 5936 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5937 5938 template <typename Dispatch> 5939 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createSamplerYcbcrConversion(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,Dispatch const & d) const5940 Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, 5941 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 5942 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, 5943 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5944 { 5945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5946 return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, 5947 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), 5948 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 5949
// Enhanced createSamplerYcbcrConversion: creates the conversion, routes the VkResult
// through resultCheck (throws or asserts on failure, per exception configuration) and
// returns the handle via createResultValueType. Followed by the start of the
// ...Unique variant that wraps the handle for automatic destruction.
reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); 5950 } 5951 5952 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 5953 template <typename Dispatch> 5954 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const5955 Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 5956 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 5957 Dispatch const & d ) const 5958 { 5959 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5960 5961 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 5962 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion( 5963 m_device, 5964 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 5965 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 5966 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 5967 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); 5968 5969 return createResultValueType( result, ycbcrConversion ); 5970 } 5971 5972 # ifndef VULKAN_HPP_NO_SMART_HANDLE 5973 template <typename Dispatch> 5974 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const5975 Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo &
// createSamplerYcbcrConversionUnique body: same creation call, but the returned
// UniqueHandle carries an ObjectDestroy<Device, Dispatch> deleter (this device, the
// same allocator, the same dispatcher) so the conversion is destroyed automatically.
// Then destroySamplerYcbcrConversion's raw-pointer overload over
// vkDestroySamplerYcbcrConversion.
createInfo, 5976 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 5977 Dispatch const & d ) const 5978 { 5979 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 5980 5981 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 5982 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion( 5983 m_device, 5984 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 5985 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 5986 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 5987 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); 5988 5989 return createResultValueType( 5990 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 5991 } 5992 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 5993 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 5994 5995 template <typename Dispatch> destroySamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const5996 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 5997 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 5998 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 5999 { 6000 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6001 d.vkDestroySamplerYcbcrConversion( 6002 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6003 } 6004 6005 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6006 template <typename Dispatch> destroySamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion
// destroySamplerYcbcrConversion Optional-allocator overload, plus the generic
// Device::destroy( SamplerYcbcrConversion, ... ) overloads — identical bodies to the
// named destroy functions; generated so UniqueHandle deleters and generic code can
// call a uniform `destroy`.
ycbcrConversion,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6007 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6008 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6009 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6010 { 6011 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6012 6013 d.vkDestroySamplerYcbcrConversion( 6014 m_device, 6015 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 6016 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6017 } 6018 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6019 6020 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6021 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6022 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6023 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6024 { 6025 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6026 d.vkDestroySamplerYcbcrConversion( 6027 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6028 } 6029 6030 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6031 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6032 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 6033 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6034 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6035 { 6036 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
// Tail of destroy(SamplerYcbcrConversion, Optional allocator), then
// Device::createDescriptorUpdateTemplate: the raw-pointer overload returns the
// vkCreateDescriptorUpdateTemplate Result unmodified; the enhanced overload (start
// here, body on the next source line) returns the handle through ResultValueType.
6037 6038 d.vkDestroySamplerYcbcrConversion( 6039 m_device, 6040 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 6041 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6042 } 6043 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6044 6045 template <typename Dispatch> 6046 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDescriptorUpdateTemplate(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,Dispatch const & d) const6047 Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, 6048 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6049 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, 6050 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6051 { 6052 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6053 return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, 6054 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), 6055 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 6056 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); 6057 } 6058 6059 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6060 template <typename Dispatch> 6061 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6062 Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 6063 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>
// Enhanced createDescriptorUpdateTemplate body (resultCheck + createResultValueType),
// followed by the start of createDescriptorUpdateTemplateUnique, which wraps the new
// handle in a UniqueHandle.
allocator, 6064 Dispatch const & d ) const 6065 { 6066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6067 6068 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 6069 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate( 6070 m_device, 6071 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 6072 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6073 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 6074 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); 6075 6076 return createResultValueType( result, descriptorUpdateTemplate ); 6077 } 6078 6079 # ifndef VULKAN_HPP_NO_SMART_HANDLE 6080 template <typename Dispatch> 6081 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6082 Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 6083 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6084 Dispatch const & d ) const 6085 { 6086 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6087 6088 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 6089 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplate( 6090 m_device, 6091 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 6092 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>(
// Tail of createDescriptorUpdateTemplateUnique (UniqueHandle with
// ObjectDestroy<Device, Dispatch> deleter), then destroyDescriptorUpdateTemplate:
// raw-pointer overload and the start of the Optional-allocator overload, both
// forwarding to vkDestroyDescriptorUpdateTemplate.
allocator ) ), 6093 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 6094 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); 6095 6096 return createResultValueType( result, 6097 UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( 6098 descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 6099 } 6100 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 6101 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6102 6103 template <typename Dispatch> destroyDescriptorUpdateTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6104 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6105 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6107 { 6108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6109 d.vkDestroyDescriptorUpdateTemplate( 6110 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6111 } 6112 6113 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6114 template <typename Dispatch> destroyDescriptorUpdateTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6115 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6116 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6117 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6118 { 6119 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6120 6121 d.vkDestroyDescriptorUpdateTemplate( 6122 m_device, 6123
// Generic Device::destroy( DescriptorUpdateTemplate, ... ) overloads — same bodies as
// the named destroyDescriptorUpdateTemplate functions above, generated for uniform
// `destroy` dispatch (UniqueHandle deleters). Ends at the start of
// updateDescriptorSetWithTemplate.
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6124 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6125 } 6126 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6127 6128 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6129 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6130 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6131 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6132 { 6133 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6134 d.vkDestroyDescriptorUpdateTemplate( 6135 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6136 } 6137 6138 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6139 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6140 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6141 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6142 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6143 { 6144 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6145 6146 d.vkDestroyDescriptorUpdateTemplate( 6147 m_device, 6148 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6149 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6150 } 6151 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6152 6153 template <typename Dispatch> updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet
// updateDescriptorSetWithTemplate: raw `const void *` overload, then the enhanced
// templated overload taking `DataType const &` and passing its address — the template
// layout (set up at DescriptorUpdateTemplate creation) defines how the pointed-to
// bytes are interpreted, so DataType must match that layout (caller's contract).
descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const6154 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 6155 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6156 const void * pData, 6157 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6158 { 6159 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6160 d.vkUpdateDescriptorSetWithTemplate( 6161 m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); 6162 } 6163 6164 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6165 template <typename DataType, typename Dispatch> updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,DataType const & data,Dispatch const & d) const6166 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 6167 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 6168 DataType const & data, 6169 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6170 { 6171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6172 6173 d.vkUpdateDescriptorSetWithTemplate( m_device, 6174 static_cast<VkDescriptorSet>( descriptorSet ), 6175 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 6176 reinterpret_cast<const void *>( &data ) ); 6177 } 6178 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6179 6180 template <typename Dispatch> getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const6181 VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const
// PhysicalDevice::getExternalBufferProperties: raw-pointer overload and the
// value-returning enhanced overload over vkGetPhysicalDeviceExternalBufferProperties
// (a cannot-fail query, hence void/value return with no Result).
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 6182 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 6183 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6184 { 6185 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6186 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, 6187 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 6188 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 6189 } 6190 6191 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6192 template <typename Dispatch> 6193 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const6194 PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, 6195 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6196 { 6197 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6198 6199 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 6200 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, 6201 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 6202 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 6203 6204 return externalBufferProperties; 6205 } 6206 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6207 6208 template <typename Dispatch> getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const6209 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 6210 VULKAN_HPP_NAMESPACE::ExternalFenceProperties *
// PhysicalDevice::getExternalFenceProperties: raw-pointer body and value-returning
// enhanced overload over vkGetPhysicalDeviceExternalFenceProperties — same
// wrapper shape as the external-buffer query above.
pExternalFenceProperties, 6211 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6212 { 6213 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6214 d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, 6215 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 6216 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); 6217 } 6218 6219 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6220 template <typename Dispatch> 6221 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const6222 PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, 6223 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6224 { 6225 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6226 6227 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 6228 d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, 6229 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 6230 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); 6231 6232 return externalFenceProperties; 6233 } 6234 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6235 6236 template <typename Dispatch> 6237 VULKAN_HPP_INLINE void getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const6238 PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 6239 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, 6240 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6241 { 6242 VULKAN_HPP_ASSERT(
// PhysicalDevice::getExternalSemaphoreProperties: raw-pointer body and
// value-returning enhanced overload over
// vkGetPhysicalDeviceExternalSemaphoreProperties. Ends at the start of
// Device::getDescriptorSetLayoutSupport's raw-pointer overload.
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6243 d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, 6244 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 6245 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 6246 } 6247 6248 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6249 template <typename Dispatch> 6250 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const6251 PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, 6252 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6253 { 6254 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6255 6256 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 6257 d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, 6258 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 6259 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 6260 6261 return externalSemaphoreProperties; 6262 } 6263 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6264 6265 template <typename Dispatch> getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,Dispatch const & d) const6266 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 6267 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, 6268 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6269 { 6270 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6271 d.vkGetDescriptorSetLayoutSupport( 6272 m_device, reinterpret_cast<const
// getDescriptorSetLayoutSupport: tail of the raw-pointer overload, then the
// value-returning enhanced overload over vkGetDescriptorSetLayoutSupport; the
// StructureChain overload's template head begins at the end of this line.
VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); 6273 } 6274 6275 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6276 template <typename Dispatch> 6277 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const6278 Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 6279 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6280 { 6281 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6282 6283 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 6284 d.vkGetDescriptorSetLayoutSupport( 6285 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 6286 6287 return support; 6288 } 6289 6290 template <typename X, typename Y, typename...
// getDescriptorSetLayoutSupport StructureChain overload (fills the
// DescriptorSetLayoutSupport link of the caller's chain so chained extension
// structs are also populated). Then the VK_VERSION_1_2 section begins with
// CommandBuffer::drawIndirectCount — a direct wrapper over vkCmdDrawIndirectCount.
Z, typename Dispatch> 6291 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const6292 Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 6293 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6294 { 6295 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6296 6297 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 6298 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); 6299 d.vkGetDescriptorSetLayoutSupport( 6300 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 6301 6302 return structureChain; 6303 } 6304 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6305 6306 //=== VK_VERSION_1_2 === 6307 6308 template <typename Dispatch> drawIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const6309 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, 6310 VULKAN_HPP_NAMESPACE::DeviceSize offset, 6311 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 6312 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 6313 uint32_t maxDrawCount, 6314 uint32_t stride, 6315 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6316 { 6317 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6318 d.vkCmdDrawIndirectCount( m_commandBuffer, 6319 static_cast<VkBuffer>( buffer ), 6320 static_cast<VkDeviceSize>( offset ), 6321 static_cast<VkBuffer>( countBuffer ), 6322 static_cast<VkDeviceSize>( countBufferOffset ), 6323 maxDrawCount, 6324 stride
// End of drawIndirectCount, then CommandBuffer::drawIndexedIndirectCount (wrapper
// over vkCmdDrawIndexedIndirectCount — handles and sizes cast to their Vk types,
// plain uint32_t counts passed through), and the raw-pointer overload of
// Device::createRenderPass2 returning the vkCreateRenderPass2 Result unmodified.
); 6325 } 6326 6327 template <typename Dispatch> drawIndexedIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const6328 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, 6329 VULKAN_HPP_NAMESPACE::DeviceSize offset, 6330 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 6331 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 6332 uint32_t maxDrawCount, 6333 uint32_t stride, 6334 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6335 { 6336 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6337 d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, 6338 static_cast<VkBuffer>( buffer ), 6339 static_cast<VkDeviceSize>( offset ), 6340 static_cast<VkBuffer>( countBuffer ), 6341 static_cast<VkDeviceSize>( countBufferOffset ), 6342 maxDrawCount, 6343 stride ); 6344 } 6345 6346 template <typename Dispatch> createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const6347 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 6348 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6349 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 6350 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6351 { 6352 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6353 return static_cast<Result>( d.vkCreateRenderPass2( m_device, 6354 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 6355 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 6356 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 6357 } 6358 6359 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6360 template
<typename Dispatch> 6361 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6362 Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 6363 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6364 Dispatch const & d ) const 6365 { 6366 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6367 6368 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 6369 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6370 d.vkCreateRenderPass2( m_device, 6371 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 6372 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6373 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 6374 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); 6375 6376 return createResultValueType( result, renderPass ); 6377 } 6378 6379 # ifndef VULKAN_HPP_NO_SMART_HANDLE 6380 template <typename Dispatch> 6381 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2Unique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6382 Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 6383 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6384 Dispatch const & d ) const 6385 { 6386 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6387 6388 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 6389 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
6390 d.vkCreateRenderPass2( m_device, 6391 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 6392 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6393 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 6394 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); 6395 6396 return createResultValueType( 6397 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 6398 } 6399 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 6400 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6401 6402 template <typename Dispatch> beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const6403 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 6404 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 6405 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6406 { 6407 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6408 d.vkCmdBeginRenderPass2( 6409 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 6410 } 6411 6412 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6413 template <typename Dispatch> beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const6414 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 6415 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 6416 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6417 { 6418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 6419 6420 d.vkCmdBeginRenderPass2( 6421 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 6422 } 6423 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6424 6425 template <typename Dispatch> nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const6426 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 6427 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 6428 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6429 { 6430 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6431 d.vkCmdNextSubpass2( 6432 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 6433 } 6434 6435 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6436 template <typename Dispatch> nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const6437 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 6438 const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 6439 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6440 { 6441 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6442 6443 d.vkCmdNextSubpass2( 6444 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 6445 } 6446 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6447 6448 template <typename Dispatch> endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const6449 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const 
VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 6450 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6451 { 6452 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6453 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 6454 } 6455 6456 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6457 template <typename Dispatch> endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const6458 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 6459 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6460 { 6461 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6462 6463 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 6464 } 6465 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6466 6467 template <typename Dispatch> 6468 VULKAN_HPP_INLINE void resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const6469 Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6470 { 6471 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6472 d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 6473 } 6474 6475 template <typename Dispatch> getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore,uint64_t * pValue,Dispatch const & d) const6476 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 6477 uint64_t * pValue, 6478 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6479 { 6480 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6481 return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) 
); 6482 } 6483 6484 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6485 template <typename Dispatch> getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Dispatch const & d) const6486 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 6487 Dispatch const & d ) const 6488 { 6489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6490 6491 uint64_t value; 6492 VULKAN_HPP_NAMESPACE::Result result = 6493 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); 6494 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); 6495 6496 return createResultValueType( result, value ); 6497 } 6498 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6499 6500 template <typename Dispatch> waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,uint64_t timeout,Dispatch const & d) const6501 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, 6502 uint64_t timeout, 6503 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6504 { 6505 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6506 return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); 6507 } 6508 6509 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6510 template <typename Dispatch> 6511 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,uint64_t timeout,Dispatch const & d) const6512 Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const 6513 { 6514 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6515 6516 VULKAN_HPP_NAMESPACE::Result result = 6517 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); 6518 resultCheck( 6519 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 6520 6521 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 6522 } 6523 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6524 6525 template <typename Dispatch> signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,Dispatch const & d) const6526 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, 6527 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6528 { 6529 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6530 return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); 6531 } 6532 6533 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6534 template <typename Dispatch> 6535 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,Dispatch const & d) const6536 Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const 6537 { 6538 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6539 6540 VULKAN_HPP_NAMESPACE::Result result = 6541 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); 6542 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); 6543 6544 return createResultValueType( result ); 6545 } 6546 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6547 6548 template <typename Dispatch> getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const 
& d) const6549 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 6550 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6551 { 6552 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6553 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 6554 } 6555 6556 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6557 template <typename Dispatch> getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const6558 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 6559 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6560 { 6561 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6562 6563 VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 6564 6565 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 6566 } 6567 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6568 6569 template <typename Dispatch> getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const6570 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 6571 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6572 { 6573 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6574 return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); 6575 } 6576 6577 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6578 template <typename Dispatch> getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const6579 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const 
VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 6580 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6581 { 6582 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6583 6584 uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 6585 6586 return result; 6587 } 6588 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6589 6590 template <typename Dispatch> getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,Dispatch const & d) const6591 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, 6592 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6593 { 6594 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6595 return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); 6596 } 6597 6598 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6599 template <typename Dispatch> getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,Dispatch const & d) const6600 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, 6601 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6602 { 6603 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6604 6605 uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); 6606 6607 return result; 6608 } 6609 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6610 6611 //=== VK_VERSION_1_3 === 6612 6613 template <typename Dispatch> getToolProperties(uint32_t * pToolCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,Dispatch const & d) const6614 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
PhysicalDevice::getToolProperties( uint32_t * pToolCount, 6615 VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, 6616 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6617 { 6618 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6619 return static_cast<Result>( 6620 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); 6621 } 6622 6623 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6624 template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch> 6625 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 6626 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties(Dispatch const & d) const6627 PhysicalDevice::getToolProperties( Dispatch const & d ) const 6628 { 6629 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6630 6631 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; 6632 uint32_t toolCount; 6633 VULKAN_HPP_NAMESPACE::Result result; 6634 do 6635 { 6636 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); 6637 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 6638 { 6639 toolProperties.resize( toolCount ); 6640 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6641 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 6642 } 6643 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 6644 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); 6645 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 6646 if ( toolCount < toolProperties.size() ) 6647 { 6648 toolProperties.resize( toolCount ); 6649 } 6650 return 
createResultValueType( result, toolProperties ); 6651 } 6652 6653 template <typename PhysicalDeviceToolPropertiesAllocator, 6654 typename Dispatch, 6655 typename B1, 6656 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, int>::type> 6657 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 6658 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolProperties(PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,Dispatch const & d) const6659 PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const 6660 { 6661 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6662 6663 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( 6664 physicalDeviceToolPropertiesAllocator ); 6665 uint32_t toolCount; 6666 VULKAN_HPP_NAMESPACE::Result result; 6667 do 6668 { 6669 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); 6670 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 6671 { 6672 toolProperties.resize( toolCount ); 6673 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6674 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 6675 } 6676 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 6677 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); 6678 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 6679 if ( toolCount < toolProperties.size() ) 6680 { 6681 toolProperties.resize( toolCount ); 6682 } 6683 return createResultValueType( result, toolProperties ); 6684 } 6685 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6686 6687 template <typename Dispatch> createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,Dispatch const & d) const6688 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, 6689 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6690 VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, 6691 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6692 { 6693 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6694 return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, 6695 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), 6696 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 6697 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); 6698 } 6699 6700 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6701 template <typename Dispatch> 6702 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6703 Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 6704 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6705 Dispatch const & d ) const 6706 { 6707 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6708 6709 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 6710 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6711 d.vkCreatePrivateDataSlot( m_device, 6712 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 6713 reinterpret_cast<const VkAllocationCallbacks *>( 
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6714 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 6715 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); 6716 6717 return createResultValueType( result, privateDataSlot ); 6718 } 6719 6720 # ifndef VULKAN_HPP_NO_SMART_HANDLE 6721 template <typename Dispatch> 6722 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6723 Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 6724 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6725 Dispatch const & d ) const 6726 { 6727 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6728 6729 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 6730 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6731 d.vkCreatePrivateDataSlot( m_device, 6732 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 6733 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 6734 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 6735 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); 6736 6737 return createResultValueType( 6738 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 6739 } 6740 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 6741 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6742 6743 template <typename Dispatch> destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6744 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 6745 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6746 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6747 { 6748 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6749 d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6750 } 6751 6752 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6753 template <typename Dispatch> destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6754 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 6755 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6756 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6757 { 6758 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6759 6760 d.vkDestroyPrivateDataSlot( 6761 m_device, 6762 static_cast<VkPrivateDataSlot>( privateDataSlot ), 6763 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6764 } 6765 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6766 6767 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const6768 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 6769 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 6770 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6771 { 6772 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6773 d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( 
privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 6774 } 6775 6776 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6777 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6778 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 6779 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 6780 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6781 { 6782 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6783 6784 d.vkDestroyPrivateDataSlot( 6785 m_device, 6786 static_cast<VkPrivateDataSlot>( privateDataSlot ), 6787 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 6788 } 6789 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6790 6791 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 6792 template <typename Dispatch> setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const6793 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 6794 uint64_t objectHandle, 6795 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 6796 uint64_t data, 6797 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6798 { 6799 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6800 return static_cast<Result>( 6801 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 6802 } 6803 #else 6804 template <typename Dispatch> setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const6805 VULKAN_HPP_INLINE typename 
ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 6806 uint64_t objectHandle, 6807 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 6808 uint64_t data, 6809 Dispatch const & d ) const 6810 { 6811 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6812 6813 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6814 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 6815 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); 6816 6817 return createResultValueType( result ); 6818 } 6819 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 6820 6821 template <typename Dispatch> getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t * pData,Dispatch const & d) const6822 VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 6823 uint64_t objectHandle, 6824 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 6825 uint64_t * pData, 6826 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6827 { 6828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6829 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); 6830 } 6831 6832 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6833 template <typename Dispatch> getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Dispatch const & d) const6834 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 6835 uint64_t objectHandle, 6836 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 6837 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6838 { 6839 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6840 6841 uint64_t data; 6842 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); 6843 6844 return data; 6845 } 6846 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6847 6848 template <typename Dispatch> setEvent2(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const6849 VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, 6850 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 6851 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6852 { 6853 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6854 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 6855 } 6856 6857 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6858 template <typename Dispatch> setEvent2(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const6859 VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, 6860 const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 6861 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6862 { 6863 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6864 6865 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 6866 } 6867 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6868 6869 template <typename Dispatch> resetEvent2(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,Dispatch const & d) const6870 VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, 6871 VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, 6872 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6873 { 6874 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6875 d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); 6876 } 6877 6878 template <typename Dispatch> waitEvents2(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,Dispatch const & d) const6879 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, 6880 const VULKAN_HPP_NAMESPACE::Event * pEvents, 6881 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, 6882 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6883 { 6884 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6885 d.vkCmdWaitEvents2( 6886 m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); 6887 } 6888 6889 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6890 template <typename Dispatch> waitEvents2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,Dispatch const & d) const6891 VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 6892 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, 6893 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 6894 { 6895 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6896 # ifdef VULKAN_HPP_NO_EXCEPTIONS 6897 VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); 6898 # else 6899 if ( events.size() != dependencyInfos.size() ) 6900 { 6901 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); 6902 } 6903 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 6904 6905 d.vkCmdWaitEvents2( 
m_commandBuffer, 6906 events.size(), 6907 reinterpret_cast<const VkEvent *>( events.data() ), 6908 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); 6909 } 6910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6911 6912 template <typename Dispatch> pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,Dispatch const & d) const6913 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, 6914 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6915 { 6916 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6917 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); 6918 } 6919 6920 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6921 template <typename Dispatch> pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,Dispatch const & d) const6922 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, 6923 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6924 { 6925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6926 6927 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); 6928 } 6929 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6930 6931 template <typename Dispatch> writeTimestamp2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const6932 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 6933 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 6934 uint32_t query, 6935 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6936 { 6937 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6938 d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); 6939 } 
6940 6941 template <typename Dispatch> submit2(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const6942 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, 6943 const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, 6944 VULKAN_HPP_NAMESPACE::Fence fence, 6945 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6946 { 6947 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6948 return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 6949 } 6950 6951 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6952 template <typename Dispatch> submit2(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const6953 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2( 6954 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const 6955 { 6956 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6957 6958 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 6959 d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); 6960 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); 6961 6962 return createResultValueType( result ); 6963 } 6964 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6965 6966 template <typename Dispatch> copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,Dispatch const & d) const6967 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, 6968 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6969 { 6970 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6971 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); 6972 } 6973 6974 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6975 template <typename Dispatch> copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,Dispatch const & d) const6976 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, 6977 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6978 { 6979 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6980 6981 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) ); 6982 } 6983 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 6984 6985 template <typename Dispatch> copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,Dispatch const & d) const6986 VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6987 { 6988 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6989 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); 6990 } 6991 6992 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 6993 template <typename Dispatch> copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,Dispatch const & d) const6994 VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 6995 { 6996 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 6997 6998 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); 6999 } 7000 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7001 7002 template <typename Dispatch> copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,Dispatch const 
& d) const7003 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, 7004 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7005 { 7006 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7007 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); 7008 } 7009 7010 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7011 template <typename Dispatch> copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,Dispatch const & d) const7012 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, 7013 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7014 { 7015 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7016 7017 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); 7018 } 7019 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7020 7021 template <typename Dispatch> copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,Dispatch const & d) const7022 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, 7023 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7024 { 7025 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7026 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); 7027 } 7028 7029 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7030 template <typename Dispatch> copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,Dispatch const & d) const7031 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, 7032 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 7033 { 7034 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7035 7036 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); 7037 } 7038 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7039 7040 template <typename Dispatch> blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,Dispatch const & d) const7041 VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7042 { 7043 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7044 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); 7045 } 7046 7047 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7048 template <typename Dispatch> blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,Dispatch const & d) const7049 VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7050 { 7051 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7052 7053 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); 7054 } 7055 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7056 7057 template <typename Dispatch> resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,Dispatch const & d) const7058 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, 7059 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7060 { 7061 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7062 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); 7063 } 7064 7065 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7066 template <typename Dispatch> 
resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,Dispatch const & d) const7067 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, 7068 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7069 { 7070 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7071 7072 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); 7073 } 7074 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7075 7076 template <typename Dispatch> beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,Dispatch const & d) const7077 VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 7078 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7079 { 7080 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7081 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); 7082 } 7083 7084 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7085 template <typename Dispatch> beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const7086 VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, 7087 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7088 { 7089 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7090 7091 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); 7092 } 7093 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7094 7095 template <typename Dispatch> endRendering(Dispatch const & d) const7096 VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7097 { 7098 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7099 d.vkCmdEndRendering( m_commandBuffer ); 7100 } 7101 7102 template <typename Dispatch> 
setCullMode(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,Dispatch const & d) const7103 VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7104 { 7105 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7106 d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) ); 7107 } 7108 7109 template <typename Dispatch> setFrontFace(VULKAN_HPP_NAMESPACE::FrontFace frontFace,Dispatch const & d) const7110 VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7111 { 7112 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7113 d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) ); 7114 } 7115 7116 template <typename Dispatch> setPrimitiveTopology(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,Dispatch const & d) const7117 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, 7118 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7119 { 7120 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7121 d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) ); 7122 } 7123 7124 template <typename Dispatch> setViewportWithCount(uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const7125 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, 7126 const VULKAN_HPP_NAMESPACE::Viewport * pViewports, 7127 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7128 { 7129 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7130 d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 7131 } 7132 7133 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7134 template <typename Dispatch> 
setViewportWithCount(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const7135 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 7136 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7137 { 7138 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7139 7140 d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 7141 } 7142 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7143 7144 template <typename Dispatch> 7145 VULKAN_HPP_INLINE void setScissorWithCount(uint32_t scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const7146 CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7147 { 7148 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7149 d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 7150 } 7151 7152 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7153 template <typename Dispatch> setScissorWithCount(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,Dispatch const & d) const7154 VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 7155 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7156 { 7157 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7158 7159 d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 7160 } 7161 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7162 7163 template <typename Dispatch> bindVertexBuffers2(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const 
VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,Dispatch const & d) const7164 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, 7165 uint32_t bindingCount, 7166 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 7167 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 7168 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 7169 const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, 7170 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7171 { 7172 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7173 d.vkCmdBindVertexBuffers2( m_commandBuffer, 7174 firstBinding, 7175 bindingCount, 7176 reinterpret_cast<const VkBuffer *>( pBuffers ), 7177 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 7178 reinterpret_cast<const VkDeviceSize *>( pSizes ), 7179 reinterpret_cast<const VkDeviceSize *>( pStrides ) ); 7180 } 7181 7182 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7183 template <typename Dispatch> bindVertexBuffers2(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,Dispatch const & d) const7184 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, 7185 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 7186 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 7187 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 7188 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, 7189 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 7190 { 7191 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7192 # ifdef VULKAN_HPP_NO_EXCEPTIONS 7193 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 7194 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 7195 VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); 7196 # else 7197 if ( buffers.size() != offsets.size() ) 7198 { 7199 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); 7200 } 7201 if ( !sizes.empty() && buffers.size() != sizes.size() ) 7202 { 7203 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); 7204 } 7205 if ( !strides.empty() && buffers.size() != strides.size() ) 7206 { 7207 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); 7208 } 7209 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 7210 7211 d.vkCmdBindVertexBuffers2( m_commandBuffer, 7212 firstBinding, 7213 buffers.size(), 7214 reinterpret_cast<const VkBuffer *>( buffers.data() ), 7215 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 7216 reinterpret_cast<const VkDeviceSize *>( sizes.data() ), 7217 reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); 7218 } 7219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7220 7221 template <typename Dispatch> setDepthTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,Dispatch const & d) const7222 VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7223 { 7224 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7225 d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) ); 7226 } 7227 7228 template <typename Dispatch> setDepthWriteEnable(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,Dispatch const & d) const7229 VULKAN_HPP_INLINE void 
CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7230 { 7231 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7232 d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) ); 7233 } 7234 7235 template <typename Dispatch> setDepthCompareOp(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,Dispatch const & d) const7236 VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7237 { 7238 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7239 d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) ); 7240 } 7241 7242 template <typename Dispatch> setDepthBoundsTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,Dispatch const & d) const7243 VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, 7244 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7245 { 7246 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7247 d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) ); 7248 } 7249 7250 template <typename Dispatch> setStencilTestEnable(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,Dispatch const & d) const7251 VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7252 { 7253 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7254 d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) ); 7255 } 7256 7257 template <typename Dispatch> setStencilOp(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,VULKAN_HPP_NAMESPACE::StencilOp failOp,VULKAN_HPP_NAMESPACE::StencilOp passOp,VULKAN_HPP_NAMESPACE::StencilOp 
depthFailOp,VULKAN_HPP_NAMESPACE::CompareOp compareOp,Dispatch const & d) const7258 VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, 7259 VULKAN_HPP_NAMESPACE::StencilOp failOp, 7260 VULKAN_HPP_NAMESPACE::StencilOp passOp, 7261 VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, 7262 VULKAN_HPP_NAMESPACE::CompareOp compareOp, 7263 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7264 { 7265 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7266 d.vkCmdSetStencilOp( m_commandBuffer, 7267 static_cast<VkStencilFaceFlags>( faceMask ), 7268 static_cast<VkStencilOp>( failOp ), 7269 static_cast<VkStencilOp>( passOp ), 7270 static_cast<VkStencilOp>( depthFailOp ), 7271 static_cast<VkCompareOp>( compareOp ) ); 7272 } 7273 7274 template <typename Dispatch> setRasterizerDiscardEnable(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,Dispatch const & d) const7275 VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, 7276 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7277 { 7278 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7279 d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); 7280 } 7281 7282 template <typename Dispatch> setDepthBiasEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,Dispatch const & d) const7283 VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7284 { 7285 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7286 d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); 7287 } 7288 7289 template <typename Dispatch> setPrimitiveRestartEnable(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,Dispatch const & d) const7290 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 
primitiveRestartEnable, 7291 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7292 { 7293 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7294 d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); 7295 } 7296 7297 template <typename Dispatch> getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const7298 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, 7299 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 7300 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7301 { 7302 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7303 d.vkGetDeviceBufferMemoryRequirements( 7304 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 7305 } 7306 7307 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7308 template <typename Dispatch> 7309 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const7310 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7311 { 7312 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7313 7314 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 7315 d.vkGetDeviceBufferMemoryRequirements( 7316 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 7317 7318 return memoryRequirements; 7319 } 7320 7321 template <typename X, typename Y, typename... 
Z, typename Dispatch> 7322 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const7323 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7324 { 7325 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7326 7327 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 7328 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 7329 d.vkGetDeviceBufferMemoryRequirements( 7330 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 7331 7332 return structureChain; 7333 } 7334 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7335 7336 template <typename Dispatch> getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const7337 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 7338 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 7339 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7340 { 7341 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7342 d.vkGetDeviceImageMemoryRequirements( 7343 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 7344 } 7345 7346 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7347 template <typename Dispatch> 7348 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch 
const & d) const7349 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7350 { 7351 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7352 7353 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 7354 d.vkGetDeviceImageMemoryRequirements( 7355 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 7356 7357 return memoryRequirements; 7358 } 7359 7360 template <typename X, typename Y, typename... Z, typename Dispatch> 7361 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const7362 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7363 { 7364 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7365 7366 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 7367 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 7368 d.vkGetDeviceImageMemoryRequirements( 7369 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 7370 7371 return structureChain; 7372 } 7373 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7374 7375 template <typename Dispatch> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const7376 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * 
pInfo, 7377 uint32_t * pSparseMemoryRequirementCount, 7378 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 7379 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7380 { 7381 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7382 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 7383 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), 7384 pSparseMemoryRequirementCount, 7385 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 7386 } 7387 7388 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7389 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 7390 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const7391 Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const 7392 { 7393 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7394 7395 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 7396 uint32_t sparseMemoryRequirementCount; 7397 d.vkGetDeviceImageSparseMemoryRequirements( 7398 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 7399 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 7400 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 7401 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 7402 &sparseMemoryRequirementCount, 7403 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 7404 7405 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 7406 if ( sparseMemoryRequirementCount < 
sparseMemoryRequirements.size() ) 7407 { 7408 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 7409 } 7410 return sparseMemoryRequirements; 7411 } 7412 7413 template <typename SparseImageMemoryRequirements2Allocator, 7414 typename Dispatch, 7415 typename B1, 7416 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, int>::type> 7417 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const7418 Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, 7419 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 7420 Dispatch const & d ) const 7421 { 7422 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7423 7424 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 7425 sparseImageMemoryRequirements2Allocator ); 7426 uint32_t sparseMemoryRequirementCount; 7427 d.vkGetDeviceImageSparseMemoryRequirements( 7428 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 7429 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 7430 d.vkGetDeviceImageSparseMemoryRequirements( m_device, 7431 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 7432 &sparseMemoryRequirementCount, 7433 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 7434 7435 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 7436 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 
7437 { 7438 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 7439 } 7440 return sparseMemoryRequirements; 7441 } 7442 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7443 7444 //=== VK_KHR_surface === 7445 7446 template <typename Dispatch> destroySurfaceKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const7447 VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7448 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7449 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7450 { 7451 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7452 d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7453 } 7454 7455 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7456 template <typename Dispatch> destroySurfaceKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7457 VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7458 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7459 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7460 { 7461 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7462 7463 d.vkDestroySurfaceKHR( m_instance, 7464 static_cast<VkSurfaceKHR>( surface ), 7465 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7466 } 7467 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7468 7469 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const7470 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7471 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 
7472 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7473 { 7474 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7475 d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7476 } 7477 7478 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7479 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7480 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7481 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7482 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7483 { 7484 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7485 7486 d.vkDestroySurfaceKHR( m_instance, 7487 static_cast<VkSurfaceKHR>( surface ), 7488 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7489 } 7490 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7491 7492 template <typename Dispatch> getSurfaceSupportKHR(uint32_t queueFamilyIndex,VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::Bool32 * pSupported,Dispatch const & d) const7493 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, 7494 VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7495 VULKAN_HPP_NAMESPACE::Bool32 * pSupported, 7496 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7497 { 7498 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7499 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( 7500 m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) ); 7501 } 7502 7503 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7504 template <typename Dispatch> 7505 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR(uint32_t queueFamilyIndex,VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const7506 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 7507 { 7508 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7509 7510 VULKAN_HPP_NAMESPACE::Bool32 supported; 7511 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( 7512 m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) ) ); 7513 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); 7514 7515 return createResultValueType( result, supported ); 7516 } 7517 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7518 7519 template <typename Dispatch> getSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,Dispatch const & d) const7520 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7521 VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, 7522 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7523 { 7524 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7525 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( 7526 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) ); 7527 } 7528 7529 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7530 template <typename Dispatch> 7531 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const7532 
PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 7533 { 7534 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7535 7536 VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; 7537 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( 7538 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) ); 7539 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); 7540 7541 return createResultValueType( result, surfaceCapabilities ); 7542 } 7543 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7544 7545 template <typename Dispatch> getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pSurfaceFormatCount,VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,Dispatch const & d) const7546 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7547 uint32_t * pSurfaceFormatCount, 7548 VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, 7549 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7550 { 7551 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7552 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 7553 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) ); 7554 } 7555 7556 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7557 template <typename SurfaceFormatKHRAllocator, typename Dispatch> 7558 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const7559 PhysicalDevice::getSurfaceFormatsKHR( 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 7560 { 7561 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7562 7563 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats; 7564 uint32_t surfaceFormatCount; 7565 VULKAN_HPP_NAMESPACE::Result result; 7566 do 7567 { 7568 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7569 d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) ); 7570 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 7571 { 7572 surfaceFormats.resize( surfaceFormatCount ); 7573 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 7574 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) ); 7575 } 7576 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7577 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); 7578 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 7579 if ( surfaceFormatCount < surfaceFormats.size() ) 7580 { 7581 surfaceFormats.resize( surfaceFormatCount ); 7582 } 7583 return createResultValueType( result, surfaceFormats ); 7584 } 7585 7586 template <typename SurfaceFormatKHRAllocator, 7587 typename Dispatch, 7588 typename B1, 7589 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, int>::type> 7590 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,Dispatch const & d) const7591 PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7592 SurfaceFormatKHRAllocator & 
surfaceFormatKHRAllocator, 7593 Dispatch const & d ) const 7594 { 7595 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7596 7597 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator ); 7598 uint32_t surfaceFormatCount; 7599 VULKAN_HPP_NAMESPACE::Result result; 7600 do 7601 { 7602 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7603 d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) ); 7604 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 7605 { 7606 surfaceFormats.resize( surfaceFormatCount ); 7607 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 7608 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) ); 7609 } 7610 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7611 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); 7612 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 7613 if ( surfaceFormatCount < surfaceFormats.size() ) 7614 { 7615 surfaceFormats.resize( surfaceFormatCount ); 7616 } 7617 return createResultValueType( result, surfaceFormats ); 7618 } 7619 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7620 7621 template <typename Dispatch> getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pPresentModeCount,VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,Dispatch const & d) const7622 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7623 uint32_t * pPresentModeCount, 7624 VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, 7625 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7626 { 7627 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); 7628 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 7629 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); 7630 } 7631 7632 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7633 template <typename PresentModeKHRAllocator, typename Dispatch> 7634 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const7635 PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 7636 { 7637 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7638 7639 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes; 7640 uint32_t presentModeCount; 7641 VULKAN_HPP_NAMESPACE::Result result; 7642 do 7643 { 7644 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7645 d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); 7646 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 7647 { 7648 presentModes.resize( presentModeCount ); 7649 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 7650 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 7651 } 7652 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7653 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); 7654 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 7655 if ( presentModeCount < presentModes.size() ) 7656 { 7657 presentModes.resize( presentModeCount ); 7658 } 7659 return createResultValueType( result, presentModes ); 7660 } 7661 7662 template 
<typename PresentModeKHRAllocator, 7663 typename Dispatch, 7664 typename B1, 7665 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 7666 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,PresentModeKHRAllocator & presentModeKHRAllocator,Dispatch const & d) const7667 PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7668 PresentModeKHRAllocator & presentModeKHRAllocator, 7669 Dispatch const & d ) const 7670 { 7671 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7672 7673 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); 7674 uint32_t presentModeCount; 7675 VULKAN_HPP_NAMESPACE::Result result; 7676 do 7677 { 7678 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7679 d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); 7680 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 7681 { 7682 presentModes.resize( presentModeCount ); 7683 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( 7684 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 7685 } 7686 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7687 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); 7688 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 7689 if ( presentModeCount < presentModes.size() ) 7690 { 7691 presentModes.resize( presentModeCount ); 7692 } 7693 return createResultValueType( result, presentModes ); 7694 } 7695 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7696 7697 //=== VK_KHR_swapchain === 7698 7699 template <typename Dispatch> createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,Dispatch const & d) const7700 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, 7701 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7702 VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, 7703 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7704 { 7705 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7706 return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, 7707 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), 7708 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7709 reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) ); 7710 } 7711 7712 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7713 template <typename Dispatch> 7714 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7715 Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 7716 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7717 Dispatch const & d ) const 7718 { 7719 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7720 7721 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 7722 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7723 d.vkCreateSwapchainKHR( m_device, 7724 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 7725 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7726 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 7727 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); 7728 7729 return createResultValueType( result, swapchain ); 7730 } 7731 7732 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7733 template <typename Dispatch> 7734 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7735 Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 7736 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7737 Dispatch const & d ) const 7738 { 7739 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7740 7741 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 7742 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7743 d.vkCreateSwapchainKHR( m_device, 7744 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 7745 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7746 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 7747 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); 7748 7749 return createResultValueType( 7750 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 7751 } 7752 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 7753 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7754 7755 template <typename Dispatch> destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const7756 
VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 7757 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7758 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7759 { 7760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7761 d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7762 } 7763 7764 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7765 template <typename Dispatch> destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7766 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 7767 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7768 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7769 { 7770 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7771 7772 d.vkDestroySwapchainKHR( m_device, 7773 static_cast<VkSwapchainKHR>( swapchain ), 7774 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7775 } 7776 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7777 7778 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const7779 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 7780 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7781 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7782 { 7783 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7784 d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 7785 } 7786 7787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7788 template <typename Dispatch> 
destroy(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const7789 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 7790 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 7791 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7792 { 7793 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7794 7795 d.vkDestroySwapchainKHR( m_device, 7796 static_cast<VkSwapchainKHR>( swapchain ), 7797 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 7798 } 7799 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7800 7801 template <typename Dispatch> getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,Dispatch const & d) const7802 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 7803 uint32_t * pSwapchainImageCount, 7804 VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, 7805 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7806 { 7807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7808 return static_cast<Result>( 7809 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) ); 7810 } 7811 7812 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7813 template <typename ImageAllocator, typename Dispatch> 7814 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const7815 Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 7816 { 7817 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 7818 7819 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages; 7820 uint32_t swapchainImageCount; 7821 VULKAN_HPP_NAMESPACE::Result result; 7822 do 7823 { 7824 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7825 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); 7826 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) 7827 { 7828 swapchainImages.resize( swapchainImageCount ); 7829 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR( 7830 m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); 7831 } 7832 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7833 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); 7834 VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); 7835 if ( swapchainImageCount < swapchainImages.size() ) 7836 { 7837 swapchainImages.resize( swapchainImageCount ); 7838 } 7839 return createResultValueType( result, swapchainImages ); 7840 } 7841 7842 template <typename ImageAllocator, 7843 typename Dispatch, 7844 typename B1, 7845 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::Image>::value, int>::type> 7846 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,ImageAllocator & imageAllocator,Dispatch const & d) const7847 Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const 7848 { 7849 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7850 7851 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator ); 7852 uint32_t swapchainImageCount; 7853 
VULKAN_HPP_NAMESPACE::Result result; 7854 do 7855 { 7856 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7857 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); 7858 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) 7859 { 7860 swapchainImages.resize( swapchainImageCount ); 7861 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainImagesKHR( 7862 m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); 7863 } 7864 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 7865 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); 7866 VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); 7867 if ( swapchainImageCount < swapchainImages.size() ) 7868 { 7869 swapchainImages.resize( swapchainImageCount ); 7870 } 7871 return createResultValueType( result, swapchainImages ); 7872 } 7873 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7874 7875 template <typename Dispatch> acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t timeout,VULKAN_HPP_NAMESPACE::Semaphore semaphore,VULKAN_HPP_NAMESPACE::Fence fence,uint32_t * pImageIndex,Dispatch const & d) const7876 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 7877 uint64_t timeout, 7878 VULKAN_HPP_NAMESPACE::Semaphore semaphore, 7879 VULKAN_HPP_NAMESPACE::Fence fence, 7880 uint32_t * pImageIndex, 7881 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7882 { 7883 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7884 return static_cast<Result>( d.vkAcquireNextImageKHR( 7885 m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) ); 7886 } 7887 7888 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7889 
template <typename Dispatch> acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t timeout,VULKAN_HPP_NAMESPACE::Semaphore semaphore,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const7890 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 7891 uint64_t timeout, 7892 VULKAN_HPP_NAMESPACE::Semaphore semaphore, 7893 VULKAN_HPP_NAMESPACE::Fence fence, 7894 Dispatch const & d ) const 7895 { 7896 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7897 7898 uint32_t imageIndex; 7899 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireNextImageKHR( 7900 m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) ); 7901 resultCheck( result, 7902 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", 7903 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 7904 VULKAN_HPP_NAMESPACE::Result::eTimeout, 7905 VULKAN_HPP_NAMESPACE::Result::eNotReady, 7906 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 7907 7908 return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex ); 7909 } 7910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7911 7912 template <typename Dispatch> presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,Dispatch const & d) const7913 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, 7914 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7915 { 7916 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7917 return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) ); 7918 } 7919 7920 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7921 template <typename Dispatch> presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR & 
presentInfo,Dispatch const & d) const7922 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, 7923 Dispatch const & d ) const 7924 { 7925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7926 7927 VULKAN_HPP_NAMESPACE::Result result = 7928 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) ); 7929 resultCheck( 7930 result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 7931 7932 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 7933 } 7934 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7935 7936 template <typename Dispatch> getGroupPresentCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,Dispatch const & d) const7937 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( 7938 VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7939 { 7940 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7941 return static_cast<Result>( 7942 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) ); 7943 } 7944 7945 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7946 template <typename Dispatch> 7947 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const & d) const7948 Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const 7949 { 7950 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7951 7952 VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; 
7953 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 7954 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) ); 7955 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); 7956 7957 return createResultValueType( result, deviceGroupPresentCapabilities ); 7958 } 7959 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7960 7961 template <typename Dispatch> getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,Dispatch const & d) const7962 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7963 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, 7964 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7965 { 7966 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7967 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( 7968 m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); 7969 } 7970 7971 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7972 template <typename Dispatch> 7973 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const7974 Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 7975 { 7976 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7977 7978 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; 7979 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( 7980 m_device, static_cast<VkSurfaceKHR>( surface ), 
reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); 7981 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); 7982 7983 return createResultValueType( result, modes ); 7984 } 7985 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 7986 7987 template <typename Dispatch> getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pRectCount,VULKAN_HPP_NAMESPACE::Rect2D * pRects,Dispatch const & d) const7988 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7989 uint32_t * pRectCount, 7990 VULKAN_HPP_NAMESPACE::Rect2D * pRects, 7991 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7992 { 7993 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 7994 return static_cast<Result>( 7995 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); 7996 } 7997 7998 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7999 template <typename Rect2DAllocator, typename Dispatch> 8000 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const8001 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 8002 { 8003 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8004 8005 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects; 8006 uint32_t rectCount; 8007 VULKAN_HPP_NAMESPACE::Result result; 8008 do 8009 { 8010 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8011 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 8012 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) 8013 { 8014 rects.resize( rectCount ); 
8015 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 8016 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 8017 } 8018 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8019 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 8020 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 8021 if ( rectCount < rects.size() ) 8022 { 8023 rects.resize( rectCount ); 8024 } 8025 return createResultValueType( result, rects ); 8026 } 8027 8028 template <typename Rect2DAllocator, 8029 typename Dispatch, 8030 typename B1, 8031 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::Rect2D>::value, int>::type> 8032 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Rect2DAllocator & rect2DAllocator,Dispatch const & d) const8033 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const 8034 { 8035 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8036 8037 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator ); 8038 uint32_t rectCount; 8039 VULKAN_HPP_NAMESPACE::Result result; 8040 do 8041 { 8042 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8043 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 8044 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) 8045 { 8046 rects.resize( rectCount ); 8047 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 8048 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 8049 } 
8050 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8051 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 8052 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 8053 if ( rectCount < rects.size() ) 8054 { 8055 rects.resize( rectCount ); 8056 } 8057 return createResultValueType( result, rects ); 8058 } 8059 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8060 8061 template <typename Dispatch> acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex,Dispatch const & d) const8062 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, 8063 uint32_t * pImageIndex, 8064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8065 { 8066 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8067 return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) ); 8068 } 8069 8070 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8071 template <typename Dispatch> acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,Dispatch const & d) const8072 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, 8073 Dispatch const & d ) const 8074 { 8075 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8076 8077 uint32_t imageIndex; 8078 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8079 d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) ); 8080 resultCheck( result, 8081 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", 8082 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 8083 VULKAN_HPP_NAMESPACE::Result::eTimeout, 8084 
VULKAN_HPP_NAMESPACE::Result::eNotReady, 8085 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 8086 8087 return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex ); 8088 } 8089 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8090 8091 //=== VK_KHR_display === 8092 8093 template <typename Dispatch> getDisplayPropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,Dispatch const & d) const8094 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, 8095 VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, 8096 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8097 { 8098 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8099 return static_cast<Result>( 8100 d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) ); 8101 } 8102 8103 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8104 template <typename DisplayPropertiesKHRAllocator, typename Dispatch> 8105 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR(Dispatch const & d) const8106 PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const 8107 { 8108 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8109 8110 std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties; 8111 uint32_t propertyCount; 8112 VULKAN_HPP_NAMESPACE::Result result; 8113 do 8114 { 8115 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 8116 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 8117 { 8118 properties.resize( propertyCount ); 8119 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8120 
d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); 8121 } 8122 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8123 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); 8124 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 8125 if ( propertyCount < properties.size() ) 8126 { 8127 properties.resize( propertyCount ); 8128 } 8129 return createResultValueType( result, properties ); 8130 } 8131 8132 template <typename DisplayPropertiesKHRAllocator, 8133 typename Dispatch, 8134 typename B1, 8135 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, int>::type> 8136 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR(DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,Dispatch const & d) const8137 PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const 8138 { 8139 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8140 8141 std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator ); 8142 uint32_t propertyCount; 8143 VULKAN_HPP_NAMESPACE::Result result; 8144 do 8145 { 8146 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 8147 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 8148 { 8149 properties.resize( propertyCount ); 8150 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8151 d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); 8152 } 8153 } 
while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8154 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); 8155 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 8156 if ( propertyCount < properties.size() ) 8157 { 8158 properties.resize( propertyCount ); 8159 } 8160 return createResultValueType( result, properties ); 8161 } 8162 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8163 8164 template <typename Dispatch> getDisplayPlanePropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,Dispatch const & d) const8165 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, 8166 VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, 8167 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8168 { 8169 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8170 return static_cast<Result>( 8171 d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) ); 8172 } 8173 8174 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8175 template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch> 8176 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 8177 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR(Dispatch const & d) const8178 PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const 8179 { 8180 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8181 8182 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties; 8183 uint32_t propertyCount; 8184 VULKAN_HPP_NAMESPACE::Result result; 8185 do 8186 { 8187 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, 
&propertyCount, nullptr ) ); 8188 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 8189 { 8190 properties.resize( propertyCount ); 8191 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 8192 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); 8193 } 8194 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8195 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); 8196 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 8197 if ( propertyCount < properties.size() ) 8198 { 8199 properties.resize( propertyCount ); 8200 } 8201 return createResultValueType( result, properties ); 8202 } 8203 8204 template <typename DisplayPlanePropertiesKHRAllocator, 8205 typename Dispatch, 8206 typename B1, 8207 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, int>::type> 8208 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 8209 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR(DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator,Dispatch const & d) const8210 PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const 8211 { 8212 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8213 8214 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator ); 8215 uint32_t propertyCount; 8216 VULKAN_HPP_NAMESPACE::Result result; 8217 do 8218 { 8219 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 8220 if ( ( result == 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 8221 { 8222 properties.resize( propertyCount ); 8223 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 8224 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); 8225 } 8226 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8227 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); 8228 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 8229 if ( propertyCount < properties.size() ) 8230 { 8231 properties.resize( propertyCount ); 8232 } 8233 return createResultValueType( result, properties ); 8234 } 8235 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8236 8237 template <typename Dispatch> getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,uint32_t * pDisplayCount,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,Dispatch const & d) const8238 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, 8239 uint32_t * pDisplayCount, 8240 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, 8241 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8242 { 8243 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8244 return static_cast<Result>( 8245 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); 8246 } 8247 8248 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8249 template <typename DisplayKHRAllocator, typename Dispatch> 8250 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,Dispatch const & d) const8251 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const 8252 { 8253 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 8254 8255 std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays; 8256 uint32_t displayCount; 8257 VULKAN_HPP_NAMESPACE::Result result; 8258 do 8259 { 8260 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); 8261 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) 8262 { 8263 displays.resize( displayCount ); 8264 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8265 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); 8266 } 8267 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8268 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); 8269 VULKAN_HPP_ASSERT( displayCount <= displays.size() ); 8270 if ( displayCount < displays.size() ) 8271 { 8272 displays.resize( displayCount ); 8273 } 8274 return createResultValueType( result, displays ); 8275 } 8276 8277 template <typename DisplayKHRAllocator, 8278 typename Dispatch, 8279 typename B1, 8280 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::DisplayKHR>::value, int>::type> 8281 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,DisplayKHRAllocator & displayKHRAllocator,Dispatch const & d) const8282 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const 8283 { 8284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8285 8286 std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator ); 8287 uint32_t displayCount; 8288 VULKAN_HPP_NAMESPACE::Result result; 8289 do 8290 { 8291 result 
= static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); 8292 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) 8293 { 8294 displays.resize( displayCount ); 8295 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8296 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); 8297 } 8298 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8299 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); 8300 VULKAN_HPP_ASSERT( displayCount <= displays.size() ); 8301 if ( displayCount < displays.size() ) 8302 { 8303 displays.resize( displayCount ); 8304 } 8305 return createResultValueType( result, displays ); 8306 } 8307 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8308 8309 template <typename Dispatch> getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,Dispatch const & d) const8310 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 8311 uint32_t * pPropertyCount, 8312 VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, 8313 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8314 { 8315 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8316 return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( 8317 m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) ); 8318 } 8319 8320 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8321 template <typename DisplayModePropertiesKHRAllocator, typename Dispatch> 8322 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 8323 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, 
DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const8324 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 8325 { 8326 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8327 8328 std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties; 8329 uint32_t propertyCount; 8330 VULKAN_HPP_NAMESPACE::Result result; 8331 do 8332 { 8333 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8334 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 8335 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 8336 { 8337 properties.resize( propertyCount ); 8338 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR( 8339 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); 8340 } 8341 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8342 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); 8343 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 8344 if ( propertyCount < properties.size() ) 8345 { 8346 properties.resize( propertyCount ); 8347 } 8348 return createResultValueType( result, properties ); 8349 } 8350 8351 template <typename DisplayModePropertiesKHRAllocator, 8352 typename Dispatch, 8353 typename B1, 8354 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, int>::type> 8355 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 8356 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR 
display,DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,Dispatch const & d) const8357 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 8358 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, 8359 Dispatch const & d ) const 8360 { 8361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8362 8363 std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator ); 8364 uint32_t propertyCount; 8365 VULKAN_HPP_NAMESPACE::Result result; 8366 do 8367 { 8368 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8369 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 8370 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 8371 { 8372 properties.resize( propertyCount ); 8373 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModePropertiesKHR( 8374 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); 8375 } 8376 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 8377 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); 8378 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 8379 if ( propertyCount < properties.size() ) 8380 { 8381 properties.resize( propertyCount ); 8382 } 8383 return createResultValueType( result, properties ); 8384 } 8385 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8386 8387 template <typename Dispatch> createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,Dispatch const & d) const8388 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 8389 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, 8390 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8391 VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, 8392 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8393 { 8394 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8395 return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, 8396 static_cast<VkDisplayKHR>( display ), 8397 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), 8398 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8399 reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) ); 8400 } 8401 8402 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8403 template <typename Dispatch> 8404 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8405 PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 8406 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, 8407 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8408 Dispatch const & d ) const 8409 { 8410 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8411 8412 VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; 8413 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8414 d.vkCreateDisplayModeKHR( m_physicalDevice, 8415 static_cast<VkDisplayKHR>( display ), 8416 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), 8417 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8418 reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); 8419 resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); 8420 8421 return createResultValueType( result, mode ); 8422 } 8423 8424 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8425 template <typename Dispatch> 8426 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type createDisplayModeKHRUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8427 PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, 8428 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, 8429 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8430 Dispatch const & d ) const 8431 { 8432 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8433 8434 VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; 8435 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8436 d.vkCreateDisplayModeKHR( m_physicalDevice, 8437 static_cast<VkDisplayKHR>( display ), 8438 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), 8439 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8440 reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); 8441 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); 8442 8443 return createResultValueType( 8444 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) ); 8445 } 8446 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 8447 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8448 8449 template <typename Dispatch> 8450 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,uint32_t 
planeIndex,VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,Dispatch const & d) const8451 PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, 8452 uint32_t planeIndex, 8453 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, 8454 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8455 { 8456 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8457 return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( 8458 m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) ); 8459 } 8460 8461 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8462 template <typename Dispatch> 8463 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,uint32_t planeIndex,Dispatch const & d) const8464 PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const 8465 { 8466 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8467 8468 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; 8469 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilitiesKHR( 8470 m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) ); 8471 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); 8472 8473 return createResultValueType( result, capabilities ); 8474 } 8475 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8476 8477 template <typename Dispatch> createDisplayPlaneSurfaceKHR(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const8478 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, 8479 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8480 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 8481 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8482 { 8483 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8484 return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, 8485 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), 8486 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8487 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 8488 } 8489 8490 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8491 template <typename Dispatch> 8492 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8493 Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, 8494 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8495 Dispatch const & d ) const 8496 { 8497 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8498 8499 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8500 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR( 8501 m_instance, 8502 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), 8503 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8504 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8505 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createDisplayPlaneSurfaceKHR" ); 8506 8507 return createResultValueType( result, surface ); 8508 } 8509 8510 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8511 template <typename Dispatch> 8512 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDisplayPlaneSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8513 Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, 8514 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8515 Dispatch const & d ) const 8516 { 8517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8518 8519 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8520 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDisplayPlaneSurfaceKHR( 8521 m_instance, 8522 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), 8523 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8524 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8525 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); 8526 8527 return createResultValueType( 8528 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 8529 } 8530 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 8531 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8532 8533 //=== VK_KHR_display_swapchain === 8534 8535 template <typename Dispatch> createSharedSwapchainsKHR(uint32_t swapchainCount,const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,Dispatch const & 
d) const8536 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, 8537 const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, 8538 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8539 VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, 8540 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8541 { 8542 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8543 return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 8544 swapchainCount, 8545 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), 8546 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8547 reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) ); 8548 } 8549 8550 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8551 template <typename SwapchainKHRAllocator, typename Dispatch> 8552 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8553 Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 8554 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8555 Dispatch const & d ) const 8556 { 8557 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8558 8559 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() ); 8560 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 8561 m_device, 8562 createInfos.size(), 8563 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 8564 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8565 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 8566 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); 8567 8568 return createResultValueType( result, swapchains ); 8569 } 8570 8571 template <typename SwapchainKHRAllocator, 8572 typename Dispatch, 8573 typename B0, 8574 typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, int>::type> 8575 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,SwapchainKHRAllocator & swapchainKHRAllocator,Dispatch const & d) const8576 Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 8577 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8578 SwapchainKHRAllocator & swapchainKHRAllocator, 8579 Dispatch const & d ) const 8580 { 8581 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8582 8583 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator ); 8584 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 8585 m_device, 8586 createInfos.size(), 8587 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 8588 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8589 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 8590 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); 8591 8592 return 
createResultValueType( result, swapchains ); 8593 } 8594 8595 template <typename Dispatch> 8596 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSharedSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8597 Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 8598 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8599 Dispatch const & d ) const 8600 { 8601 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8602 8603 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 8604 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 8605 m_device, 8606 1, 8607 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 8608 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8609 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 8610 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); 8611 8612 return createResultValueType( result, swapchain ); 8613 } 8614 8615 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8616 template <typename Dispatch, typename SwapchainKHRAllocator> 8617 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 8618 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8619 Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 8620 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8621 Dispatch const & d ) const 8622 { 8623 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8624 8625 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() ); 8626 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 8627 m_device, 8628 createInfos.size(), 8629 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 8630 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8631 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 8632 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); 8633 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains; 8634 uniqueSwapchains.reserve( createInfos.size() ); 8635 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 8636 for ( auto const & swapchain : swapchains ) 8637 { 8638 uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) ); 8639 } 8640 return createResultValueType( result, std::move( uniqueSwapchains ) ); 8641 } 8642 8643 template <typename Dispatch, 8644 typename SwapchainKHRAllocator, 8645 typename B0, 8646 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::value, int>::type> 8647 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 8648 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,SwapchainKHRAllocator & swapchainKHRAllocator,Dispatch const & d) const8649 
Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 8650 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8651 SwapchainKHRAllocator & swapchainKHRAllocator, 8652 Dispatch const & d ) const 8653 { 8654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8655 8656 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() ); 8657 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 8658 m_device, 8659 createInfos.size(), 8660 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 8661 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8662 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 8663 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); 8664 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); 8665 uniqueSwapchains.reserve( createInfos.size() ); 8666 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 8667 for ( auto const & swapchain : swapchains ) 8668 { 8669 uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) ); 8670 } 8671 return createResultValueType( result, std::move( uniqueSwapchains ) ); 8672 } 8673 8674 template <typename Dispatch> 8675 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8676 Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 8677 
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8678 Dispatch const & d ) const 8679 { 8680 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8681 8682 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; 8683 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSharedSwapchainsKHR( 8684 m_device, 8685 1, 8686 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 8687 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8688 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 8689 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); 8690 8691 return createResultValueType( 8692 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 8693 } 8694 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 8695 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8696 8697 #if defined( VK_USE_PLATFORM_XLIB_KHR ) 8698 //=== VK_KHR_xlib_surface === 8699 8700 template <typename Dispatch> createXlibSurfaceKHR(const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const8701 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, 8702 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8703 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 8704 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8705 { 8706 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8707 return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, 8708 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), 8709 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8710 
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 8711 } 8712 8713 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8714 template <typename Dispatch> 8715 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR(const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8716 Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, 8717 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8718 Dispatch const & d ) const 8719 { 8720 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8721 8722 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8723 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8724 d.vkCreateXlibSurfaceKHR( m_instance, 8725 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), 8726 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8727 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8728 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); 8729 8730 return createResultValueType( result, surface ); 8731 } 8732 8733 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8734 template <typename Dispatch> 8735 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXlibSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8736 Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, 8737 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8738 Dispatch const & d ) const 8739 { 8740 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 8741 8742 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8743 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8744 d.vkCreateXlibSurfaceKHR( m_instance, 8745 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), 8746 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8747 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8748 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); 8749 8750 return createResultValueType( 8751 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 8752 } 8753 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 8754 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8755 8756 template <typename Dispatch> 8757 VULKAN_HPP_INLINE Bool32 getXlibPresentationSupportKHR(uint32_t queueFamilyIndex,Display * dpy,VisualID visualID,Dispatch const & d) const8758 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8759 { 8760 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8761 return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); 8762 } 8763 8764 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8765 template <typename Dispatch> 8766 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR(uint32_t queueFamilyIndex,Display & dpy,VisualID visualID,Dispatch const & d) const8767 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8768 { 8769 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8770 8771 VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, 
queueFamilyIndex, &dpy, visualID ); 8772 8773 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 8774 } 8775 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8776 #endif /*VK_USE_PLATFORM_XLIB_KHR*/ 8777 8778 #if defined( VK_USE_PLATFORM_XCB_KHR ) 8779 //=== VK_KHR_xcb_surface === 8780 8781 template <typename Dispatch> createXcbSurfaceKHR(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const8782 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, 8783 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8784 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 8785 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8786 { 8787 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8788 return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, 8789 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), 8790 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8791 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 8792 } 8793 8794 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8795 template <typename Dispatch> 8796 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8797 Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, 8798 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8799 Dispatch const & d ) const 8800 { 8801 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8802 8803 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8804 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 8805 d.vkCreateXcbSurfaceKHR( m_instance, 8806 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), 8807 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8808 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8809 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); 8810 8811 return createResultValueType( result, surface ); 8812 } 8813 8814 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8815 template <typename Dispatch> 8816 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXcbSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8817 Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, 8818 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8819 Dispatch const & d ) const 8820 { 8821 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8822 8823 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8824 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 8825 d.vkCreateXcbSurfaceKHR( m_instance, 8826 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), 8827 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8828 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8829 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); 8830 8831 return createResultValueType( 8832 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 8833 } 8834 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 8835 # endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8836 8837 template <typename Dispatch> getXcbPresentationSupportKHR(uint32_t queueFamilyIndex,xcb_connection_t * connection,xcb_visualid_t visual_id,Dispatch const & d) const8838 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, 8839 xcb_connection_t * connection, 8840 xcb_visualid_t visual_id, 8841 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8842 { 8843 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8844 return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); 8845 } 8846 8847 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8848 template <typename Dispatch> getXcbPresentationSupportKHR(uint32_t queueFamilyIndex,xcb_connection_t & connection,xcb_visualid_t visual_id,Dispatch const & d) const8849 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, 8850 xcb_connection_t & connection, 8851 xcb_visualid_t visual_id, 8852 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8853 { 8854 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8855 8856 VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); 8857 8858 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 8859 } 8860 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8861 #endif /*VK_USE_PLATFORM_XCB_KHR*/ 8862 8863 #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) 8864 //=== VK_KHR_wayland_surface === 8865 8866 template <typename Dispatch> createWaylandSurfaceKHR(const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const8867 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const 
VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, 8868 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8869 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 8870 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8871 { 8872 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8873 return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, 8874 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), 8875 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8876 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 8877 } 8878 8879 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8880 template <typename Dispatch> 8881 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR(const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8882 Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, 8883 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8884 Dispatch const & d ) const 8885 { 8886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8887 8888 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8889 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR( 8890 m_instance, 8891 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), 8892 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8893 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8894 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); 8895 8896 return createResultValueType( result, surface ); 8897 } 8898 8899 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8900 template <typename Dispatch> 8901 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWaylandSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8902 Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, 8903 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8904 Dispatch const & d ) const 8905 { 8906 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8907 8908 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8909 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateWaylandSurfaceKHR( 8910 m_instance, 8911 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), 8912 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8913 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8914 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); 8915 8916 return createResultValueType( 8917 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 8918 } 8919 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 8920 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8921 8922 template <typename Dispatch> getWaylandPresentationSupportKHR(uint32_t queueFamilyIndex,struct wl_display * display,Dispatch const & d) const8923 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, 8924 struct wl_display * display, 8925 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8926 { 8927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8928 return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); 8929 } 
8930 8931 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8932 template <typename Dispatch> 8933 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR(uint32_t queueFamilyIndex,struct wl_display & display,Dispatch const & d) const8934 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8935 { 8936 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8937 8938 VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); 8939 8940 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 8941 } 8942 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 8943 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ 8944 8945 #if defined( VK_USE_PLATFORM_ANDROID_KHR ) 8946 //=== VK_KHR_android_surface === 8947 8948 template <typename Dispatch> createAndroidSurfaceKHR(const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const8949 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, 8950 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8951 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 8952 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8953 { 8954 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8955 return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, 8956 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), 8957 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8958 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 8959 } 8960 8961 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8962 template <typename Dispatch> 8963 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR(const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8964 Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, 8965 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8966 Dispatch const & d ) const 8967 { 8968 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8969 8970 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8971 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAndroidSurfaceKHR( 8972 m_instance, 8973 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), 8974 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8975 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8976 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); 8977 8978 return createResultValueType( result, surface ); 8979 } 8980 8981 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8982 template <typename Dispatch> 8983 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createAndroidSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const8984 Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, 8985 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 8986 Dispatch const & d ) const 8987 { 8988 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 8989 8990 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8991 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkCreateAndroidSurfaceKHR( 8992 m_instance, 8993 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), 8994 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8995 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8996 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); 8997 8998 return createResultValueType( 8999 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 9000 } 9001 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9002 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9003 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ 9004 9005 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 9006 //=== VK_KHR_win32_surface === 9007 9008 template <typename Dispatch> createWin32SurfaceKHR(const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const9009 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, 9010 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9011 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 9012 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9013 { 9014 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9015 return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, 9016 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), 9017 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9018 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 9019 } 9020 9021 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9022 template <typename Dispatch> 9023 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR(const 
VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9024 Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, 9025 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9026 Dispatch const & d ) const 9027 { 9028 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9029 9030 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 9031 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9032 d.vkCreateWin32SurfaceKHR( m_instance, 9033 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), 9034 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9035 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 9036 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); 9037 9038 return createResultValueType( result, surface ); 9039 } 9040 9041 # ifndef VULKAN_HPP_NO_SMART_HANDLE 9042 template <typename Dispatch> 9043 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWin32SurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9044 Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, 9045 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9046 Dispatch const & d ) const 9047 { 9048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9049 9050 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 9051 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9052 d.vkCreateWin32SurfaceKHR( m_instance, 9053 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo 
), 9054 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9055 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 9056 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); 9057 9058 return createResultValueType( 9059 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 9060 } 9061 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9062 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9063 9064 template <typename Dispatch> getWin32PresentationSupportKHR(uint32_t queueFamilyIndex,Dispatch const & d) const9065 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9066 { 9067 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9068 return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); 9069 } 9070 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 9071 9072 #if defined( VK_USE_PLATFORM_OHOS ) 9073 //=== VK_OHOS_surface === 9074 9075 template <typename Dispatch> 9076 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createSurfaceOHOS(const VULKAN_HPP_NAMESPACE::SurfaceCreateInfoOHOS * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const9077 Instance::createSurfaceOHOS( const VULKAN_HPP_NAMESPACE::SurfaceCreateInfoOHOS * pCreateInfo, 9078 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9079 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 9080 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9081 { 9082 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9083 return static_cast<Result>( 9084 d.vkCreateSurfaceOHOS( m_instance, 9085 reinterpret_cast<const VkSurfaceCreateInfoOHOS *>( pCreateInfo ), 9086 reinterpret_cast<const 
VkAllocationCallbacks *>( pAllocator ), 9087 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 9088 } 9089 9090 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9091 template <typename Dispatch> 9092 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 9093 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createSurfaceOHOS(const VULKAN_HPP_NAMESPACE::SurfaceCreateInfoOHOS & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9094 Instance::createSurfaceOHOS( const VULKAN_HPP_NAMESPACE::SurfaceCreateInfoOHOS & createInfo, 9095 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9096 Dispatch const & d ) const 9097 { 9098 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9099 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 9100 VkResult result = d.vkCreateSurfaceOHOS( 9101 m_instance, 9102 reinterpret_cast<const VkSurfaceCreateInfoOHOS *>( &createInfo ), 9103 reinterpret_cast<const VkAllocationCallbacks *>( 9104 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9105 reinterpret_cast<VkSurfaceKHR *>( &surface ) ); 9106 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), 9107 VULKAN_HPP_NAMESPACE_STRING "::Instance::createSurfaceOHOS" ); 9108 9109 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); 9110 } 9111 9112 # ifndef VULKAN_HPP_NO_SMART_HANDLE 9113 template <typename Dispatch> 9114 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 9115 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createSurfaceOHOSUnique(const VULKAN_HPP_NAMESPACE::SurfaceCreateInfoOHOS & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9116 Instance::createSurfaceOHOSUnique( const VULKAN_HPP_NAMESPACE::SurfaceCreateInfoOHOS & createInfo, 9117 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9118 
Dispatch const & d ) const 9119 { 9120 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9121 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 9122 VkResult result = d.vkCreateSurfaceOHOS( 9123 m_instance, 9124 reinterpret_cast<const VkSurfaceCreateInfoOHOS *>( &createInfo ), 9125 reinterpret_cast<const VkAllocationCallbacks *>( 9126 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9127 reinterpret_cast<VkSurfaceKHR *>( &surface ) ); 9128 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), 9129 VULKAN_HPP_NAMESPACE_STRING "::Instance::createSurfaceOHOSUnique" ); 9130 9131 return createResultValueType( 9132 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), 9133 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 9134 surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 9135 } 9136 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 9137 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 9138 #endif /*VK_USE_PLATFORM_OHOS*/ 9139 9140 //=== VK_EXT_debug_report === 9141 9142 template <typename Dispatch> 9143 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDebugReportCallbackEXT(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,Dispatch const & d) const9144 Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, 9145 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9146 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, 9147 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9148 { 9149 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9150 return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, 9151 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), 9152 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9153 
reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) ); 9154 } 9155 9156 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9157 template <typename Dispatch> 9158 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9159 Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, 9160 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9161 Dispatch const & d ) const 9162 { 9163 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9164 9165 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; 9166 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT( 9167 m_instance, 9168 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), 9169 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9170 reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) ); 9171 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); 9172 9173 return createResultValueType( result, callback ); 9174 } 9175 9176 # ifndef VULKAN_HPP_NO_SMART_HANDLE 9177 template <typename Dispatch> 9178 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type createDebugReportCallbackEXTUnique(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9179 Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, 9180 
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9181 Dispatch const & d ) const 9182 { 9183 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9184 9185 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; 9186 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugReportCallbackEXT( 9187 m_instance, 9188 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), 9189 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9190 reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) ); 9191 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); 9192 9193 return createResultValueType( 9194 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( callback, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 9195 } 9196 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9197 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9198 9199 template <typename Dispatch> destroyDebugReportCallbackEXT(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9200 VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 9201 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9202 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9203 { 9204 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9205 d.vkDestroyDebugReportCallbackEXT( 9206 m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9207 } 9208 9209 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9210 template <typename Dispatch> destroyDebugReportCallbackEXT(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9211 VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 9212 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9213 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9214 { 9215 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9216 9217 d.vkDestroyDebugReportCallbackEXT( 9218 m_instance, 9219 static_cast<VkDebugReportCallbackEXT>( callback ), 9220 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9221 } 9222 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9223 9224 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9225 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 9226 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9227 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9228 { 9229 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9230 d.vkDestroyDebugReportCallbackEXT( 9231 m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9232 } 9233 9234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9235 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9236 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 9237 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9238 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9239 { 9240 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9241 9242 d.vkDestroyDebugReportCallbackEXT( 9243 m_instance, 9244 
static_cast<VkDebugReportCallbackEXT>( callback ), 9245 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9246 } 9247 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9248 9249 template <typename Dispatch> debugReportMessageEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,uint64_t object,size_t location,int32_t messageCode,const char * pLayerPrefix,const char * pMessage,Dispatch const & d) const9250 VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, 9251 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, 9252 uint64_t object, 9253 size_t location, 9254 int32_t messageCode, 9255 const char * pLayerPrefix, 9256 const char * pMessage, 9257 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9258 { 9259 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9260 d.vkDebugReportMessageEXT( m_instance, 9261 static_cast<VkDebugReportFlagsEXT>( flags ), 9262 static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), 9263 object, 9264 location, 9265 messageCode, 9266 pLayerPrefix, 9267 pMessage ); 9268 } 9269 9270 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9271 template <typename Dispatch> debugReportMessageEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,uint64_t object,size_t location,int32_t messageCode,const std::string & layerPrefix,const std::string & message,Dispatch const & d) const9272 VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, 9273 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, 9274 uint64_t object, 9275 size_t location, 9276 int32_t messageCode, 9277 const std::string & layerPrefix, 9278 const std::string & message, 9279 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9280 { 9281 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 9282 9283 d.vkDebugReportMessageEXT( m_instance, 9284 static_cast<VkDebugReportFlagsEXT>( flags ), 9285 static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), 9286 object, 9287 location, 9288 messageCode, 9289 layerPrefix.c_str(), 9290 message.c_str() ); 9291 } 9292 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9293 9294 //=== VK_EXT_debug_marker === 9295 9296 template <typename Dispatch> debugMarkerSetObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,Dispatch const & d) const9297 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, 9298 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9299 { 9300 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9301 return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) ); 9302 } 9303 9304 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9305 template <typename Dispatch> 9306 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type debugMarkerSetObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo,Dispatch const & d) const9307 Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const 9308 { 9309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9310 9311 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9312 d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) ); 9313 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); 9314 9315 return createResultValueType( result ); 9316 } 9317 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9318 9319 template <typename Dispatch> debugMarkerSetObjectNameEXT(const 
VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,Dispatch const & d) const9320 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, 9321 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9322 { 9323 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9324 return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) ); 9325 } 9326 9327 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9328 template <typename Dispatch> 9329 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type debugMarkerSetObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo,Dispatch const & d) const9330 Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const 9331 { 9332 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9333 9334 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9335 d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) ); 9336 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); 9337 9338 return createResultValueType( result ); 9339 } 9340 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9341 9342 template <typename Dispatch> debugMarkerBeginEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,Dispatch const & d) const9343 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, 9344 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9345 { 9346 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9347 d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( 
pMarkerInfo ) ); 9348 } 9349 9350 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9351 template <typename Dispatch> debugMarkerBeginEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,Dispatch const & d) const9352 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, 9353 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9354 { 9355 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9356 9357 d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); 9358 } 9359 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9360 9361 template <typename Dispatch> debugMarkerEndEXT(Dispatch const & d) const9362 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9363 { 9364 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9365 d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); 9366 } 9367 9368 template <typename Dispatch> debugMarkerInsertEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,Dispatch const & d) const9369 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, 9370 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9371 { 9372 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9373 d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); 9374 } 9375 9376 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9377 template <typename Dispatch> debugMarkerInsertEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,Dispatch const & d) const9378 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, 9379 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9380 { 9381 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9382 
9383 d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); 9384 } 9385 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9386 9387 //=== VK_KHR_video_queue === 9388 9389 template <typename Dispatch> getVideoCapabilitiesKHR(const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,Dispatch const & d) const9390 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, 9391 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, 9392 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9393 { 9394 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9395 return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( 9396 m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) ); 9397 } 9398 9399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9400 template <typename Dispatch> 9401 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type getVideoCapabilitiesKHR(const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile,Dispatch const & d) const9402 PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const 9403 { 9404 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9405 9406 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; 9407 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( 9408 m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) ); 9409 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" 
); 9410 9411 return createResultValueType( result, capabilities ); 9412 } 9413 9414 template <typename X, typename Y, typename... Z, typename Dispatch> 9415 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getVideoCapabilitiesKHR(const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile,Dispatch const & d) const9416 PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const 9417 { 9418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9419 9420 StructureChain<X, Y, Z...> structureChain; 9421 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>(); 9422 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( 9423 m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) ); 9424 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); 9425 9426 return createResultValueType( result, structureChain ); 9427 } 9428 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9429 9430 template <typename Dispatch> 9431 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,uint32_t * pVideoFormatPropertyCount,VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,Dispatch const & d) const9432 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, 9433 uint32_t * pVideoFormatPropertyCount, 9434 VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, 9435 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9436 { 9437 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 9438 return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 9439 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ), 9440 pVideoFormatPropertyCount, 9441 reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) ); 9442 } 9443 9444 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9445 template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch> 9446 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9447 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,Dispatch const & d) const9448 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const 9449 { 9450 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9451 9452 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties; 9453 uint32_t videoFormatPropertyCount; 9454 VULKAN_HPP_NAMESPACE::Result result; 9455 do 9456 { 9457 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 9458 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); 9459 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) 9460 { 9461 videoFormatProperties.resize( videoFormatPropertyCount ); 9462 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9463 d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 9464 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 9465 &videoFormatPropertyCount, 9466 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 9467 } 9468 } while ( 
result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9469 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 9470 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 9471 if ( videoFormatPropertyCount < videoFormatProperties.size() ) 9472 { 9473 videoFormatProperties.resize( videoFormatPropertyCount ); 9474 } 9475 return createResultValueType( result, videoFormatProperties ); 9476 } 9477 9478 template <typename VideoFormatPropertiesKHRAllocator, 9479 typename Dispatch, 9480 typename B1, 9481 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, int>::type> 9482 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9483 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,Dispatch const & d) const9484 PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, 9485 VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, 9486 Dispatch const & d ) const 9487 { 9488 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9489 9490 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator ); 9491 uint32_t videoFormatPropertyCount; 9492 VULKAN_HPP_NAMESPACE::Result result; 9493 do 9494 { 9495 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 9496 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); 9497 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
videoFormatPropertyCount ) 9498 { 9499 videoFormatProperties.resize( videoFormatPropertyCount ); 9500 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9501 d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, 9502 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 9503 &videoFormatPropertyCount, 9504 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 9505 } 9506 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9507 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 9508 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 9509 if ( videoFormatPropertyCount < videoFormatProperties.size() ) 9510 { 9511 videoFormatProperties.resize( videoFormatPropertyCount ); 9512 } 9513 return createResultValueType( result, videoFormatProperties ); 9514 } 9515 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9516 9517 template <typename Dispatch> createVideoSessionKHR(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,Dispatch const & d) const9518 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, 9519 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9520 VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, 9521 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9522 { 9523 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9524 return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device, 9525 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), 9526 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9527 reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) ); 9528 } 9529 9530 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9531 template 
<typename Dispatch> 9532 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type createVideoSessionKHR(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9533 Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, 9534 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9535 Dispatch const & d ) const 9536 { 9537 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9538 9539 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 9540 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9541 d.vkCreateVideoSessionKHR( m_device, 9542 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 9543 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9544 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 9545 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); 9546 9547 return createResultValueType( result, videoSession ); 9548 } 9549 9550 # ifndef VULKAN_HPP_NO_SMART_HANDLE 9551 template <typename Dispatch> 9552 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type createVideoSessionKHRUnique(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9553 Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, 9554 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9555 Dispatch const & d ) const 9556 { 9557 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9558 9559 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 9560 
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9561 d.vkCreateVideoSessionKHR( m_device, 9562 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 9563 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9564 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 9565 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); 9566 9567 return createResultValueType( 9568 result, UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 9569 } 9570 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9571 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9572 9573 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9574 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 9575 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9576 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9577 { 9578 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9579 d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9580 } 9581 9582 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9583 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9584 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 9585 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9586 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9587 { 9588 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9589 9590 d.vkDestroyVideoSessionKHR( 9591 m_device, 9592 static_cast<VkVideoSessionKHR>( videoSession ), 9593 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9594 } 9595 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9596 9597 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9598 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 9599 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9600 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9601 { 9602 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9603 d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9604 } 9605 9606 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9607 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9608 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 9609 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9610 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9611 { 9612 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9613 9614 d.vkDestroyVideoSessionKHR( 9615 m_device, 9616 static_cast<VkVideoSessionKHR>( videoSession ), 9617 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9618 } 9619 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9620 9621 template <typename Dispatch> 9622 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR 
videoSession,uint32_t * pMemoryRequirementsCount,VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,Dispatch const & d) const9623 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 9624 uint32_t * pMemoryRequirementsCount, 9625 VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, 9626 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9627 { 9628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9629 return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 9630 static_cast<VkVideoSessionKHR>( videoSession ), 9631 pMemoryRequirementsCount, 9632 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) ); 9633 } 9634 9635 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9636 template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch> 9637 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9638 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Dispatch const & d) const9639 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const 9640 { 9641 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9642 9643 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements; 9644 uint32_t memoryRequirementsCount; 9645 VULKAN_HPP_NAMESPACE::Result result; 9646 do 9647 { 9648 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9649 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); 9650 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) 9651 { 9652 memoryRequirements.resize( 
memoryRequirementsCount ); 9653 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9654 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 9655 static_cast<VkVideoSessionKHR>( videoSession ), 9656 &memoryRequirementsCount, 9657 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); 9658 } 9659 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9660 9661 VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); 9662 if ( memoryRequirementsCount < memoryRequirements.size() ) 9663 { 9664 memoryRequirements.resize( memoryRequirementsCount ); 9665 } 9666 return memoryRequirements; 9667 } 9668 9669 template <typename VideoSessionMemoryRequirementsKHRAllocator, 9670 typename Dispatch, 9671 typename B1, 9672 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, int>::type> 9673 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 9674 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,Dispatch const & d) const9675 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 9676 VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, 9677 Dispatch const & d ) const 9678 { 9679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9680 9681 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements( 9682 videoSessionMemoryRequirementsKHRAllocator ); 9683 uint32_t memoryRequirementsCount; 9684 VULKAN_HPP_NAMESPACE::Result result; 9685 do 9686 { 9687 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9688 
d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); 9689 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) 9690 { 9691 memoryRequirements.resize( memoryRequirementsCount ); 9692 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9693 d.vkGetVideoSessionMemoryRequirementsKHR( m_device, 9694 static_cast<VkVideoSessionKHR>( videoSession ), 9695 &memoryRequirementsCount, 9696 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); 9697 } 9698 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 9699 9700 VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); 9701 if ( memoryRequirementsCount < memoryRequirements.size() ) 9702 { 9703 memoryRequirements.resize( memoryRequirementsCount ); 9704 } 9705 return memoryRequirements; 9706 } 9707 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9708 9709 template <typename Dispatch> 9710 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result bindVideoSessionMemoryKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,uint32_t bindSessionMemoryInfoCount,const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,Dispatch const & d) const9711 Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 9712 uint32_t bindSessionMemoryInfoCount, 9713 const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, 9714 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9715 { 9716 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9717 return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device, 9718 static_cast<VkVideoSessionKHR>( videoSession ), 9719 bindSessionMemoryInfoCount, 9720 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) ); 9721 } 9722 9723 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9724 template <typename Dispatch> 
bindVideoSessionMemoryKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,Dispatch const & d) const9725 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR( 9726 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 9727 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, 9728 Dispatch const & d ) const 9729 { 9730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9731 9732 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9733 d.vkBindVideoSessionMemoryKHR( m_device, 9734 static_cast<VkVideoSessionKHR>( videoSession ), 9735 bindSessionMemoryInfos.size(), 9736 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) ) ); 9737 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); 9738 9739 return createResultValueType( result ); 9740 } 9741 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9742 9743 template <typename Dispatch> 9744 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,Dispatch const & d) const9745 Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, 9746 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9747 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, 9748 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9749 { 9750 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9751 return static_cast<Result>( 
d.vkCreateVideoSessionParametersKHR( m_device, 9752 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ), 9753 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 9754 reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) ); 9755 } 9756 9757 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9758 template <typename Dispatch> 9759 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type createVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9760 Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, 9761 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9762 Dispatch const & d ) const 9763 { 9764 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9765 9766 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; 9767 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR( 9768 m_device, 9769 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), 9770 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9771 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); 9772 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); 9773 9774 return createResultValueType( result, videoSessionParameters ); 9775 } 9776 9777 # ifndef VULKAN_HPP_NO_SMART_HANDLE 9778 template <typename Dispatch> 9779 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type createVideoSessionParametersKHRUnique(const 
VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9780 Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, 9781 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9782 Dispatch const & d ) const 9783 { 9784 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9785 9786 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; 9787 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateVideoSessionParametersKHR( 9788 m_device, 9789 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), 9790 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 9791 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); 9792 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); 9793 9794 return createResultValueType( result, 9795 UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>( 9796 videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 9797 } 9798 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 9799 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9800 9801 template <typename Dispatch> 9802 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result updateVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,Dispatch const & d) const9803 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 9804 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, 9805 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9806 { 9807 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9808 return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device, 9809 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 9810 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) ); 9811 } 9812 9813 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9814 template <typename Dispatch> 9815 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type updateVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,Dispatch const & d) const9816 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 9817 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, 9818 Dispatch const & d ) const 9819 { 9820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9821 9822 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 9823 d.vkUpdateVideoSessionParametersKHR( m_device, 9824 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 9825 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) ); 9826 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); 9827 9828 return createResultValueType( result ); 9829 } 9830 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9831 9832 template <typename Dispatch> destroyVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9833 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 9834 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9835 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 9836 { 9837 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9838 d.vkDestroyVideoSessionParametersKHR( 9839 m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9840 } 9841 9842 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9843 template <typename Dispatch> destroyVideoSessionParametersKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9844 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 9845 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9846 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9847 { 9848 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9849 9850 d.vkDestroyVideoSessionParametersKHR( 9851 m_device, 9852 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 9853 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9854 } 9855 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9856 9857 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const9858 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 9859 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 9860 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9861 { 9862 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9863 d.vkDestroyVideoSessionParametersKHR( 9864 m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 9865 } 9866 9867 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 9868 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const9869 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, 9870 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 9871 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9872 { 9873 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9874 9875 d.vkDestroyVideoSessionParametersKHR( 9876 m_device, 9877 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), 9878 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 9879 } 9880 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9881 9882 template <typename Dispatch> beginVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,Dispatch const & d) const9883 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, 9884 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9885 { 9886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9887 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) ); 9888 } 9889 9890 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9891 template <typename Dispatch> beginVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,Dispatch const & d) const9892 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, 9893 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9894 { 9895 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9896 9897 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) 
); 9898 } 9899 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9900 9901 template <typename Dispatch> endVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,Dispatch const & d) const9902 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, 9903 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9904 { 9905 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9906 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) ); 9907 } 9908 9909 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9910 template <typename Dispatch> endVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,Dispatch const & d) const9911 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, 9912 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9913 { 9914 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9915 9916 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) ); 9917 } 9918 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9919 9920 template <typename Dispatch> controlVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,Dispatch const & d) const9921 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, 9922 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9923 { 9924 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9925 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) ); 9926 } 9927 9928 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9929 template <typename Dispatch> controlVideoCodingKHR(const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,Dispatch const & 
d) const9930 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, 9931 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9932 { 9933 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9934 9935 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) ); 9936 } 9937 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9938 9939 //=== VK_KHR_video_decode_queue === 9940 9941 template <typename Dispatch> decodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,Dispatch const & d) const9942 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, 9943 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9944 { 9945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9946 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) ); 9947 } 9948 9949 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9950 template <typename Dispatch> decodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,Dispatch const & d) const9951 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, 9952 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9953 { 9954 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9955 9956 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) ); 9957 } 9958 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 9959 9960 //=== VK_EXT_transform_feedback === 9961 9962 template <typename Dispatch> bindTransformFeedbackBuffersEXT(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,Dispatch const & d) const9963 VULKAN_HPP_INLINE void 
CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, 9964 uint32_t bindingCount, 9965 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 9966 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 9967 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 9968 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9969 { 9970 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9971 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, 9972 firstBinding, 9973 bindingCount, 9974 reinterpret_cast<const VkBuffer *>( pBuffers ), 9975 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 9976 reinterpret_cast<const VkDeviceSize *>( pSizes ) ); 9977 } 9978 9979 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9980 template <typename Dispatch> 9981 VULKAN_HPP_INLINE void bindTransformFeedbackBuffersEXT(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,Dispatch const & d) const9982 CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, 9983 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 9984 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 9985 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 9986 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 9987 { 9988 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 9989 # ifdef VULKAN_HPP_NO_EXCEPTIONS 9990 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 9991 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 9992 # else 9993 if ( buffers.size() != offsets.size() ) 9994 { 9995 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); 
9996 } 9997 if ( !sizes.empty() && buffers.size() != sizes.size() ) 9998 { 9999 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); 10000 } 10001 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 10002 10003 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, 10004 firstBinding, 10005 buffers.size(), 10006 reinterpret_cast<const VkBuffer *>( buffers.data() ), 10007 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 10008 reinterpret_cast<const VkDeviceSize *>( sizes.data() ) ); 10009 } 10010 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10011 10012 template <typename Dispatch> beginTransformFeedbackEXT(uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,Dispatch const & d) const10013 VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, 10014 uint32_t counterBufferCount, 10015 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, 10016 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, 10017 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10018 { 10019 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10020 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, 10021 firstCounterBuffer, 10022 counterBufferCount, 10023 reinterpret_cast<const VkBuffer *>( pCounterBuffers ), 10024 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); 10025 } 10026 10027 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10028 template <typename Dispatch> 10029 VULKAN_HPP_INLINE void beginTransformFeedbackEXT(uint32_t firstCounterBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,Dispatch const & d) const10030 CommandBuffer::beginTransformFeedbackEXT( uint32_t 
firstCounterBuffer, 10031 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, 10032 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, 10033 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 10034 { 10035 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10036 # ifdef VULKAN_HPP_NO_EXCEPTIONS 10037 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); 10038 # else 10039 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) 10040 { 10041 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); 10042 } 10043 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 10044 10045 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, 10046 firstCounterBuffer, 10047 counterBuffers.size(), 10048 reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), 10049 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); 10050 } 10051 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10052 10053 template <typename Dispatch> endTransformFeedbackEXT(uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,Dispatch const & d) const10054 VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, 10055 uint32_t counterBufferCount, 10056 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, 10057 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, 10058 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10059 { 10060 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10061 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, 10062 firstCounterBuffer, 10063 counterBufferCount, 10064 reinterpret_cast<const VkBuffer *>( 
pCounterBuffers ), 10065 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); 10066 } 10067 10068 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10069 template <typename Dispatch> 10070 VULKAN_HPP_INLINE void endTransformFeedbackEXT(uint32_t firstCounterBuffer,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,Dispatch const & d) const10071 CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, 10072 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, 10073 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, 10074 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 10075 { 10076 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10077 # ifdef VULKAN_HPP_NO_EXCEPTIONS 10078 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); 10079 # else 10080 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) 10081 { 10082 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); 10083 } 10084 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 10085 10086 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, 10087 firstCounterBuffer, 10088 counterBuffers.size(), 10089 reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), 10090 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); 10091 } 10092 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10093 10094 template <typename Dispatch> beginQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,VULKAN_HPP_NAMESPACE::QueryControlFlags flags,uint32_t index,Dispatch const & d) const10095 VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, 10096 uint32_t query, 10097 VULKAN_HPP_NAMESPACE::QueryControlFlags flags, 10098 uint32_t index, 10099 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10100 { 10101 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10102 d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index ); 10103 } 10104 10105 template <typename Dispatch> 10106 VULKAN_HPP_INLINE void endQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,uint32_t index,Dispatch const & d) const10107 CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10108 { 10109 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10110 d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index ); 10111 } 10112 10113 template <typename Dispatch> drawIndirectByteCountEXT(uint32_t instanceCount,uint32_t firstInstance,VULKAN_HPP_NAMESPACE::Buffer counterBuffer,VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,uint32_t counterOffset,uint32_t vertexStride,Dispatch const & d) const10114 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, 10115 uint32_t firstInstance, 10116 VULKAN_HPP_NAMESPACE::Buffer counterBuffer, 10117 VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, 10118 uint32_t counterOffset, 10119 uint32_t vertexStride, 10120 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10121 { 10122 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10123 d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, 10124 instanceCount, 10125 firstInstance, 10126 static_cast<VkBuffer>( counterBuffer ), 10127 static_cast<VkDeviceSize>( counterBufferOffset ), 10128 counterOffset, 10129 vertexStride ); 10130 } 10131 10132 //=== VK_NVX_binary_import === 10133 10134 template 
<typename Dispatch> createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,Dispatch const & d) const10135 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, 10136 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10137 VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, 10138 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10139 { 10140 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10141 return static_cast<Result>( d.vkCreateCuModuleNVX( m_device, 10142 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ), 10143 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10144 reinterpret_cast<VkCuModuleNVX *>( pModule ) ) ); 10145 } 10146 10147 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10148 template <typename Dispatch> 10149 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10150 Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, 10151 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10152 Dispatch const & d ) const 10153 { 10154 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10155 10156 VULKAN_HPP_NAMESPACE::CuModuleNVX module; 10157 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10158 d.vkCreateCuModuleNVX( m_device, 10159 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), 10160 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10161 reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); 
10162 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); 10163 10164 return createResultValueType( result, module ); 10165 } 10166 10167 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10168 template <typename Dispatch> 10169 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type createCuModuleNVXUnique(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10170 Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, 10171 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10172 Dispatch const & d ) const 10173 { 10174 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10175 10176 VULKAN_HPP_NAMESPACE::CuModuleNVX module; 10177 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10178 d.vkCreateCuModuleNVX( m_device, 10179 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), 10180 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10181 reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); 10182 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); 10183 10184 return createResultValueType( result, 10185 UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 10186 } 10187 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10188 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10189 10190 template <typename Dispatch> createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,Dispatch const & d) const10191 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, 10192 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10193 VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, 10194 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10195 { 10196 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10197 return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device, 10198 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ), 10199 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10200 reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) ); 10201 } 10202 10203 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10204 template <typename Dispatch> 10205 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10206 Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, 10207 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10208 Dispatch const & d ) const 10209 { 10210 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10211 10212 VULKAN_HPP_NAMESPACE::CuFunctionNVX function; 10213 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10214 d.vkCreateCuFunctionNVX( m_device, 10215 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), 10216 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10217 reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); 10218 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); 10219 10220 return createResultValueType( result, function ); 10221 } 10222 10223 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10224 template <typename Dispatch> 
10225 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type createCuFunctionNVXUnique(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10226 Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, 10227 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10228 Dispatch const & d ) const 10229 { 10230 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10231 10232 VULKAN_HPP_NAMESPACE::CuFunctionNVX function; 10233 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10234 d.vkCreateCuFunctionNVX( m_device, 10235 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), 10236 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10237 reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); 10238 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); 10239 10240 return createResultValueType( 10241 result, UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 10242 } 10243 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 10244 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10245 10246 template <typename Dispatch> destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10247 VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 10248 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10249 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10250 { 10251 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10252 d.vkDestroyCuModuleNVX( m_device, 
static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10253 } 10254 10255 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10256 template <typename Dispatch> destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10257 VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 10258 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10259 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10260 { 10261 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10262 10263 d.vkDestroyCuModuleNVX( m_device, 10264 static_cast<VkCuModuleNVX>( module ), 10265 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 10266 } 10267 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10268 10269 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuModuleNVX module,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10270 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 10271 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10272 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10273 { 10274 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10275 d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10276 } 10277 10278 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10279 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuModuleNVX module,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10280 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, 10281 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10282 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 10283 { 10284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10285 10286 d.vkDestroyCuModuleNVX( m_device, 10287 static_cast<VkCuModuleNVX>( module ), 10288 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 10289 } 10290 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10291 10292 template <typename Dispatch> destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10293 VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 10294 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10295 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10296 { 10297 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10298 d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10299 } 10300 10301 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10302 template <typename Dispatch> destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10303 VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 10304 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10305 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10306 { 10307 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10308 10309 d.vkDestroyCuFunctionNVX( m_device, 10310 static_cast<VkCuFunctionNVX>( function ), 10311 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 10312 } 10313 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10314 10315 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const10316 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 10317 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10318 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10319 { 10320 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10321 d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 10322 } 10323 10324 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10325 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CuFunctionNVX function,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const10326 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, 10327 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 10328 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10329 { 10330 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10331 10332 d.vkDestroyCuFunctionNVX( m_device, 10333 static_cast<VkCuFunctionNVX>( function ), 10334 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 10335 } 10336 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10337 10338 template <typename Dispatch> cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,Dispatch const & d) const10339 VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, 10340 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10341 { 10342 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10343 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) ); 10344 } 10345 10346 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10347 template <typename Dispatch> cuLaunchKernelNVX(const 
VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,Dispatch const & d) const10348 VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, 10349 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10350 { 10351 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10352 10353 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) ); 10354 } 10355 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10356 10357 //=== VK_NVX_image_view_handle === 10358 10359 template <typename Dispatch> getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,Dispatch const & d) const10360 VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, 10361 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10362 { 10363 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10364 return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); 10365 } 10366 10367 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10368 template <typename Dispatch> getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,Dispatch const & d) const10369 VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, 10370 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10371 { 10372 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10373 10374 uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) ); 10375 10376 return result; 10377 } 10378 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10379 10380 template <typename Dispatch> getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,Dispatch const & d) const10381 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, 10382 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, 10383 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10384 { 10385 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10386 return static_cast<Result>( 10387 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) ); 10388 } 10389 10390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10391 template <typename Dispatch> 10392 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView imageView,Dispatch const & d) const10393 Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const 10394 { 10395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10396 10397 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; 10398 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 10399 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) ); 10400 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); 10401 10402 return createResultValueType( result, properties ); 10403 } 10404 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 10405 10406 //=== VK_AMD_draw_indirect_count === 10407 10408 template <typename Dispatch> drawIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const10409 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, 10410 VULKAN_HPP_NAMESPACE::DeviceSize offset, 
10411 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 10412 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 10413 uint32_t maxDrawCount, 10414 uint32_t stride, 10415 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10416 { 10417 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10418 d.vkCmdDrawIndirectCountAMD( m_commandBuffer, 10419 static_cast<VkBuffer>( buffer ), 10420 static_cast<VkDeviceSize>( offset ), 10421 static_cast<VkBuffer>( countBuffer ), 10422 static_cast<VkDeviceSize>( countBufferOffset ), 10423 maxDrawCount, 10424 stride ); 10425 } 10426 10427 template <typename Dispatch> drawIndexedIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const10428 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, 10429 VULKAN_HPP_NAMESPACE::DeviceSize offset, 10430 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 10431 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 10432 uint32_t maxDrawCount, 10433 uint32_t stride, 10434 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10435 { 10436 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10437 d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, 10438 static_cast<VkBuffer>( buffer ), 10439 static_cast<VkDeviceSize>( offset ), 10440 static_cast<VkBuffer>( countBuffer ), 10441 static_cast<VkDeviceSize>( countBufferOffset ), 10442 maxDrawCount, 10443 stride ); 10444 } 10445 10446 //=== VK_AMD_shader_info === 10447 10448 template <typename Dispatch> getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,size_t * pInfoSize,void * pInfo,Dispatch const & d) const10449 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( 
VULKAN_HPP_NAMESPACE::Pipeline pipeline, 10450 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 10451 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 10452 size_t * pInfoSize, 10453 void * pInfo, 10454 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10455 { 10456 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10457 return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, 10458 static_cast<VkPipeline>( pipeline ), 10459 static_cast<VkShaderStageFlagBits>( shaderStage ), 10460 static_cast<VkShaderInfoTypeAMD>( infoType ), 10461 pInfoSize, 10462 pInfo ) ); 10463 } 10464 10465 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10466 template <typename Uint8_tAllocator, typename Dispatch> 10467 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,Dispatch const & d) const10468 Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 10469 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 10470 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 10471 Dispatch const & d ) const 10472 { 10473 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10474 10475 std::vector<uint8_t, Uint8_tAllocator> info; 10476 size_t infoSize; 10477 VULKAN_HPP_NAMESPACE::Result result; 10478 do 10479 { 10480 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 10481 static_cast<VkPipeline>( pipeline ), 10482 static_cast<VkShaderStageFlagBits>( shaderStage ), 10483 static_cast<VkShaderInfoTypeAMD>( infoType ), 10484 &infoSize, 10485 nullptr ) ); 10486 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) 10487 { 10488 info.resize( infoSize ); 10489 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 10490 static_cast<VkPipeline>( pipeline ), 10491 
static_cast<VkShaderStageFlagBits>( shaderStage ), 10492 static_cast<VkShaderInfoTypeAMD>( infoType ), 10493 &infoSize, 10494 reinterpret_cast<void *>( info.data() ) ) ); 10495 } 10496 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 10497 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); 10498 VULKAN_HPP_ASSERT( infoSize <= info.size() ); 10499 if ( infoSize < info.size() ) 10500 { 10501 info.resize( infoSize ); 10502 } 10503 return createResultValueType( result, info ); 10504 } 10505 10506 template <typename Uint8_tAllocator, 10507 typename Dispatch, 10508 typename B1, 10509 typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type> 10510 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline,VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const10511 Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 10512 VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, 10513 VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, 10514 Uint8_tAllocator & uint8_tAllocator, 10515 Dispatch const & d ) const 10516 { 10517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 10518 10519 std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator ); 10520 size_t infoSize; 10521 VULKAN_HPP_NAMESPACE::Result result; 10522 do 10523 { 10524 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device, 10525 static_cast<VkPipeline>( pipeline ), 10526 static_cast<VkShaderStageFlagBits>( shaderStage ), 10527 static_cast<VkShaderInfoTypeAMD>( infoType ), 10528 &infoSize, 10529 nullptr ) ); 10530 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) 10531 { 10532 info.resize( infoSize ); 10533 result = 
        // (continuation of Device::getShaderInfoAMD — the function's opening lines are above this chunk)
        // Second call of the count/data enumeration: fill `info` with `infoSize` bytes of shader info.
        static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderInfoAMD( m_device,
                                                                         static_cast<VkPipeline>( pipeline ),
                                                                         static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                                         static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                                         &infoSize,
                                                                         reinterpret_cast<void *>( info.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );  // retry while the driver reports more data than the buffer held
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
    VULKAN_HPP_ASSERT( infoSize <= info.size() );
    if ( infoSize < info.size() )
    {
      // shrink to the number of bytes actually written
      info.resize( infoSize );
    }
    return createResultValueType( result, info );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_dynamic_rendering ===

  // C-style overload: forwards the raw RenderingInfo pointer to the dispatched entry point;
  // the only translation is the reinterpret_cast between the C++ and C struct types.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
                                                           Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes the RenderingInfo by reference instead of by pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
                                                           Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRenderingKHR( m_commandBuffer );
  }

#if defined( VK_USE_PLATFORM_GGP )
  //=== VK_GGP_stream_descriptor_surface ===

  // C-style overload: caller supplies output pointer and inspects the returned Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                  pAllocator,
                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                                 pSurface,
                                                Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
                                                                      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
                                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                      reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created surface; resultCheck turns a failure Result into
  // the configured error handling (exception or assert, depending on library configuration).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>          allocator,
                                                Dispatch const &                                                   d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );

    return createResultValueType( result, surface );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: wraps the surface in a UniqueHandle that destroys it via the
  // owning Instance (and the same allocator) when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>          allocator,
                                                      Dispatch const &                                                   d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#    endif /* VULKAN_HPP_NO_SMART_HANDLE */
#  endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif     /*VK_USE_PLATFORM_GGP*/

  //=== VK_NV_external_memory_capabilities ===

  // C-style overload: all enums/flags are static_cast to their C counterparts.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format                            format,
                                                        VULKAN_HPP_NAMESPACE::ImageType                         type,
                                                        VULKAN_HPP_NAMESPACE::ImageTiling                       tiling,
                                                        VULKAN_HPP_NAMESPACE::ImageUsageFlags                   usage,
                                                        VULKAN_HPP_NAMESPACE::ImageCreateFlags                  flags,
                                                        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV   externalHandleType,
                                                        VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
                                                        Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
                                                            static_cast<VkFormat>( format ),
                                                            static_cast<VkImageType>( type ),
                                                            static_cast<VkImageTiling>( tiling ),
                                                            static_cast<VkImageUsageFlags>( usage ),
                                                            static_cast<VkImageCreateFlags>( flags ),
                                                            static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
                                                            reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties by value after checking the Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
    PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format                          format,
                                                        VULKAN_HPP_NAMESPACE::ImageType                       type,
                                                        VULKAN_HPP_NAMESPACE::ImageTiling                     tiling,
                                                        VULKAN_HPP_NAMESPACE::ImageUsageFlags                 usage,
                                                        VULKAN_HPP_NAMESPACE::ImageCreateFlags                flags,
                                                        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
                                                        Dispatch const &                                      d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
    VULKAN_HPP_NAMESPACE::Result                          result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
                                                            static_cast<VkFormat>( format ),
                                                            static_cast<VkImageType>( type ),
                                                            static_cast<VkImageTiling>( tiling ),
                                                            static_cast<VkImageUsageFlags>( usage ),
                                                            static_cast<VkImageCreateFlags>( flags ),
                                                            static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
                                                            reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );

    return createResultValueType( result, externalImageFormatProperties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_NV_external_memory_win32 ===

  // C-style overload: HANDLE is the native Win32 type, passed through untranslated.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory                    memory,
                                                                                VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
                                                                                HANDLE *                                              pHandle,
                                                                                Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the HANDLE by value after checking the Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV(
    VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    HANDLE                       handle;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );

    return createResultValueType( result, handle );
  }
#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_KHR_get_physical_device_properties2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
                                                          Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the features struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
    PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );

    return features;
  }

  // StructureChain overload: queries through the PhysicalDeviceFeatures2 element of a
  // caller-specified chain, so extension feature structs linked via pNext get filled too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 &  features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
                                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
    PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );

    return properties;
  }

  // StructureChain overload: fills the whole pNext chain rooted at PhysicalDeviceProperties2.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format              format,
                                                                  VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
                                                                  Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the format properties by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
    PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );

    return formatProperties;
  }

  // StructureChain overload: fills the whole pNext chain rooted at FormatProperties2.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 &        formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
                                                  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *               pImageFormatProperties,
                                                  Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
                                                                                reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
                                                                                reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the image format properties after checking the Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );

    return createResultValueType( result, imageFormatProperties );
  }

  // StructureChain overload: fills the whole pNext chain rooted at ImageFormatProperties2.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    StructureChain<X, Y, Z...>                     structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    VULKAN_HPP_NAMESPACE::Result                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );

    return createResultValueType( result, structureChain );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload of the queue-family query: standard Vulkan count/data protocol —
  // pass nullptr properties to get the count, then call again to fill the array.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t *                                     pQueueFamilyPropertyCount,
                                                                       VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
                                                                       Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: two-call enumeration — first call (nullptr data) gets the count,
  // second call fills the vector; the vector is shrunk if the driver reported fewer entries.
  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
    uint32_t                                                                                   queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }

  // Same as above, but constructs the result vector with a caller-provided allocator.
  template <typename QueueFamilyProperties2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
    uint32_t                                                                                   queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }

  // StructureChain overload: returns one chain per queue family. Before the data call each
  // temporary QueueFamilyProperties2 is linked (via pNext) into the caller's chain element so
  // extension structs get filled; the filled base structs are then copied back into the chains.
  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<StructureChain, StructureChainAllocator>      structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t                                                  queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }

  // StructureChain overload with a caller-provided allocator for the chain vector.
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<StructureChain, StructureChainAllocator>      structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t                                                  queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
                                                                  Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the memory properties by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
    PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );

    return memoryProperties;
  }

  // StructureChain overload: fills the whole pNext chain rooted at PhysicalDeviceMemoryProperties2.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>        structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: count/data protocol via pPropertyCount / pProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
                                                                             uint32_t *                                                         pPropertyCount,
                                                                             VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *               pProperties,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
                                                          pPropertyCount,
                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: two-call enumeration into a vector, shrunk to the reported count.
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                        Dispatch const &                                                   d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t                                                                                               propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                          &propertyCount,
                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }

  // Same as above, but constructs the result vector with a caller-provided allocator.
  template <typename SparseImageFormatProperties2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                        SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
                                                        Dispatch const &                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
    uint32_t                                                                                               propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                          &propertyCount,
                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );

    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_device_group ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t                                       heapIndex,
                                                                uint32_t                                       localDeviceIndex,
                                                                uint32_t                                       remoteDeviceIndex,
                                                                VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                                                Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the peer-memory feature flags by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR(
    uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
    d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );

    return peerMemoryFeatures;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t         baseGroupX,
                                                         uint32_t         baseGroupY,
                                                         uint32_t         baseGroupZ,
                                                         uint32_t         groupCountX,
                                                         uint32_t         groupCountY,
                                                         uint32_t         groupCountZ,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }

#if defined( VK_USE_PLATFORM_VI_NN )
  //=== VK_NN_vi_surface ===

  // C-style overload: caller supplies output pointer and inspects the returned Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                                                             VULKAN_HPP_NAMESPACE::SurfaceKHR *                  pSurface,
                                                                             Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
                                                       reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created surface after checking the Result.
  // (This definition continues past the end of this chunk.)
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN &       createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateViSurfaceNN( m_instance,
reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 11189 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11190 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 11191 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); 11192 11193 return createResultValueType( result, surface ); 11194 } 11195 11196 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11197 template <typename Dispatch> 11198 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createViSurfaceNNUnique(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11199 Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, 11200 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11201 Dispatch const & d ) const 11202 { 11203 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11204 11205 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 11206 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11207 d.vkCreateViSurfaceNN( m_instance, 11208 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 11209 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11210 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 11211 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); 11212 11213 return createResultValueType( 11214 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 11215 } 11216 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11217 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11218 #endif /*VK_USE_PLATFORM_VI_NN*/ 11219 11220 //=== 
VK_KHR_maintenance1 === 11221 11222 template <typename Dispatch> trimCommandPoolKHR(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,Dispatch const & d) const11223 VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 11224 VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, 11225 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11226 { 11227 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11228 d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); 11229 } 11230 11231 //=== VK_KHR_device_group_creation === 11232 11233 template <typename Dispatch> 11234 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumeratePhysicalDeviceGroupsKHR(uint32_t * pPhysicalDeviceGroupCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,Dispatch const & d) const11235 Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, 11236 VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, 11237 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11238 { 11239 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11240 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 11241 m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); 11242 } 11243 11244 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11245 template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> 11246 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11247 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const & d) const11248 Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const 11249 { 11250 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11251 11252 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; 11253 uint32_t physicalDeviceGroupCount; 11254 VULKAN_HPP_NAMESPACE::Result result; 11255 do 11256 { 11257 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 11258 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 11259 { 11260 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 11261 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 11262 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 11263 } 11264 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11265 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 11266 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 11267 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 11268 { 11269 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 11270 } 11271 return createResultValueType( result, physicalDeviceGroupProperties ); 11272 } 11273 11274 template <typename PhysicalDeviceGroupPropertiesAllocator, 11275 typename Dispatch, 11276 typename B1, 11277 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, int>::type> 11278 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11279 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,Dispatch const & d) const11280 
Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const 11281 { 11282 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11283 11284 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( 11285 physicalDeviceGroupPropertiesAllocator ); 11286 uint32_t physicalDeviceGroupCount; 11287 VULKAN_HPP_NAMESPACE::Result result; 11288 do 11289 { 11290 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 11291 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) 11292 { 11293 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 11294 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 11295 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 11296 } 11297 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 11298 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 11299 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 11300 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) 11301 { 11302 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 11303 } 11304 return createResultValueType( result, physicalDeviceGroupProperties ); 11305 } 11306 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11307 11308 //=== VK_KHR_external_memory_capabilities === 11309 11310 template <typename Dispatch> getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const11311 
VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 11312 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 11313 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11314 { 11315 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11316 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, 11317 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 11318 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 11319 } 11320 11321 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11322 template <typename Dispatch> 11323 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const11324 PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, 11325 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11326 { 11327 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11328 11329 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 11330 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, 11331 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 11332 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 11333 11334 return externalBufferProperties; 11335 } 11336 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11337 11338 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 11339 //=== VK_KHR_external_memory_win32 === 11340 11341 template <typename Dispatch> getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const11342 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, 11343 HANDLE * pHandle, 11344 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11345 { 11346 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11347 return static_cast<Result>( 11348 d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 11349 } 11350 11351 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11352 template <typename Dispatch> 11353 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const11354 Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 11355 { 11356 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11357 11358 HANDLE handle; 11359 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11360 d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 11361 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); 11362 11363 return createResultValueType( result, handle ); 11364 } 11365 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11366 11367 template <typename Dispatch> 11368 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,Dispatch const & d) const11369 Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 11370 HANDLE handle, 11371 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, 11372 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 11373 { 11374 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11375 return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, 11376 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 11377 handle, 11378 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) ); 11379 } 11380 11381 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11382 template <typename Dispatch> 11383 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,Dispatch const & d) const11384 Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const 11385 { 11386 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11387 11388 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; 11389 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11390 d.vkGetMemoryWin32HandlePropertiesKHR( m_device, 11391 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 11392 handle, 11393 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) ); 11394 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); 11395 11396 return createResultValueType( result, memoryWin32HandleProperties ); 11397 } 11398 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11399 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 11400 11401 //=== VK_KHR_external_memory_fd === 11402 11403 template <typename Dispatch> getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const11404 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const 
VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, 11405 int * pFd, 11406 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11407 { 11408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11409 return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 11410 } 11411 11412 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11413 template <typename Dispatch> getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,Dispatch const & d) const11414 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, 11415 Dispatch const & d ) const 11416 { 11417 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11418 11419 int fd; 11420 VULKAN_HPP_NAMESPACE::Result result = 11421 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 11422 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); 11423 11424 return createResultValueType( result, fd ); 11425 } 11426 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11427 11428 template <typename Dispatch> getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,int fd,VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,Dispatch const & d) const11429 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 11430 int fd, 11431 VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, 11432 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11433 { 11434 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11435 return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( 11436 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, 
reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) ); 11437 } 11438 11439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11440 template <typename Dispatch> 11441 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,int fd,Dispatch const & d) const11442 Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const 11443 { 11444 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11445 11446 VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; 11447 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdPropertiesKHR( 11448 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) ); 11449 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); 11450 11451 return createResultValueType( result, memoryFdProperties ); 11452 } 11453 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11454 11455 //=== VK_KHR_external_semaphore_capabilities === 11456 11457 template <typename Dispatch> 11458 VULKAN_HPP_INLINE void getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const11459 PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 11460 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, 11461 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11462 { 11463 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11464 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( 
m_physicalDevice, 11465 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 11466 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 11467 } 11468 11469 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11470 template <typename Dispatch> 11471 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const11472 PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, 11473 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11474 { 11475 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11476 11477 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 11478 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, 11479 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 11480 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 11481 11482 return externalSemaphoreProperties; 11483 } 11484 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11485 11486 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 11487 //=== VK_KHR_external_semaphore_win32 === 11488 11489 template <typename Dispatch> importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,Dispatch const & d) const11490 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( 11491 const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11492 { 11493 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11494 return static_cast<Result>( 11495 d.vkImportSemaphoreWin32HandleKHR( m_device, 
reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) ); 11496 } 11497 11498 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11499 template <typename Dispatch> 11500 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,Dispatch const & d) const11501 Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, 11502 Dispatch const & d ) const 11503 { 11504 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11505 11506 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11507 d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) ); 11508 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); 11509 11510 return createResultValueType( result ); 11511 } 11512 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11513 11514 template <typename Dispatch> getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const11515 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( 11516 const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11517 { 11518 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11519 return static_cast<Result>( 11520 d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 11521 } 11522 11523 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11524 template <typename Dispatch> 11525 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const11526 Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 11527 { 11528 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11529 11530 HANDLE handle; 11531 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11532 d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 11533 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); 11534 11535 return createResultValueType( result, handle ); 11536 } 11537 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11538 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 11539 11540 //=== VK_KHR_external_semaphore_fd === 11541 11542 template <typename Dispatch> importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,Dispatch const & d) const11543 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, 11544 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11545 { 11546 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11547 return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) ); 11548 } 11549 11550 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11551 template <typename Dispatch> 11552 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,Dispatch const & d) const11553 Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & 
importSemaphoreFdInfo, Dispatch const & d ) const 11554 { 11555 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11556 11557 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11558 d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) ); 11559 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); 11560 11561 return createResultValueType( result ); 11562 } 11563 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11564 11565 template <typename Dispatch> getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const11566 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, 11567 int * pFd, 11568 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11569 { 11570 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11571 return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 11572 } 11573 11574 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11575 template <typename Dispatch> 11576 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo,Dispatch const & d) const11577 Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const 11578 { 11579 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11580 11581 int fd; 11582 VULKAN_HPP_NAMESPACE::Result result = 11583 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 11584 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); 11585 11586 return createResultValueType( result, fd ); 11587 } 
11588 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11589 11590 //=== VK_KHR_push_descriptor === 11591 11592 template <typename Dispatch> pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,Dispatch const & d) const11593 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 11594 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 11595 uint32_t set, 11596 uint32_t descriptorWriteCount, 11597 const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, 11598 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11599 { 11600 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11601 d.vkCmdPushDescriptorSetKHR( m_commandBuffer, 11602 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 11603 static_cast<VkPipelineLayout>( layout ), 11604 set, 11605 descriptorWriteCount, 11606 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) ); 11607 } 11608 11609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11610 template <typename Dispatch> 11611 VULKAN_HPP_INLINE void pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,Dispatch const & d) const11612 CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 11613 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 11614 uint32_t set, 11615 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, 11616 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11617 { 11618 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11619 11620 d.vkCmdPushDescriptorSetKHR( m_commandBuffer, 11621 
static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 11622 static_cast<VkPipelineLayout>( layout ), 11623 set, 11624 descriptorWrites.size(), 11625 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) ); 11626 } 11627 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11628 11629 template <typename Dispatch> pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,const void * pData,Dispatch const & d) const11630 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 11631 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 11632 uint32_t set, 11633 const void * pData, 11634 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11635 { 11636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11637 d.vkCmdPushDescriptorSetWithTemplateKHR( 11638 m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData ); 11639 } 11640 11641 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11642 template <typename DataType, typename Dispatch> pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,DataType const & data,Dispatch const & d) const11643 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 11644 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 11645 uint32_t set, 11646 DataType const & data, 11647 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11648 { 11649 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11650 11651 d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, 11652 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 11653 static_cast<VkPipelineLayout>( 
layout ), 11654 set, 11655 reinterpret_cast<const void *>( &data ) ); 11656 } 11657 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11658 11659 //=== VK_EXT_conditional_rendering === 11660 11661 template <typename Dispatch> beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,Dispatch const & d) const11662 VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, 11663 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11664 { 11665 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11666 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) ); 11667 } 11668 11669 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11670 template <typename Dispatch> beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,Dispatch const & d) const11671 VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, 11672 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11673 { 11674 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11675 11676 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) ); 11677 } 11678 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11679 11680 template <typename Dispatch> endConditionalRenderingEXT(Dispatch const & d) const11681 VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11682 { 11683 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11684 d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); 11685 } 11686 11687 //=== VK_KHR_descriptor_update_template === 11688 
11689 template <typename Dispatch> 11690 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDescriptorUpdateTemplateKHR(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,Dispatch const & d) const11691 Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, 11692 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11693 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, 11694 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11695 { 11696 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11697 return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, 11698 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), 11699 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11700 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); 11701 } 11702 11703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11704 template <typename Dispatch> 11705 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11706 Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 11707 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11708 Dispatch const & d ) const 11709 { 11710 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11711 11712 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 11713 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplateKHR( 11714 m_device, 11715 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 11716 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11717 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 11718 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); 11719 11720 return createResultValueType( result, descriptorUpdateTemplate ); 11721 } 11722 11723 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11724 template <typename Dispatch> 11725 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11726 Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, 11727 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11728 Dispatch const & d ) const 11729 { 11730 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11731 11732 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; 11733 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDescriptorUpdateTemplateKHR( 11734 m_device, 11735 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), 11736 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11737 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); 11738 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); 
11739 11740 return createResultValueType( result, 11741 UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( 11742 descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 11743 } 11744 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11745 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11746 11747 template <typename Dispatch> destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const11748 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 11749 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11750 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11751 { 11752 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11753 d.vkDestroyDescriptorUpdateTemplateKHR( 11754 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 11755 } 11756 11757 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11758 template <typename Dispatch> destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11759 VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 11760 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11761 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11762 { 11763 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11764 11765 d.vkDestroyDescriptorUpdateTemplateKHR( 11766 m_device, 11767 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 11768 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 11769 } 11770 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11771 11772 template <typename Dispatch> updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const11773 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 11774 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 11775 const void * pData, 11776 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11777 { 11778 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11779 d.vkUpdateDescriptorSetWithTemplateKHR( 11780 m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); 11781 } 11782 11783 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11784 template <typename DataType, typename Dispatch> updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,DataType const & data,Dispatch const & d) const11785 VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 11786 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 11787 DataType const & data, 11788 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11789 { 11790 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11791 11792 d.vkUpdateDescriptorSetWithTemplateKHR( m_device, 11793 static_cast<VkDescriptorSet>( descriptorSet ), 11794 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 11795 reinterpret_cast<const void *>( &data ) ); 11796 } 11797 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11798 11799 //=== VK_NV_clip_space_w_scaling === 11800 11801 template <typename Dispatch> 
setViewportWScalingNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,Dispatch const & d) const11802 VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 11803 uint32_t viewportCount, 11804 const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, 11805 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11806 { 11807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11808 d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) ); 11809 } 11810 11811 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11812 template <typename Dispatch> 11813 VULKAN_HPP_INLINE void setViewportWScalingNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,Dispatch const & d) const11814 CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 11815 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, 11816 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11817 { 11818 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11819 11820 d.vkCmdSetViewportWScalingNV( 11821 m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) ); 11822 } 11823 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11824 11825 //=== VK_EXT_direct_mode_display === 11826 11827 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 11828 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const11829 VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11830 { 11831 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11832 return static_cast<Result>( 
d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 11833 } 11834 #else 11835 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const11836 VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11837 { 11838 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11839 11840 d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ); 11841 } 11842 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11843 11844 #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) 11845 //=== VK_EXT_acquire_xlib_display === 11846 11847 template <typename Dispatch> acquireXlibDisplayEXT(Display * dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const11848 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, 11849 VULKAN_HPP_NAMESPACE::DisplayKHR display, 11850 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11851 { 11852 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11853 return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) ); 11854 } 11855 11856 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11857 template <typename Dispatch> 11858 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireXlibDisplayEXT(Display & dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const11859 PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 11860 { 11861 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11862 11863 VULKAN_HPP_NAMESPACE::Result result = 11864 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) ); 11865 resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); 11866 11867 return createResultValueType( result ); 11868 } 11869 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11870 11871 template <typename Dispatch> getRandROutputDisplayEXT(Display * dpy,RROutput rrOutput,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const11872 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, 11873 RROutput rrOutput, 11874 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 11875 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11876 { 11877 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11878 return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 11879 } 11880 11881 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11882 template <typename Dispatch> 11883 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(Display & dpy,RROutput rrOutput,Dispatch const & d) const11884 PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 11885 { 11886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11887 11888 VULKAN_HPP_NAMESPACE::DisplayKHR display; 11889 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11890 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 11891 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); 11892 11893 return createResultValueType( result, display ); 11894 } 11895 11896 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11897 template <typename Dispatch> 11898 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getRandROutputDisplayEXTUnique(Display & dpy,RROutput 
rrOutput,Dispatch const & d) const11899 PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 11900 { 11901 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11902 11903 VULKAN_HPP_NAMESPACE::DisplayKHR display; 11904 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11905 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 11906 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); 11907 11908 return createResultValueType( result, 11909 UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 11910 } 11911 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 11912 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11913 #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ 11914 11915 //=== VK_EXT_display_surface_counter === 11916 11917 template <typename Dispatch> 11918 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,Dispatch const & d) const11919 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 11920 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, 11921 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11922 { 11923 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11924 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 11925 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) ); 11926 } 11927 11928 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11929 template <typename Dispatch> 11930 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type 
getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const11931 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 11932 { 11933 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11934 11935 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; 11936 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 11937 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) ); 11938 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); 11939 11940 return createResultValueType( result, surfaceCapabilities ); 11941 } 11942 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11943 11944 //=== VK_EXT_display_control === 11945 11946 template <typename Dispatch> displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,Dispatch const & d) const11947 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 11948 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, 11949 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11950 { 11951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11952 return static_cast<Result>( 11953 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) ); 11954 } 11955 11956 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11957 template <typename Dispatch> displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,Dispatch const & d) const11958 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( 
VULKAN_HPP_NAMESPACE::DisplayKHR display, 11959 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, 11960 Dispatch const & d ) const 11961 { 11962 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11963 11964 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 11965 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) ); 11966 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); 11967 11968 return createResultValueType( result ); 11969 } 11970 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 11971 11972 template <typename Dispatch> registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const11973 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, 11974 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11975 VULKAN_HPP_NAMESPACE::Fence * pFence, 11976 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11977 { 11978 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11979 return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, 11980 reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), 11981 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11982 reinterpret_cast<VkFence *>( pFence ) ) ); 11983 } 11984 11985 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11986 template <typename Dispatch> 11987 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const11988 Device::registerEventEXT( const 
VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, 11989 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 11990 Dispatch const & d ) const 11991 { 11992 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 11993 11994 VULKAN_HPP_NAMESPACE::Fence fence; 11995 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT( 11996 m_device, 11997 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 11998 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11999 reinterpret_cast<VkFence *>( &fence ) ) ); 12000 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); 12001 12002 return createResultValueType( result, fence ); 12003 } 12004 12005 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12006 template <typename Dispatch> 12007 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12008 Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, 12009 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12010 Dispatch const & d ) const 12011 { 12012 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12013 12014 VULKAN_HPP_NAMESPACE::Fence fence; 12015 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDeviceEventEXT( 12016 m_device, 12017 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 12018 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12019 reinterpret_cast<VkFence *>( &fence ) ) ); 12020 resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); 12021 12022 return createResultValueType( result, 12023 UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 12024 } 12025 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12026 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12027 12028 template <typename Dispatch> registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const12029 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 12030 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, 12031 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12032 VULKAN_HPP_NAMESPACE::Fence * pFence, 12033 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12034 { 12035 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12036 return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, 12037 static_cast<VkDisplayKHR>( display ), 12038 reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), 12039 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12040 reinterpret_cast<VkFence *>( pFence ) ) ); 12041 } 12042 12043 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12044 template <typename Dispatch> 12045 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12046 Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 12047 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, 12048 
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12049 Dispatch const & d ) const 12050 { 12051 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12052 12053 VULKAN_HPP_NAMESPACE::Fence fence; 12054 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT( 12055 m_device, 12056 static_cast<VkDisplayKHR>( display ), 12057 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 12058 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12059 reinterpret_cast<VkFence *>( &fence ) ) ); 12060 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); 12061 12062 return createResultValueType( result, fence ); 12063 } 12064 12065 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12066 template <typename Dispatch> 12067 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12068 Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, 12069 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, 12070 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12071 Dispatch const & d ) const 12072 { 12073 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12074 12075 VULKAN_HPP_NAMESPACE::Fence fence; 12076 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkRegisterDisplayEventEXT( 12077 m_device, 12078 static_cast<VkDisplayKHR>( display ), 12079 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 12080 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12081 reinterpret_cast<VkFence *>( &fence ) ) ); 12082 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); 12083 12084 return createResultValueType( result, 12085 UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 12086 } 12087 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12088 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12089 12090 template <typename Dispatch> getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,uint64_t * pCounterValue,Dispatch const & d) const12091 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 12092 VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, 12093 uint64_t * pCounterValue, 12094 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12095 { 12096 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12097 return static_cast<Result>( 12098 d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) ); 12099 } 12100 12101 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12102 template <typename Dispatch> getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,Dispatch const & d) const12103 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( 12104 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const 12105 { 12106 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12107 12108 uint64_t counterValue; 12109 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12110 d.vkGetSwapchainCounterEXT( 
m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) ); 12111 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); 12112 12113 return createResultValueType( result, counterValue ); 12114 } 12115 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12116 12117 //=== VK_GOOGLE_display_timing === 12118 12119 template <typename Dispatch> 12120 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,Dispatch const & d) const12121 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 12122 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, 12123 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12124 { 12125 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12126 return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( 12127 m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) ); 12128 } 12129 12130 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12131 template <typename Dispatch> 12132 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const12133 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 12134 { 12135 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12136 12137 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; 12138 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRefreshCycleDurationGOOGLE( 12139 m_device, static_cast<VkSwapchainKHR>( swapchain ), 
reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) ); 12140 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); 12141 12142 return createResultValueType( result, displayTimingProperties ); 12143 } 12144 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12145 12146 template <typename Dispatch> 12147 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * pPresentationTimingCount,VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,Dispatch const & d) const12148 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 12149 uint32_t * pPresentationTimingCount, 12150 VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, 12151 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12152 { 12153 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12154 return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, 12155 static_cast<VkSwapchainKHR>( swapchain ), 12156 pPresentationTimingCount, 12157 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) ); 12158 } 12159 12160 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12161 template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch> 12162 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 12163 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const12164 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 12165 { 12166 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12167 12168 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings; 12169 
uint32_t presentationTimingCount; 12170 VULKAN_HPP_NAMESPACE::Result result; 12171 do 12172 { 12173 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12174 d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 12175 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) 12176 { 12177 presentationTimings.resize( presentationTimingCount ); 12178 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12179 d.vkGetPastPresentationTimingGOOGLE( m_device, 12180 static_cast<VkSwapchainKHR>( swapchain ), 12181 &presentationTimingCount, 12182 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 12183 } 12184 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12185 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 12186 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 12187 if ( presentationTimingCount < presentationTimings.size() ) 12188 { 12189 presentationTimings.resize( presentationTimingCount ); 12190 } 12191 return createResultValueType( result, presentationTimings ); 12192 } 12193 12194 template <typename PastPresentationTimingGOOGLEAllocator, 12195 typename Dispatch, 12196 typename B1, 12197 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, int>::type> 12198 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 12199 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,Dispatch const & d) const12200 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 12201 PastPresentationTimingGOOGLEAllocator & 
pastPresentationTimingGOOGLEAllocator, 12202 Dispatch const & d ) const 12203 { 12204 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12205 12206 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( 12207 pastPresentationTimingGOOGLEAllocator ); 12208 uint32_t presentationTimingCount; 12209 VULKAN_HPP_NAMESPACE::Result result; 12210 do 12211 { 12212 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12213 d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 12214 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) 12215 { 12216 presentationTimings.resize( presentationTimingCount ); 12217 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12218 d.vkGetPastPresentationTimingGOOGLE( m_device, 12219 static_cast<VkSwapchainKHR>( swapchain ), 12220 &presentationTimingCount, 12221 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 12222 } 12223 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12224 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 12225 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 12226 if ( presentationTimingCount < presentationTimings.size() ) 12227 { 12228 presentationTimings.resize( presentationTimingCount ); 12229 } 12230 return createResultValueType( result, presentationTimings ); 12231 } 12232 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12233 12234 //=== VK_EXT_discard_rectangles === 12235 12236 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,uint32_t discardRectangleCount,const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,Dispatch const & d) const12237 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 12238 uint32_t discardRectangleCount, 
12239 const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, 12240 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12241 { 12242 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12243 d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) ); 12244 } 12245 12246 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12247 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,Dispatch const & d) const12248 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 12249 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, 12250 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12251 { 12252 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12253 12254 d.vkCmdSetDiscardRectangleEXT( 12255 m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) ); 12256 } 12257 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12258 12259 template <typename Dispatch> setDiscardRectangleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable,Dispatch const & d) const12260 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, 12261 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12262 { 12263 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12264 d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) ); 12265 } 12266 12267 template <typename Dispatch> setDiscardRectangleModeEXT(VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode,Dispatch const & d) const12268 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( 
VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, 12269 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12270 { 12271 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12272 d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) ); 12273 } 12274 12275 //=== VK_EXT_hdr_metadata === 12276 12277 template <typename Dispatch> setHdrMetadataEXT(uint32_t swapchainCount,const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,Dispatch const & d) const12278 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, 12279 const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, 12280 const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, 12281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12282 { 12283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12284 d.vkSetHdrMetadataEXT( 12285 m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) ); 12286 } 12287 12288 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12289 template <typename Dispatch> setHdrMetadataEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,Dispatch const & d) const12290 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, 12291 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, 12292 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 12293 { 12294 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12295 # ifdef VULKAN_HPP_NO_EXCEPTIONS 12296 VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); 12297 # else 12298 if ( swapchains.size() != 
metadata.size() ) 12299 { 12300 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); 12301 } 12302 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 12303 12304 d.vkSetHdrMetadataEXT( m_device, 12305 swapchains.size(), 12306 reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), 12307 reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) ); 12308 } 12309 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12310 12311 //=== VK_KHR_create_renderpass2 === 12312 12313 template <typename Dispatch> createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const12314 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 12315 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12316 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 12317 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12318 { 12319 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12320 return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, 12321 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 12322 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 12323 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 12324 } 12325 12326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12327 template <typename Dispatch> 12328 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12329 Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 12330 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> 
allocator, 12331 Dispatch const & d ) const 12332 { 12333 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12334 12335 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 12336 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12337 d.vkCreateRenderPass2KHR( m_device, 12338 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 12339 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12340 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 12341 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); 12342 12343 return createResultValueType( result, renderPass ); 12344 } 12345 12346 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12347 template <typename Dispatch> 12348 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const12349 Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, 12350 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 12351 Dispatch const & d ) const 12352 { 12353 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12354 12355 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 12356 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12357 d.vkCreateRenderPass2KHR( m_device, 12358 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 12359 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12360 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 12361 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); 12362 12363 return 
createResultValueType( 12364 result, UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 12365 } 12366 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 12367 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12368 12369 template <typename Dispatch> beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const12370 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 12371 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 12372 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12373 { 12374 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12375 d.vkCmdBeginRenderPass2KHR( 12376 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 12377 } 12378 12379 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12380 template <typename Dispatch> beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const12381 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, 12382 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 12383 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12384 { 12385 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12386 12387 d.vkCmdBeginRenderPass2KHR( 12388 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 12389 } 12390 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12391 12392 template <typename Dispatch> nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * 
pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const12393 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 12394 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 12395 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12396 { 12397 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12398 d.vkCmdNextSubpass2KHR( 12399 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 12400 } 12401 12402 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12403 template <typename Dispatch> nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const12404 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, 12405 const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 12406 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12407 { 12408 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12409 12410 d.vkCmdNextSubpass2KHR( 12411 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 12412 } 12413 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12414 12415 template <typename Dispatch> endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const12416 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 12417 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12418 { 12419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12420 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 12421 } 12422 12423 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 12424 template <typename Dispatch> endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,Dispatch const & d) const12425 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, 12426 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12427 { 12428 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12429 12430 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 12431 } 12432 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12433 12434 //=== VK_KHR_shared_presentable_image === 12435 12436 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 12437 template <typename Dispatch> getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const12438 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 12439 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12440 { 12441 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12442 return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 12443 } 12444 #else 12445 template <typename Dispatch> getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const12446 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 12447 Dispatch const & d ) const 12448 { 12449 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12450 12451 VULKAN_HPP_NAMESPACE::Result result = 12452 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 12453 resultCheck( result, 12454 VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", 12455 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 12456 12457 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 12458 } 12459 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 12460 12461 //=== VK_KHR_external_fence_capabilities === 12462 12463 template <typename Dispatch> getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const12464 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 12465 VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, 12466 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12467 { 12468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12469 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, 12470 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 12471 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); 12472 } 12473 12474 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12475 template <typename Dispatch> 12476 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const12477 PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, 12478 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12479 { 12480 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12481 12482 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 12483 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, 12484 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 12485 reinterpret_cast<VkExternalFenceProperties *>( 
&externalFenceProperties ) ); 12486 12487 return externalFenceProperties; 12488 } 12489 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12490 12491 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 12492 //=== VK_KHR_external_fence_win32 === 12493 12494 template <typename Dispatch> importFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,Dispatch const & d) const12495 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( 12496 const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12497 { 12498 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12499 return static_cast<Result>( 12500 d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) ); 12501 } 12502 12503 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12504 template <typename Dispatch> 12505 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,Dispatch const & d) const12506 Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const 12507 { 12508 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12509 12510 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12511 d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) ); 12512 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); 12513 12514 return createResultValueType( result ); 12515 } 12516 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12517 12518 template <typename Dispatch> getFenceWin32HandleKHR(const 
VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const12519 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, 12520 HANDLE * pHandle, 12521 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12522 { 12523 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12524 return static_cast<Result>( 12525 d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 12526 } 12527 12528 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12529 template <typename Dispatch> 12530 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const12531 Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 12532 { 12533 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12534 12535 HANDLE handle; 12536 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12537 d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 12538 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); 12539 12540 return createResultValueType( result, handle ); 12541 } 12542 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12543 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 12544 12545 //=== VK_KHR_external_fence_fd === 12546 12547 template <typename Dispatch> importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,Dispatch const & d) const12548 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, 12549 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 12550 { 12551 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12552 return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) ); 12553 } 12554 12555 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12556 template <typename Dispatch> 12557 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo,Dispatch const & d) const12558 Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const 12559 { 12560 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12561 12562 VULKAN_HPP_NAMESPACE::Result result = 12563 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) ); 12564 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); 12565 12566 return createResultValueType( result ); 12567 } 12568 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12569 12570 template <typename Dispatch> getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const12571 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, 12572 int * pFd, 12573 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12574 { 12575 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12576 return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 12577 } 12578 12579 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12580 template <typename Dispatch> getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,Dispatch const & d) const12581 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, 12582 Dispatch const & d ) const 12583 { 12584 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12585 12586 int fd; 12587 VULKAN_HPP_NAMESPACE::Result result = 12588 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 12589 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); 12590 12591 return createResultValueType( result, fd ); 12592 } 12593 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12594 12595 //=== VK_KHR_performance_query === 12596 12597 template <typename Dispatch> 12598 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,uint32_t * pCounterCount,VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,Dispatch const & d) const12599 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, 12600 uint32_t * pCounterCount, 12601 VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, 12602 VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, 12603 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12604 { 12605 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12606 return static_cast<Result>( 12607 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, 12608 queueFamilyIndex, 12609 pCounterCount, 12610 reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ), 12611 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) ); 12612 } 12613 12614 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12615 template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch> 12616 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE 12617 typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 12618 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,Dispatch const & d) const12619 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const 12620 { 12621 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12622 12623 std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 12624 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> 12625 data_; 12626 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first; 12627 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second; 12628 uint32_t counterCount; 12629 VULKAN_HPP_NAMESPACE::Result result; 12630 do 12631 { 12632 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12633 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); 12634 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) 12635 { 12636 counters.resize( counterCount ); 12637 counterDescriptions.resize( counterCount ); 12638 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 12639 m_physicalDevice, 12640 queueFamilyIndex, 12641 &counterCount, 12642 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 12643 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 12644 } 12645 } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12646 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 12647 VULKAN_HPP_ASSERT( counterCount <= counters.size() ); 12648 if ( counterCount < counters.size() ) 12649 { 12650 counters.resize( counterCount ); 12651 counterDescriptions.resize( counterCount ); 12652 } 12653 return createResultValueType( result, data_ ); 12654 } 12655 12656 template <typename PerformanceCounterKHRAllocator, 12657 typename PerformanceCounterDescriptionKHRAllocator, 12658 typename Dispatch, 12659 typename B1, 12660 typename B2, 12661 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value && 12662 std::is_same<typename B2::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, 12663 int>::type> 12664 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 12665 typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 12666 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,Dispatch const & d) const12667 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, 12668 PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, 12669 PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, 12670 Dispatch const & d ) const 12671 { 12672 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12673 12674 std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 12675 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, 
PerformanceCounterDescriptionKHRAllocator>> 12676 data_( 12677 std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); 12678 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first; 12679 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second; 12680 uint32_t counterCount; 12681 VULKAN_HPP_NAMESPACE::Result result; 12682 do 12683 { 12684 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12685 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); 12686 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) 12687 { 12688 counters.resize( counterCount ); 12689 counterDescriptions.resize( counterCount ); 12690 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 12691 m_physicalDevice, 12692 queueFamilyIndex, 12693 &counterCount, 12694 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 12695 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 12696 } 12697 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12698 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 12699 VULKAN_HPP_ASSERT( counterCount <= counters.size() ); 12700 if ( counterCount < counters.size() ) 12701 { 12702 counters.resize( counterCount ); 12703 counterDescriptions.resize( counterCount ); 12704 } 12705 return createResultValueType( result, data_ ); 12706 } 12707 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12708 12709 template <typename Dispatch> 12710 VULKAN_HPP_INLINE void getQueueFamilyPerformanceQueryPassesKHR(const 
VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,uint32_t * pNumPasses,Dispatch const & d) const12711 PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, 12712 uint32_t * pNumPasses, 12713 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12714 { 12715 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12716 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( 12717 m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses ); 12718 } 12719 12720 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12721 template <typename Dispatch> getQueueFamilyPerformanceQueryPassesKHR(const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,Dispatch const & d) const12722 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( 12723 const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12724 { 12725 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12726 12727 uint32_t numPasses; 12728 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( 12729 m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses ); 12730 12731 return numPasses; 12732 } 12733 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12734 12735 template <typename Dispatch> acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,Dispatch const & d) const12736 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, 12737 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12738 { 12739 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12740 
return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) ); 12741 } 12742 12743 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12744 template <typename Dispatch> 12745 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info,Dispatch const & d) const12746 Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const 12747 { 12748 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12749 12750 VULKAN_HPP_NAMESPACE::Result result = 12751 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) ); 12752 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); 12753 12754 return createResultValueType( result ); 12755 } 12756 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12757 12758 template <typename Dispatch> releaseProfilingLockKHR(Dispatch const & d) const12759 VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12760 { 12761 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12762 d.vkReleaseProfilingLockKHR( m_device ); 12763 } 12764 12765 //=== VK_KHR_get_surface_capabilities2 === 12766 12767 template <typename Dispatch> 12768 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,Dispatch const & d) const12769 PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 12770 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, 12771 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
12772 { 12773 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12774 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, 12775 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 12776 reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) ); 12777 } 12778 12779 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12780 template <typename Dispatch> 12781 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const12782 PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 12783 { 12784 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12785 12786 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; 12787 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12788 d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, 12789 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 12790 reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) ); 12791 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); 12792 12793 return createResultValueType( result, surfaceCapabilities ); 12794 } 12795 12796 template <typename X, typename Y, typename... 
Z, typename Dispatch> 12797 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const12798 PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 12799 { 12800 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12801 12802 StructureChain<X, Y, Z...> structureChain; 12803 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>(); 12804 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12805 d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, 12806 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 12807 reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) ); 12808 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); 12809 12810 return createResultValueType( result, structureChain ); 12811 } 12812 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12813 12814 template <typename Dispatch> getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pSurfaceFormatCount,VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,Dispatch const & d) const12815 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 12816 uint32_t * pSurfaceFormatCount, 12817 VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, 12818 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12819 { 12820 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12821 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 
m_physicalDevice, 12822 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 12823 pSurfaceFormatCount, 12824 reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) ); 12825 } 12826 12827 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12828 template <typename SurfaceFormat2KHRAllocator, typename Dispatch> 12829 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const12830 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 12831 { 12832 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12833 12834 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats; 12835 uint32_t surfaceFormatCount; 12836 VULKAN_HPP_NAMESPACE::Result result; 12837 do 12838 { 12839 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 12840 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); 12841 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 12842 { 12843 surfaceFormats.resize( surfaceFormatCount ); 12844 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12845 d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 12846 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 12847 &surfaceFormatCount, 12848 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 12849 } 12850 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12851 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 12852 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 12853 if ( 
surfaceFormatCount < surfaceFormats.size() ) 12854 { 12855 surfaceFormats.resize( surfaceFormatCount ); 12856 } 12857 return createResultValueType( result, surfaceFormats ); 12858 } 12859 12860 template <typename SurfaceFormat2KHRAllocator, 12861 typename Dispatch, 12862 typename B1, 12863 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, int>::type> 12864 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,Dispatch const & d) const12865 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 12866 SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, 12867 Dispatch const & d ) const 12868 { 12869 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12870 12871 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator ); 12872 uint32_t surfaceFormatCount; 12873 VULKAN_HPP_NAMESPACE::Result result; 12874 do 12875 { 12876 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 12877 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); 12878 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 12879 { 12880 surfaceFormats.resize( surfaceFormatCount ); 12881 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12882 d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 12883 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 12884 &surfaceFormatCount, 12885 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 12886 } 12887 } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12888 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 12889 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 12890 if ( surfaceFormatCount < surfaceFormats.size() ) 12891 { 12892 surfaceFormats.resize( surfaceFormatCount ); 12893 } 12894 return createResultValueType( result, surfaceFormats ); 12895 } 12896 12897 template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> 12898 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const12899 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 12900 { 12901 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12902 12903 std::vector<StructureChain, StructureChainAllocator> structureChains; 12904 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats; 12905 uint32_t surfaceFormatCount; 12906 VULKAN_HPP_NAMESPACE::Result result; 12907 do 12908 { 12909 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 12910 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); 12911 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 12912 { 12913 structureChains.resize( surfaceFormatCount ); 12914 surfaceFormats.resize( surfaceFormatCount ); 12915 for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) 12916 { 12917 surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext; 12918 } 12919 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12920 d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 12921 
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 12922 &surfaceFormatCount, 12923 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 12924 } 12925 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12926 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 12927 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 12928 if ( surfaceFormatCount < surfaceFormats.size() ) 12929 { 12930 structureChains.resize( surfaceFormatCount ); 12931 } 12932 for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) 12933 { 12934 structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i]; 12935 } 12936 return createResultValueType( result, structureChains ); 12937 } 12938 12939 template <typename StructureChain, 12940 typename StructureChainAllocator, 12941 typename Dispatch, 12942 typename B1, 12943 typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type> 12944 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,StructureChainAllocator & structureChainAllocator,Dispatch const & d) const12945 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 12946 StructureChainAllocator & structureChainAllocator, 12947 Dispatch const & d ) const 12948 { 12949 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12950 12951 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); 12952 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats; 12953 uint32_t surfaceFormatCount; 12954 VULKAN_HPP_NAMESPACE::Result result; 12955 do 12956 { 12957 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 
12958 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); 12959 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) 12960 { 12961 structureChains.resize( surfaceFormatCount ); 12962 surfaceFormats.resize( surfaceFormatCount ); 12963 for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) 12964 { 12965 surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext; 12966 } 12967 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 12968 d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, 12969 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 12970 &surfaceFormatCount, 12971 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 12972 } 12973 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 12974 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 12975 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 12976 if ( surfaceFormatCount < surfaceFormats.size() ) 12977 { 12978 structureChains.resize( surfaceFormatCount ); 12979 } 12980 for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) 12981 { 12982 structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i]; 12983 } 12984 return createResultValueType( result, structureChains ); 12985 } 12986 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 12987 12988 //=== VK_KHR_get_display_properties2 === 12989 12990 template <typename Dispatch> getDisplayProperties2KHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,Dispatch const & d) const12991 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, 12992 VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, 12993 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12994 { 12995 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 12996 return static_cast<Result>( 12997 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) ); 12998 } 12999 13000 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13001 template <typename DisplayProperties2KHRAllocator, typename Dispatch> 13002 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13003 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR(Dispatch const & d) const13004 PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const 13005 { 13006 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13007 13008 std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties; 13009 uint32_t propertyCount; 13010 VULKAN_HPP_NAMESPACE::Result result; 13011 do 13012 { 13013 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 13014 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 13015 { 13016 properties.resize( propertyCount ); 13017 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13018 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) ); 13019 } 13020 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13021 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); 13022 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 13023 if ( propertyCount < properties.size() ) 13024 { 13025 properties.resize( propertyCount ); 13026 } 13027 return createResultValueType( result, properties ); 13028 } 13029 13030 template <typename DisplayProperties2KHRAllocator, 13031 typename Dispatch, 13032 typename B1, 13033 typename 
std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, int>::type> 13034 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13035 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR(DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,Dispatch const & d) const13036 PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const 13037 { 13038 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13039 13040 std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator ); 13041 uint32_t propertyCount; 13042 VULKAN_HPP_NAMESPACE::Result result; 13043 do 13044 { 13045 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 13046 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 13047 { 13048 properties.resize( propertyCount ); 13049 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13050 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) ); 13051 } 13052 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13053 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); 13054 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 13055 if ( propertyCount < properties.size() ) 13056 { 13057 properties.resize( propertyCount ); 13058 } 13059 return createResultValueType( result, properties ); 13060 } 13061 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13062 13063 template <typename Dispatch> getDisplayPlaneProperties2KHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,Dispatch const & 
d) const13064 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, 13065 VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, 13066 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13067 { 13068 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13069 return static_cast<Result>( 13070 d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) ); 13071 } 13072 13073 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13074 template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch> 13075 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13076 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR(Dispatch const & d) const13077 PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const 13078 { 13079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13080 13081 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties; 13082 uint32_t propertyCount; 13083 VULKAN_HPP_NAMESPACE::Result result; 13084 do 13085 { 13086 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 13087 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 13088 { 13089 properties.resize( propertyCount ); 13090 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 13091 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); 13092 } 13093 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13094 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); 13095 
VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 13096 if ( propertyCount < properties.size() ) 13097 { 13098 properties.resize( propertyCount ); 13099 } 13100 return createResultValueType( result, properties ); 13101 } 13102 13103 template <typename DisplayPlaneProperties2KHRAllocator, 13104 typename Dispatch, 13105 typename B1, 13106 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, int>::type> 13107 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13108 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR(DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator,Dispatch const & d) const13109 PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const 13110 { 13111 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13112 13113 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator ); 13114 uint32_t propertyCount; 13115 VULKAN_HPP_NAMESPACE::Result result; 13116 do 13117 { 13118 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 13119 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 13120 { 13121 properties.resize( propertyCount ); 13122 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 13123 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); 13124 } 13125 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13126 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); 13127 VULKAN_HPP_ASSERT( 
propertyCount <= properties.size() ); 13128 if ( propertyCount < properties.size() ) 13129 { 13130 properties.resize( propertyCount ); 13131 } 13132 return createResultValueType( result, properties ); 13133 } 13134 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13135 13136 template <typename Dispatch> getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,Dispatch const & d) const13137 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13138 uint32_t * pPropertyCount, 13139 VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, 13140 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13141 { 13142 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13143 return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( 13144 m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) ); 13145 } 13146 13147 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13148 template <typename DisplayModeProperties2KHRAllocator, typename Dispatch> 13149 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13150 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const13151 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 13152 { 13153 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13154 13155 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties; 13156 uint32_t propertyCount; 13157 VULKAN_HPP_NAMESPACE::Result result; 13158 do 13159 { 13160 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 13161 d.vkGetDisplayModeProperties2KHR( 
m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 13162 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 13163 { 13164 properties.resize( propertyCount ); 13165 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR( 13166 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 13167 } 13168 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 13169 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 13170 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 13171 if ( propertyCount < properties.size() ) 13172 { 13173 properties.resize( propertyCount ); 13174 } 13175 return createResultValueType( result, properties ); 13176 } 13177 13178 template <typename DisplayModeProperties2KHRAllocator, 13179 typename Dispatch, 13180 typename B1, 13181 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, int>::type> 13182 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 13183 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,Dispatch const & d) const13184 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 13185 DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, 13186 Dispatch const & d ) const 13187 { 13188 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13189 13190 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator ); 13191 uint32_t propertyCount; 13192 VULKAN_HPP_NAMESPACE::Result result; 
    // Continuation of PhysicalDevice::getDisplayModeProperties2KHR (enhanced overload):
    // standard Vulkan two-call enumeration — query the count, size the vector, fetch, and
    // loop while the implementation reports eIncomplete (count changed between calls).
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayModeProperties2KHR(
          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink in case the final fetch returned fewer elements than were allocated.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( result, properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: forwards raw pointers straight to vkGetDisplayPlaneCapabilities2KHR.
  // The assert checks the dispatcher was initialized against the same Vulkan header version.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR *  pDisplayPlaneInfo,
                                                     VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
                                                     Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
                                                                     reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
                                                                     reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the capabilities by value and turns failure codes into
  // exceptions (or a ResultValue) via resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
    PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
    VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
                                                                                      reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
                                                                                      reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );

    return createResultValueType( result, capabilities );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_USE_PLATFORM_IOS_MVK )
  //=== VK_MVK_ios_surface ===

  // C-style overload: forwards raw pointers to vkCreateIOSSurfaceMVK.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
                                                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                                                               VULKAN_HPP_NAMESPACE::SurfaceKHR *                    pSurface,
                                                                               Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
                                                         reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes references plus an Optional allocator and returns the created
  // SurfaceKHR by value; errors are routed through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );

    return createResultValueType( result, surface );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: same call, but wraps the surface in a UniqueHandle whose
  // deleter (ObjectDestroy) remembers this Instance, the allocator and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK &     createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_IOS_MVK*/

#if defined( VK_USE_PLATFORM_MACOS_MVK )
  //=== VK_MVK_macos_surface ===

  // C-style overload: forwards raw pointers to vkCreateMacOSSurfaceMVK.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR *                      pSurface,
                                                                                 Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
                                                           reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created SurfaceKHR by value; errors via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK &   createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );

    return createResultValueType( result, surface );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: wraps the surface in a UniqueHandle owned by this Instance.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK &   createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_MACOS_MVK*/

  //=== VK_EXT_debug_utils ===

  // Attach a debug name to a Vulkan object (C-style pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference argument; returns void-typed ResultValueType after resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );

    return createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Attach an arbitrary tag blob to a Vulkan object (C-style pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload of the tag setter; mirrors setDebugUtilsObjectNameEXT above.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );

    return createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Open a labeled region on a queue (pointer overload); returns nothing.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                         Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of Queue::beginDebugUtilsLabelEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                         Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Close the most recently opened queue label region.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
  }

  // Insert a single (non-region) label into the queue (pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                          Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of Queue::insertDebugUtilsLabelEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                          Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Open a labeled region in a command buffer (pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                                 Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of CommandBuffer::beginDebugUtilsLabelEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                                 Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Close the most recently opened command-buffer label region.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
  }

  // Insert a single (non-region) label into the command buffer (pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                                  Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of CommandBuffer::insertDebugUtilsLabelEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                                  Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Create a debug-utils messenger on this instance (C-style pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
                                            VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT *                 pMessenger,
                                            Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                                                  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the messenger by value; errors routed through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
                                            Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT(
      m_instance,
      reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );

    return createResultValueType( result, messenger );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: the returned UniqueHandle destroys the messenger via
  // ObjectDestroy<Instance> when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
    Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
                                                  Dispatch const &                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    VULKAN_HPP_NAMESPACE::Result                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT(
      m_instance,
      reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Destroy a debug-utils messenger (C-style pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT      messenger,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT              messenger,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy() overload dispatching on the handle type (pointer allocator form);
  // same underlying call as destroyDebugUtilsMessengerEXT above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT      messenger,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT              messenger,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Inject a message into the debug-utils messenger chain (pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
                                                               VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT              messageTypes,
                                                               const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
                                                               Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of submitDebugUtilsMessageEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
                                                               VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT              messageTypes,
                                                               const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,
                                                               Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //=== VK_ANDROID_external_memory_android_hardware_buffer ===

  // Query memory properties of an AHardwareBuffer (C-style pointer overload).
  // Note: the AHardwareBuffer pointer is passed straight through — it is an NDK type,
  // not a Vulkan handle, so no cast is needed.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer *                               buffer,
                                                       VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
                                                       Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
    VULKAN_HPP_NAMESPACE::Result                                 result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );

    return createResultValueType( result, properties );
  }

  // StructureChain overload: fills the AndroidHardwareBufferPropertiesANDROID element of a
  // caller-chosen pNext chain, so extension structs chained behind it get populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    StructureChain<X, Y, Z...>                                     structureChain;
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );

    return createResultValueType( result, structureChain );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Export device memory as an AHardwareBuffer (C-style pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
                                                   struct AHardwareBuffer **                                               pBuffer,
                                                   Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the exported AHardwareBuffer pointer by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
    Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    struct AHardwareBuffer *     buffer;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );

    return createResultValueType( result, buffer );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_AMDX_shader_enqueue ===

  // Batch-create execution-graph pipelines (C-style pointer overload); the caller owns the
  // pPipelines output array sized to createInfoCount.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache                                pipelineCache,
                                               uint32_t                                                           createInfoCount,
                                               const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                  pAllocator,
                                               VULKAN_HPP_NAMESPACE::Pipeline *                                   pPipelines,
                                               Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device,
                                                                       static_cast<VkPipelineCache>( pipelineCache ),
                                                                       createInfoCount,
                                                                       reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ),
                                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                       reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns a vector of pipelines. Returns ResultValue (not
  // ResultValueType) because ePipelineCompileRequiredEXT is an accepted success code the
  // caller may need to inspect.
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX(
    VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
    Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }

  // Overload taking an explicit allocator instance for the returned pipeline vector;
  // SFINAE-constrained (via B0::value_type == Pipeline) to avoid ambiguity with the
  // overload above.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX(
    VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
    PipelineAllocator &                                                                        pipelineAllocator,
    Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VULKAN_HPP_NAMESPACE::Result                                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }

  // Single-pipeline convenience overload: creates exactly one execution-graph pipeline.
  // (Definition continues past the visible extract; kept byte-identical up to the cut.)
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache                                pipelineCache,
                                              const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>          allocator,
                                              Dispatch const &                                                   d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX",
                 {
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 13806 13807 return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline ); 13808 } 13809 13810 # ifndef VULKAN_HPP_NO_SMART_HANDLE 13811 template <typename Dispatch, typename PipelineAllocator> 13812 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13813 Device::createExecutionGraphPipelinesAMDXUnique( 13814 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 13815 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 13816 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13817 Dispatch const & d ) const 13818 { 13819 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13820 13821 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 13822 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 13823 m_device, 13824 static_cast<VkPipelineCache>( pipelineCache ), 13825 createInfos.size(), 13826 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 13827 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13828 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 13829 resultCheck( result, 13830 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", 13831 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 13832 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 13833 uniquePipelines.reserve( createInfos.size() ); 13834 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 13835 for ( auto const & pipeline : pipelines ) 13836 { 13837 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 13838 } 13839 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( 13840 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); 13841 } 13842 13843 template <typename Dispatch, 13844 typename PipelineAllocator, 13845 typename B0, 13846 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type> 13847 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const13848 Device::createExecutionGraphPipelinesAMDXUnique( 13849 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 13850 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, 13851 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13852 PipelineAllocator & pipelineAllocator, 13853 Dispatch const & d ) const 13854 { 13855 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13856 13857 std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); 13858 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 13859 m_device, 13860 static_cast<VkPipelineCache>( pipelineCache ), 13861 createInfos.size(), 13862 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), 13863 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13864 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 13865 resultCheck( result, 13866 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", 13867 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 13868 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 13869 uniquePipelines.reserve( createInfos.size() ); 13870 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 13871 for ( auto const & pipeline : pipelines ) 13872 { 13873 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); 13874 } 13875 return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( 13876 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); 13877 } 13878 13879 template <typename Dispatch> 13880 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createExecutionGraphPipelineAMDXUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const13881 Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 13882 const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, 13883 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 13884 
Dispatch const & d ) const 13885 { 13886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13887 13888 VULKAN_HPP_NAMESPACE::Pipeline pipeline; 13889 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateExecutionGraphPipelinesAMDX( 13890 m_device, 13891 static_cast<VkPipelineCache>( pipelineCache ), 13892 1, 13893 reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), 13894 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 13895 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 13896 resultCheck( result, 13897 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", 13898 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 13899 13900 return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( 13901 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), 13902 UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 13903 } 13904 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 13905 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13906 13907 template <typename Dispatch> 13908 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,Dispatch const & d) const13909 Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 13910 VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, 13911 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13912 { 13913 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13914 return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( 13915 m_device, static_cast<VkPipeline>( executionGraph ), 
reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) ); 13916 } 13917 13918 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13919 template <typename Dispatch> 13920 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type getExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,Dispatch const & d) const13921 Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const 13922 { 13923 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13924 13925 VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; 13926 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( 13927 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) ) ); 13928 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); 13929 13930 return createResultValueType( result, sizeInfo ); 13931 } 13932 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13933 13934 template <typename Dispatch> 13935 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getExecutionGraphPipelineNodeIndexAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,uint32_t * pNodeIndex,Dispatch const & d) const13936 Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, 13937 const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, 13938 uint32_t * pNodeIndex, 13939 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13940 { 13941 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13942 return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( 13943 m_device, static_cast<VkPipeline>( 
executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) ); 13944 } 13945 13946 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13947 template <typename Dispatch> getExecutionGraphPipelineNodeIndexAMDX(VULKAN_HPP_NAMESPACE::Pipeline executionGraph,const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo,Dispatch const & d) const13948 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type Device::getExecutionGraphPipelineNodeIndexAMDX( 13949 VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const 13950 { 13951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13952 13953 uint32_t nodeIndex; 13954 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( 13955 m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex ) ); 13956 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); 13957 13958 return createResultValueType( result, nodeIndex ); 13959 } 13960 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13961 13962 template <typename Dispatch> initializeGraphScratchMemoryAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,Dispatch const & d) const13963 VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 13964 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13965 { 13966 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13967 d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) ); 13968 } 13969 13970 template <typename Dispatch> dispatchGraphAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * 
pCountInfo,Dispatch const & d) const13971 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 13972 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, 13973 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13974 { 13975 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13976 d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); 13977 } 13978 13979 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 13980 template <typename Dispatch> dispatchGraphAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,Dispatch const & d) const13981 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 13982 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, 13983 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13984 { 13985 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13986 13987 d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); 13988 } 13989 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 13990 13991 template <typename Dispatch> dispatchGraphIndirectAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,Dispatch const & d) const13992 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 13993 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, 13994 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 13995 { 13996 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 13997 d.vkCmdDispatchGraphIndirectAMDX( 13998 m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( 
pCountInfo ) ); 13999 } 14000 14001 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14002 template <typename Dispatch> dispatchGraphIndirectAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,Dispatch const & d) const14003 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 14004 const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, 14005 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14006 { 14007 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14008 14009 d.vkCmdDispatchGraphIndirectAMDX( 14010 m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); 14011 } 14012 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14013 14014 template <typename Dispatch> dispatchGraphIndirectCountAMDX(VULKAN_HPP_NAMESPACE::DeviceAddress scratch,VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,Dispatch const & d) const14015 VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, 14016 VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, 14017 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14018 { 14019 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14020 d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) ); 14021 } 14022 #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 14023 14024 //=== VK_EXT_sample_locations === 14025 14026 template <typename Dispatch> setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,Dispatch const & d) const14027 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, 14028 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14029 { 14030 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 14031 d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) ); 14032 } 14033 14034 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14035 template <typename Dispatch> setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,Dispatch const & d) const14036 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, 14037 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14038 { 14039 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14040 14041 d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) ); 14042 } 14043 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14044 14045 template <typename Dispatch> getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,Dispatch const & d) const14046 VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 14047 VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, 14048 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14049 { 14050 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14051 d.vkGetPhysicalDeviceMultisamplePropertiesEXT( 14052 m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) ); 14053 } 14054 14055 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14056 template <typename Dispatch> 14057 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,Dispatch const & d) const14058 PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const 
& d ) const VULKAN_HPP_NOEXCEPT 14059 { 14060 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14061 14062 VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; 14063 d.vkGetPhysicalDeviceMultisamplePropertiesEXT( 14064 m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) ); 14065 14066 return multisampleProperties; 14067 } 14068 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14069 14070 //=== VK_KHR_get_memory_requirements2 === 14071 14072 template <typename Dispatch> getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const14073 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, 14074 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 14075 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14076 { 14077 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14078 d.vkGetImageMemoryRequirements2KHR( 14079 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 14080 } 14081 14082 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14083 template <typename Dispatch> 14084 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const14085 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14086 { 14087 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14088 14089 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 14090 d.vkGetImageMemoryRequirements2KHR( 14091 m_device, 
reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 14092 14093 return memoryRequirements; 14094 } 14095 14096 template <typename X, typename Y, typename... Z, typename Dispatch> 14097 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const14098 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14099 { 14100 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14101 14102 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 14103 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 14104 d.vkGetImageMemoryRequirements2KHR( 14105 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 14106 14107 return structureChain; 14108 } 14109 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14110 14111 template <typename Dispatch> getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const14112 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, 14113 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 14114 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14115 { 14116 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14117 d.vkGetBufferMemoryRequirements2KHR( 14118 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 14119 } 
14120 14121 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14122 template <typename Dispatch> 14123 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const14124 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14125 { 14126 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14127 14128 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 14129 d.vkGetBufferMemoryRequirements2KHR( 14130 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 14131 14132 return memoryRequirements; 14133 } 14134 14135 template <typename X, typename Y, typename... Z, typename Dispatch> 14136 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const14137 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14138 { 14139 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14140 14141 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 14142 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 14143 d.vkGetBufferMemoryRequirements2KHR( 14144 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 14145 14146 return structureChain; 14147 } 14148 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14149 14150 template <typename Dispatch> getImageSparseMemoryRequirements2KHR(const 
VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const14151 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, 14152 uint32_t * pSparseMemoryRequirementCount, 14153 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 14154 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14155 { 14156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14157 d.vkGetImageSparseMemoryRequirements2KHR( m_device, 14158 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), 14159 pSparseMemoryRequirementCount, 14160 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 14161 } 14162 14163 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14164 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 14165 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,Dispatch const & d) const14166 Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const 14167 { 14168 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14169 14170 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 14171 uint32_t sparseMemoryRequirementCount; 14172 d.vkGetImageSparseMemoryRequirements2KHR( 14173 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 14174 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 14175 
d.vkGetImageSparseMemoryRequirements2KHR( m_device, 14176 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 14177 &sparseMemoryRequirementCount, 14178 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 14179 14180 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 14181 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 14182 { 14183 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 14184 } 14185 return sparseMemoryRequirements; 14186 } 14187 14188 template <typename SparseImageMemoryRequirements2Allocator, 14189 typename Dispatch, 14190 typename B1, 14191 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, int>::type> 14192 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const14193 Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 14194 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 14195 Dispatch const & d ) const 14196 { 14197 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14198 14199 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 14200 sparseImageMemoryRequirements2Allocator ); 14201 uint32_t sparseMemoryRequirementCount; 14202 d.vkGetImageSparseMemoryRequirements2KHR( 14203 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); 14204 sparseMemoryRequirements.resize( 
sparseMemoryRequirementCount ); 14205 d.vkGetImageSparseMemoryRequirements2KHR( m_device, 14206 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), 14207 &sparseMemoryRequirementCount, 14208 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 14209 14210 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 14211 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 14212 { 14213 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 14214 } 14215 return sparseMemoryRequirements; 14216 } 14217 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14218 14219 //=== VK_KHR_acceleration_structure === 14220 14221 template <typename Dispatch> 14222 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,Dispatch const & d) const14223 Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, 14224 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14225 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, 14226 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14227 { 14228 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14229 return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, 14230 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ), 14231 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 14232 reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) ); 14233 } 14234 14235 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14236 template <typename Dispatch> 14237 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type createAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14238 Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, 14239 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14240 Dispatch const & d ) const 14241 { 14242 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14243 14244 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; 14245 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR( 14246 m_device, 14247 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), 14248 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14249 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); 14250 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); 14251 14252 return createResultValueType( result, accelerationStructure ); 14253 } 14254 14255 # ifndef VULKAN_HPP_NO_SMART_HANDLE 14256 template <typename Dispatch> 14257 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14258 Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, 14259 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14260 Dispatch const & d ) const 14261 { 14262 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14263 14264 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; 14265 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureKHR( 14266 m_device, 14267 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), 14268 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 14269 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); 14270 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" ); 14271 14272 return createResultValueType( 14273 result, 14274 UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 14275 } 14276 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 14277 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14278 14279 template <typename Dispatch> destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const14280 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 14281 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14282 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14283 { 14284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14285 d.vkDestroyAccelerationStructureKHR( 14286 m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 14287 } 14288 14289 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14290 template <typename Dispatch> destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14291 VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 14292 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14293 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14294 { 14295 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14296 14297 d.vkDestroyAccelerationStructureKHR( 14298 m_device, 14299 static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 14300 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 14301 } 14302 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14303 14304 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const14305 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 14306 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 14307 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14308 { 14309 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14310 d.vkDestroyAccelerationStructureKHR( 14311 m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 14312 } 14313 14314 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14315 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const14316 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 14317 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 14318 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14319 { 14320 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14321 14322 d.vkDestroyAccelerationStructureKHR( 14323 m_device, 14324 static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 14325 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 14326 } 14327 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14328 14329 template <typename Dispatch> 14330 VULKAN_HPP_INLINE void buildAccelerationStructuresKHR(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,Dispatch const & d) const14331 CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, 14332 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 14333 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, 14334 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14335 { 14336 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14337 d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, 14338 infoCount, 14339 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 14340 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ); 14341 } 14342 14343 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14344 template <typename Dispatch> buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,Dispatch const & d) const14345 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( 14346 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 14347 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, 14348 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 14349 { 14350 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14351 # ifdef VULKAN_HPP_NO_EXCEPTIONS 14352 VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); 14353 # else 14354 if ( infos.size() != pBuildRangeInfos.size() ) 14355 { 14356 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); 14357 } 14358 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 14359 14360 d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, 14361 infos.size(), 14362 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), 14363 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ); 14364 } 14365 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14366 14367 template <typename Dispatch> buildAccelerationStructuresIndirectKHR(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,const uint32_t * pIndirectStrides,const uint32_t * const * ppMaxPrimitiveCounts,Dispatch const & d) const14368 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, 14369 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 14370 const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, 14371 const uint32_t * pIndirectStrides, 14372 const uint32_t * const * ppMaxPrimitiveCounts, 14373 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14374 { 14375 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14376 d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, 14377 infoCount, 14378 reinterpret_cast<const 
VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 14379 reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ), 14380 pIndirectStrides, 14381 ppMaxPrimitiveCounts ); 14382 } 14383 14384 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14385 template <typename Dispatch> buildAccelerationStructuresIndirectKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,Dispatch const & d) const14386 VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( 14387 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 14388 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, 14389 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides, 14390 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts, 14391 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 14392 { 14393 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14394 # ifdef VULKAN_HPP_NO_EXCEPTIONS 14395 VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); 14396 VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); 14397 VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); 14398 # else 14399 if ( infos.size() != indirectDeviceAddresses.size() ) 14400 { 14401 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); 14402 } 14403 if ( infos.size() != indirectStrides.size() ) 14404 { 14405 throw LogicError( VULKAN_HPP_NAMESPACE_STRING 
"::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); 14406 } 14407 if ( infos.size() != pMaxPrimitiveCounts.size() ) 14408 { 14409 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); 14410 } 14411 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 14412 14413 d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, 14414 infos.size(), 14415 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), 14416 reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ), 14417 indirectStrides.data(), 14418 pMaxPrimitiveCounts.data() ); 14419 } 14420 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14421 14422 template <typename Dispatch> 14423 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,uint32_t infoCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,Dispatch const & d) const14424 Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 14425 uint32_t infoCount, 14426 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, 14427 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, 14428 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14429 { 14430 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14431 return static_cast<Result>( 14432 d.vkBuildAccelerationStructuresKHR( m_device, 14433 static_cast<VkDeferredOperationKHR>( deferredOperation ), 14434 infoCount, 14435 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), 14436 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) ); 
14437 } 14438 14439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14440 template <typename Dispatch> buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,Dispatch const & d) const14441 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( 14442 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 14443 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, 14444 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, 14445 Dispatch const & d ) const 14446 { 14447 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14448 # ifdef VULKAN_HPP_NO_EXCEPTIONS 14449 VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); 14450 # else 14451 if ( infos.size() != pBuildRangeInfos.size() ) 14452 { 14453 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); 14454 } 14455 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 14456 14457 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14458 d.vkBuildAccelerationStructuresKHR( m_device, 14459 static_cast<VkDeferredOperationKHR>( deferredOperation ), 14460 infos.size(), 14461 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), 14462 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) ); 14463 resultCheck( 14464 result, 14465 VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", 14466 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 14467 14468 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 14469 } 14470 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14471 14472 template <typename Dispatch> copyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const14473 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 14474 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, 14475 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14476 { 14477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14478 return static_cast<Result>( d.vkCopyAccelerationStructureKHR( 14479 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) ); 14480 } 14481 14482 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14483 template <typename Dispatch> 14484 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,Dispatch const & d) const14485 Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 14486 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, 14487 Dispatch const & d ) const 14488 { 14489 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14490 14491 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureKHR( 14492 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) ); 14493 resultCheck( 14494 
result, 14495 VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", 14496 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 14497 14498 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 14499 } 14500 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14501 14502 template <typename Dispatch> 14503 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result copyAccelerationStructureToMemoryKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,Dispatch const & d) const14504 Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 14505 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, 14506 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14507 { 14508 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14509 return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( 14510 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) ); 14511 } 14512 14513 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14514 template <typename Dispatch> 14515 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureToMemoryKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,Dispatch const & d) const14516 Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 14517 const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, 14518 Dispatch const & d ) const 14519 { 14520 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14521 14522 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyAccelerationStructureToMemoryKHR( 14523 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) ); 14524 resultCheck( 14525 result, 14526 VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", 14527 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 14528 14529 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 14530 } 14531 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14532 14533 template <typename Dispatch> 14534 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result copyMemoryToAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const14535 Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 14536 const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, 14537 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14538 { 14539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14540 return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( 14541 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) ); 14542 } 14543 14544 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14545 template <typename Dispatch> 14546 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result copyMemoryToAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,Dispatch const & d) const14547 Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 14548 
const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, 14549 Dispatch const & d ) const 14550 { 14551 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14552 14553 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToAccelerationStructureKHR( 14554 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) ); 14555 resultCheck( 14556 result, 14557 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", 14558 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 14559 14560 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 14561 } 14562 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14563 14564 template <typename Dispatch> 14565 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,void * pData,size_t stride,Dispatch const & d) const14566 Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, 14567 const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, 14568 VULKAN_HPP_NAMESPACE::QueryType queryType, 14569 size_t dataSize, 14570 void * pData, 14571 size_t stride, 14572 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14573 { 14574 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14575 return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 14576 accelerationStructureCount, 14577 reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), 14578 static_cast<VkQueryType>( queryType ), 14579 dataSize, 14580 pData, 14581 stride ) ); 14582 } 14583 14584 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14585 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 14586 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeAccelerationStructuresPropertiesKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,size_t stride,Dispatch const & d) const14587 Device::writeAccelerationStructuresPropertiesKHR( 14588 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 14589 VULKAN_HPP_NAMESPACE::QueryType queryType, 14590 size_t dataSize, 14591 size_t stride, 14592 Dispatch const & d ) const 14593 { 14594 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14595 14596 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 14597 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 14598 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14599 d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 14600 accelerationStructures.size(), 14601 reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), 14602 static_cast<VkQueryType>( queryType ), 14603 data.size() * sizeof( DataType ), 14604 reinterpret_cast<void *>( data.data() ), 14605 stride ) ); 14606 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); 14607 14608 return createResultValueType( result, data ); 14609 } 14610 14611 template <typename DataType, typename Dispatch> writeAccelerationStructuresPropertyKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t stride,Dispatch const & d) const14612 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR( 14613 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, 14614 VULKAN_HPP_NAMESPACE::QueryType queryType, 14615 size_t stride, 14616 Dispatch const & d ) const 14617 { 14618 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14619 14620 DataType data; 14621 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 14622 d.vkWriteAccelerationStructuresPropertiesKHR( m_device, 14623 accelerationStructures.size(), 14624 reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), 14625 static_cast<VkQueryType>( queryType ), 14626 sizeof( DataType ), 14627 reinterpret_cast<void *>( &data ), 14628 stride ) ); 14629 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); 14630 14631 return createResultValueType( result, data ); 14632 } 14633 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14634 14635 template <typename Dispatch> copyAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const14636 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, 14637 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14638 { 14639 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14640 d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ); 14641 } 14642 14643 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14644 template <typename Dispatch> copyAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,Dispatch const & d) const14645 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, 14646 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14647 { 14648 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14649 14650 d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ); 14651 } 14652 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14653 14654 template <typename Dispatch> copyAccelerationStructureToMemoryKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,Dispatch const & d) const14655 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, 14656 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14657 { 14658 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14659 d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ); 14660 } 14661 14662 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 14663 template <typename Dispatch> copyAccelerationStructureToMemoryKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,Dispatch const & d) const14664 VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, 14665 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 14666 { 14667 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 14668 14669 d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ); 14670 } 14671 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 14672 14673 template <typename Dispatch> copyMemoryToAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,Dispatch const & d) const14674 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const 
VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same command as the pointer overload above, taking the info struct by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Thin wrapper over vkGetAccelerationStructureDeviceAddressKHR (C-style pointer interface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<DeviceAddress>(
      d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference parameter, device address returned directly.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
    Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkDeviceAddress result =
      d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );

    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records a query write of acceleration-structure properties; count and array are passed separately.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t                                               accelerationStructureCount,
                                                             const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
                                                             VULKAN_HPP_NAMESPACE::QueryType                        queryType,
                                                             VULKAN_HPP_NAMESPACE::QueryPool                        queryPool,
                                                             uint32_t                                               firstQuery,
                                                             Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
                                                     accelerationStructureCount,
                                                     reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
                                                     static_cast<VkQueryType>( queryType ),
                                                     static_cast<VkQueryPool>( queryPool ),
                                                     firstQuery );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the ArrayProxy supplies both the element count and the data pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType                                                                queryType,
    VULKAN_HPP_NAMESPACE::QueryPool                                                                queryPool,
    uint32_t                                                                                       firstQuery,
    Dispatch const &                                                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
                                                     accelerationStructures.size(),
                                                     reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
                                                     static_cast<VkQueryType>( queryType ),
                                                     static_cast<VkQueryPool>( queryPool ),
                                                     firstQuery );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
                                                                           VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR *     pCompatibility,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
                                                        reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
                                                        reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the compatibility value instead of writing through an out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
    Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
    d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
                                                        reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
                                                        reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );

    return compatibility;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR                 buildType,
                                                                        const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
                                                                        const uint32_t *                                                        pMaxPrimitiveCounts,
                                                                        VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR *          pSizeInfo,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetAccelerationStructureBuildSizesKHR( m_device,
                                               static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
                                               reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
                                               pMaxPrimitiveCounts,
                                               reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: checks that exactly one max-primitive count is supplied per geometry
  // (assert under VULKAN_HPP_NO_EXCEPTIONS, LogicError otherwise) and returns the size info by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
    Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR                 buildType,
                                                   const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
                                                   VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const &                maxPrimitiveCounts,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
# else
    if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
    d.vkGetAccelerationStructureBuildSizesKHR( m_device,
                                               static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
                                               reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
                                               maxPrimitiveCounts.data(),
                                               reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );

    return sizeInfo;
  }
#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_ray_tracing_pipeline ===

  // Records a ray-tracing dispatch; the four shader-binding-table regions are passed as pointers.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                                                      uint32_t                                                    width,
                                                      uint32_t                                                    height,
                                                      uint32_t                                                    depth,
                                                      Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysKHR( m_commandBuffer,
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
                         width,
                         height,
                         depth );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: binding-table regions passed by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                                                      uint32_t                                                    width,
                                                      uint32_t                                                    height,
                                                      uint32_t                                                    depth,
                                                      Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdTraceRaysKHR( m_commandBuffer,
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
                         width,
                         height,
                         depth );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
                                          VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
                                          uint32_t                                                      createInfoCount,
                                          const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *             pAllocator,
                                          VULKAN_HPP_NAMESPACE::Pipeline *                              pPipelines,
                                          Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device,
                                                                  static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                                                  static_cast<VkPipelineCache>( pipelineCache ),
                                                                  createInfoCount,
                                                                  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns a ResultValue (result is not discarded) because besides eSuccess the
  // accepted codes include eOperationDeferredKHR, eOperationNotDeferredKHR and ePipelineCompileRequiredEXT.
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                                      deferredOperation,
                                          VULKAN_HPP_NAMESPACE::PipelineCache                                                             pipelineCache,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                       allocator,
                                          Dispatch const &                                                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }

  // Variant taking a caller-supplied allocator for the returned pipeline vector
  // (B0 is SFINAE-constrained to allocators of Pipeline).
  template <typename PipelineAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                                      deferredOperation,
                                          VULKAN_HPP_NAMESPACE::PipelineCache                                                             pipelineCache,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                       allocator,
                                          PipelineAllocator &                                                                             pipelineAllocator,
                                          Dispatch const &                                                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }

  // Single-pipeline convenience: forwards one create-info to vkCreateRayTracingPipelinesKHR.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
                                         VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
                                         const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>     allocator,
                                         Dispatch const &                                              d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: wraps each created Pipeline in a UniqueHandle that destroys it via this Device.
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesKHRUnique(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                                      deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                                                             pipelineCache,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                       allocator,
      Dispatch const &                                                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }

  // Unique-handle variant with a caller-supplied allocator for the UniqueHandle vector.
  template <typename Dispatch,
            typename PipelineAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesKHRUnique(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                                      deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                                                             pipelineCache,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                       allocator,
      PipelineAllocator &                                                                             pipelineAllocator,
      Dispatch const &                                                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }

  // Single-pipeline unique-handle convenience wrapper.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
                                               VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
                                               const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>     allocator,
                                               Dispatch const &                                              d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                                            uint32_t                       firstGroup,
                                                                                            uint32_t                       groupCount,
                                                                                            size_t                         dataSize,
                                                                                            void *                         pData,
                                                                                            Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the handle data as a vector; dataSize must be a multiple of sizeof( DataType ).
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesKHR(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );

    return createResultValueType( result, data );
  }

  // Single-value convenience: fetches exactly sizeof( DataType ) bytes of handle data.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );

    return createResultValueType( result, data );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                                                         uint32_t                       firstGroup,
                                                                                                         uint32_t                       groupCount,
                                                                                                         size_t                         dataSize,
                                                                                                         void *                         pData,
                                                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: capture-replay handle data as a vector; dataSize must be a multiple of sizeof( DataType ).
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
    Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VULKAN_HPP_NAMESPACE::Result             result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );

    return createResultValueType( result, data );
  }

  // Single-value convenience for capture-replay handle data.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    DataType                     data;
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );

    return createResultValueType( result, data );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Indirect trace: dispatch dimensions are read from a buffer at indirectDeviceAddress.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                                                              VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
                                                              Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: binding-table regions passed by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                                                              VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
                                                              Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Queries the stack size of one shader within a shader group of a ray-tracing pipeline.
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline             pipeline,
                                                                             uint32_t                                   group,
                                                                             VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<DeviceSize>(
      d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
  }

  // Sets the dynamic ray-tracing pipeline stack size for subsequent trace commands.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
  }

  //=== VK_KHR_sampler_ycbcr_conversion ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
                                             VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *                 pYcbcrConversion,
                                             Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
                                                                     reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
                                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                     reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
    Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
                                             Optional<const
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15262 Dispatch const & d ) const 15263 { 15264 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15265 15266 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 15267 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR( 15268 m_device, 15269 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 15270 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15271 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 15272 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); 15273 15274 return createResultValueType( result, ycbcrConversion ); 15275 } 15276 15277 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15278 template <typename Dispatch> 15279 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionKHRUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15280 Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, 15281 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15282 Dispatch const & d ) const 15283 { 15284 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15285 15286 VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; 15287 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversionKHR( 15288 m_device, 15289 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), 15290 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15291 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); 15292 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); 15293 15294 return createResultValueType( 15295 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 15296 } 15297 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15298 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15299 15300 template <typename Dispatch> destroySamplerYcbcrConversionKHR(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const15301 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 15302 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15303 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15304 { 15305 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15306 d.vkDestroySamplerYcbcrConversionKHR( 15307 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 15308 } 15309 15310 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15311 template <typename Dispatch> destroySamplerYcbcrConversionKHR(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15312 VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, 15313 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15314 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15315 { 15316 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15317 15318 d.vkDestroySamplerYcbcrConversionKHR( 15319 m_device, 15320 
static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), 15321 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 15322 } 15323 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15324 15325 //=== VK_KHR_bind_memory2 === 15326 15327 template <typename Dispatch> bindBufferMemory2KHR(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,Dispatch const & d) const15328 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, 15329 const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, 15330 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15331 { 15332 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15333 return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); 15334 } 15335 15336 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15337 template <typename Dispatch> 15338 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindBufferMemory2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,Dispatch const & d) const15339 Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, 15340 Dispatch const & d ) const 15341 { 15342 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15343 15344 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15345 d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) ); 15346 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); 15347 15348 return createResultValueType( result ); 15349 } 15350 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15351 15352 template <typename Dispatch> 
bindImageMemory2KHR(uint32_t bindInfoCount,const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,Dispatch const & d) const15353 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, 15354 const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, 15355 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15356 { 15357 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15358 return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); 15359 } 15360 15361 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15362 template <typename Dispatch> 15363 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindImageMemory2KHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,Dispatch const & d) const15364 Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const 15365 { 15366 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15367 15368 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15369 d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) ); 15370 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); 15371 15372 return createResultValueType( result ); 15373 } 15374 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15375 15376 //=== VK_EXT_image_drm_format_modifier === 15377 15378 template <typename Dispatch> getImageDrmFormatModifierPropertiesEXT(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,Dispatch const & d) const15379 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( 15380 VULKAN_HPP_NAMESPACE::Image 
image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15381 { 15382 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15383 return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( 15384 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) ); 15385 } 15386 15387 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15388 template <typename Dispatch> 15389 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const15390 Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const 15391 { 15392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15393 15394 VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; 15395 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( 15396 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) ); 15397 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); 15398 15399 return createResultValueType( result, properties ); 15400 } 15401 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15402 15403 //=== VK_EXT_validation_cache === 15404 15405 template <typename Dispatch> createValidationCacheEXT(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,Dispatch const & d) const15406 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, 15407 
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15408 VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, 15409 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15410 { 15411 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15412 return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, 15413 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ), 15414 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 15415 reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) ); 15416 } 15417 15418 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15419 template <typename Dispatch> 15420 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15421 Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, 15422 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15423 Dispatch const & d ) const 15424 { 15425 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15426 15427 VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; 15428 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT( 15429 m_device, 15430 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), 15431 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15432 reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) ); 15433 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); 15434 15435 return createResultValueType( result, validationCache ); 15436 } 15437 15438 # ifndef VULKAN_HPP_NO_SMART_HANDLE 15439 template <typename Dispatch> 
15440 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type createValidationCacheEXTUnique(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15441 Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, 15442 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15443 Dispatch const & d ) const 15444 { 15445 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15446 15447 VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; 15448 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateValidationCacheEXT( 15449 m_device, 15450 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), 15451 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 15452 reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) ); 15453 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); 15454 15455 return createResultValueType( 15456 result, UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( validationCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 15457 } 15458 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 15459 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15460 15461 template <typename Dispatch> destroyValidationCacheEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const15462 VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 15463 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15464 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15465 { 
15466 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15467 d.vkDestroyValidationCacheEXT( 15468 m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 15469 } 15470 15471 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15472 template <typename Dispatch> destroyValidationCacheEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15473 VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 15474 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15475 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15476 { 15477 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15478 15479 d.vkDestroyValidationCacheEXT( 15480 m_device, 15481 static_cast<VkValidationCacheEXT>( validationCache ), 15482 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 15483 } 15484 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15485 15486 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const15487 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 15488 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 15489 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15490 { 15491 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15492 d.vkDestroyValidationCacheEXT( 15493 m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 15494 } 15495 15496 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15497 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ValidationCacheEXT 
validationCache,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const15498 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 15499 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 15500 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15501 { 15502 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15503 15504 d.vkDestroyValidationCacheEXT( 15505 m_device, 15506 static_cast<VkValidationCacheEXT>( validationCache ), 15507 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 15508 } 15509 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15510 15511 template <typename Dispatch> mergeValidationCachesEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,uint32_t srcCacheCount,const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,Dispatch const & d) const15512 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, 15513 uint32_t srcCacheCount, 15514 const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, 15515 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15516 { 15517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15518 return static_cast<Result>( d.vkMergeValidationCachesEXT( 15519 m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) ); 15520 } 15521 15522 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15523 template <typename Dispatch> 15524 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type mergeValidationCachesEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,Dispatch const & d) const15525 Device::mergeValidationCachesEXT( 
VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, 15526 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, 15527 Dispatch const & d ) const 15528 { 15529 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15530 15531 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMergeValidationCachesEXT( 15532 m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) ); 15533 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); 15534 15535 return createResultValueType( result ); 15536 } 15537 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15538 15539 template <typename Dispatch> getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,size_t * pDataSize,void * pData,Dispatch const & d) const15540 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, 15541 size_t * pDataSize, 15542 void * pData, 15543 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15544 { 15545 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15546 return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) ); 15547 } 15548 15549 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15550 template <typename Uint8_tAllocator, typename Dispatch> 15551 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Dispatch const & d) const15552 Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const 15553 { 15554 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15555 15556 std::vector<uint8_t, 
Uint8_tAllocator> data; 15557 size_t dataSize; 15558 VULKAN_HPP_NAMESPACE::Result result; 15559 do 15560 { 15561 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15562 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) ); 15563 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 15564 { 15565 data.resize( dataSize ); 15566 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15567 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 15568 } 15569 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15570 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); 15571 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 15572 if ( dataSize < data.size() ) 15573 { 15574 data.resize( dataSize ); 15575 } 15576 return createResultValueType( result, data ); 15577 } 15578 15579 template <typename Uint8_tAllocator, 15580 typename Dispatch, 15581 typename B1, 15582 typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type> 15583 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const15584 Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 15585 { 15586 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15587 15588 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 15589 size_t dataSize; 15590 VULKAN_HPP_NAMESPACE::Result result; 15591 do 15592 { 15593 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15594 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( 
validationCache ), &dataSize, nullptr ) ); 15595 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 15596 { 15597 data.resize( dataSize ); 15598 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 15599 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 15600 } 15601 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 15602 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); 15603 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 15604 if ( dataSize < data.size() ) 15605 { 15606 data.resize( dataSize ); 15607 } 15608 return createResultValueType( result, data ); 15609 } 15610 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15611 15612 //=== VK_NV_shading_rate_image === 15613 15614 template <typename Dispatch> bindShadingRateImageNV(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,Dispatch const & d) const15615 VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, 15616 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 15617 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15618 { 15619 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15620 d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); 15621 } 15622 15623 template <typename Dispatch> setViewportShadingRatePaletteNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,Dispatch const & d) const15624 VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, 15625 uint32_t viewportCount, 15626 const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, 15627 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15628 { 15629 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); 15630 d.vkCmdSetViewportShadingRatePaletteNV( 15631 m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) ); 15632 } 15633 15634 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 15635 template <typename Dispatch> setViewportShadingRatePaletteNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,Dispatch const & d) const15636 VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( 15637 uint32_t firstViewport, 15638 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, 15639 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15640 { 15641 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15642 15643 d.vkCmdSetViewportShadingRatePaletteNV( 15644 m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) ); 15645 } 15646 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 15647 15648 template <typename Dispatch> setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,uint32_t customSampleOrderCount,const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,Dispatch const & d) const15649 VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, 15650 uint32_t customSampleOrderCount, 15651 const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, 15652 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 15653 { 15654 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 15655 d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, 15656 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), 15657 customSampleOrderCount, 15658 reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) ); 15659 } 15660 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes an ArrayProxy of custom sample orders; the count/pointer pair
  // passed to the C entry point is derived from the proxy's size()/data().
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV                                            sampleOrderType,
                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
                                           Dispatch const &                                                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
                                   static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
                                   customSampleOrders.size(),
                                   reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_NV_ray_tracing ===

  // Raw overload: forwards the C-style pointer parameters unchanged and returns the VkResult.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *               pAllocator,
                                           VULKAN_HPP_NAMESPACE::AccelerationStructureNV *                 pAccelerationStructure,
                                           Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device,
                                                                   reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
                                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                   reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created handle by value, checking the result via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
    Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>       allocator,
                                           Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
    VULKAN_HPP_NAMESPACE::Result                  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );

    return createResultValueType( result, accelerationStructure );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: wraps the created handle in a UniqueHandle whose deleter destroys it
  // on this device with the same allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
    Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>       allocator,
                                                 Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
    VULKAN_HPP_NAMESPACE::Result                  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateAccelerationStructureNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" );

    return createResultValueType( result,
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: destroys the acceleration structure using C-style allocator pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV     accelerationStructure,
                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                 Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same destroy, with an Optional allocator instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV             accelerationStructure,
                                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                 Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyAccelerationStructureNV(
      m_device,
      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy() overload dispatching to vkDestroyAccelerationStructureNV (raw allocator pointer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV     accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV             accelerationStructure,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyAccelerationStructureNV(
      m_device,
      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: fills *pMemoryRequirements via the driver.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
                                                          VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR *                              pMemoryRequirements,
                                                          Dispatch const &                                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
                                                      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
                                                      reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the requirements struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
    Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
                                                          Dispatch const &                                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
                                                      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
                                                      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );

    return memoryRequirements;
  }

  // StructureChain overload: the driver writes into the MemoryRequirements2KHR element of the chain,
  // so chained extension structures get filled as well.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
                                                          Dispatch const &                                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR &   memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
                                                      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
                                                      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: binds memory to acceleration structures; returns the VkResult.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
    uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: ArrayProxy of bind infos; the result is checked and returned as ResultValueType<void>.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindAccelerationStructureMemoryNV(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );

    return createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Raw overload: records an acceleration-structure build into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              instanceData,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          instanceOffset,
                                                                      VULKAN_HPP_NAMESPACE::Bool32                              update,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             dst,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             src,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              scratch,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          scratchOffset,
                                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: identical call, but the build info is passed by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              instanceData,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          instanceOffset,
                                                                      VULKAN_HPP_NAMESPACE::Bool32                              update,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             dst,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             src,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              scratch,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          scratchOffset,
                                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records a copy between two acceleration structures with the given copy mode.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV          dst,
                                                                     VULKAN_HPP_NAMESPACE::AccelerationStructureNV          src,
                                                                     VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
                                                                     Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
                                        static_cast<VkAccelerationStructureNV>( dst ),
                                        static_cast<VkAccelerationStructureNV>( src ),
                                        static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
  }

  // Records a ray-trace dispatch: each shader-binding-table region is described by
  // a (buffer, offset[, stride]) triple, followed by the launch dimensions.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer     raygenShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::Buffer     missShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer     hitShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer     callableShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
                                                     uint32_t                         width,
                                                     uint32_t                         height,
                                                     uint32_t                         depth,
                                                     Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysNV( m_commandBuffer,
                        static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
                        static_cast<VkBuffer>( missShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( missShaderBindingOffset ),
                        static_cast<VkDeviceSize>( missShaderBindingStride ),
                        static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( hitShaderBindingOffset ),
                        static_cast<VkDeviceSize>( hitShaderBindingStride ),
                        static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( callableShaderBindingOffset ),
                        static_cast<VkDeviceSize>( callableShaderBindingStride ),
                        width,
                        height,
                        depth );
  }

  // Raw overload: creates createInfoCount pipelines into the caller-provided array; returns the VkResult.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
                                                                                     uint32_t                                                     createInfoCount,
                                                                                     const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
                                                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks *            pAllocator,
                                                                                     VULKAN_HPP_NAMESPACE::Pipeline *                             pPipelines,
                                                                                     Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
                                                                 static_cast<VkPipelineCache>( pipelineCache ),
                                                                 createInfoCount,
                                                                 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
                                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                 reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns a vector of pipelines. Note that ePipelineCompileRequiredEXT is a
  // success code here, so the raw Result is forwarded in the ResultValue.
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache                                                            pipelineCache,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                      allocator,
                                         Dispatch const &                                                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }

  // Same as above, additionally seeding the result vector with a caller-supplied allocator instance.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache                                                            pipelineCache,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                      allocator,
                                         PipelineAllocator &                                                                            pipelineAllocator,
                                         Dispatch const &                                                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VULKAN_HPP_NAMESPACE::Result                                   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }

  // Single-pipeline convenience: calls the C entry point with createInfoCount == 1.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
                                        const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>    allocator,
                                        Dispatch const &                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle batch overload: creates the pipelines into a plain vector, then wraps each one
  // in a UniqueHandle sharing a single ObjectDestroy deleter.
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache                                                            pipelineCache,
                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                      allocator,
                                               Dispatch const &                                                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }

  // Same as above, seeding the UniqueHandle vector with a caller-supplied allocator instance.
  template <typename Dispatch,
            typename PipelineAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache                                                            pipelineCache,
                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                      allocator,
                                               PipelineAllocator &                                                                            pipelineAllocator,
                                               Dispatch const &                                                                               d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VULKAN_HPP_NAMESPACE::Result                result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }

  // Single-pipeline Unique-handle convenience (createInfoCount == 1).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
                                              const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>    allocator,
                                              Dispatch const &                                             d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VULKAN_HPP_NAMESPACE::Result   result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    resultCheck( result,
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );

    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

template <typename Dispatch> getRayTracingShaderGroupHandlesNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData,Dispatch const & d) const16143 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 16144 uint32_t firstGroup, 16145 uint32_t groupCount, 16146 size_t dataSize, 16147 void * pData, 16148 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16149 { 16150 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16151 return static_cast<Result>( 16152 d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); 16153 } 16154 16155 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16156 template <typename DataType, typename DataTypeAllocator, typename Dispatch> getRayTracingShaderGroupHandlesNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const16157 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesNV( 16158 VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const 16159 { 16160 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16161 16162 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 16163 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 16164 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV( 16165 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 16166 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); 16167 16168 return createResultValueType( result, 
data ); 16169 } 16170 16171 template <typename DataType, typename Dispatch> 16172 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,Dispatch const & d) const16173 Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const 16174 { 16175 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16176 16177 DataType data; 16178 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingShaderGroupHandlesNV( 16179 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 16180 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); 16181 16182 return createResultValueType( result, data ); 16183 } 16184 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16185 16186 template <typename Dispatch> getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,size_t dataSize,void * pData,Dispatch const & d) const16187 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, 16188 size_t dataSize, 16189 void * pData, 16190 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16191 { 16192 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16193 return static_cast<Result>( 16194 d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) ); 16195 } 16196 16197 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16198 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 16199 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, 
DataTypeAllocator>>::type getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,size_t dataSize,Dispatch const & d) const16200 Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const 16201 { 16202 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16203 16204 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 16205 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 16206 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV( 16207 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); 16208 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); 16209 16210 return createResultValueType( result, data ); 16211 } 16212 16213 template <typename DataType, typename Dispatch> 16214 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,Dispatch const & d) const16215 Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const 16216 { 16217 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16218 16219 DataType data; 16220 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureHandleNV( 16221 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); 16222 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); 16223 16224 return createResultValueType( result, data ); 16225 } 16226 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16227 16228 template <typename Dispatch> writeAccelerationStructuresPropertiesNV(uint32_t accelerationStructureCount,const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const16229 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, 16230 const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, 16231 VULKAN_HPP_NAMESPACE::QueryType queryType, 16232 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 16233 uint32_t firstQuery, 16234 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16235 { 16236 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16237 d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, 16238 accelerationStructureCount, 16239 reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ), 16240 static_cast<VkQueryType>( queryType ), 16241 static_cast<VkQueryPool>( queryPool ), 16242 firstQuery ); 16243 } 16244 16245 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16246 template <typename Dispatch> writeAccelerationStructuresPropertiesNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const16247 VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( 16248 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, 16249 VULKAN_HPP_NAMESPACE::QueryType queryType, 16250 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 16251 uint32_t firstQuery, 16252 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16253 { 16254 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16255 16256 
d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, 16257 accelerationStructures.size(), 16258 reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ), 16259 static_cast<VkQueryType>( queryType ), 16260 static_cast<VkQueryPool>( queryPool ), 16261 firstQuery ); 16262 } 16263 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16264 16265 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 16266 template <typename Dispatch> compileDeferredNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t shader,Dispatch const & d) const16267 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 16268 uint32_t shader, 16269 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16270 { 16271 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16272 return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) ); 16273 } 16274 #else 16275 template <typename Dispatch> 16276 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type compileDeferredNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t shader,Dispatch const & d) const16277 Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const 16278 { 16279 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16280 16281 VULKAN_HPP_NAMESPACE::Result result = 16282 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) ); 16283 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); 16284 16285 return createResultValueType( result ); 16286 } 16287 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16288 16289 //=== VK_KHR_maintenance3 === 16290 16291 template <typename Dispatch> getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * 
pCreateInfo,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,Dispatch const & d) const16292 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, 16293 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, 16294 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16295 { 16296 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16297 d.vkGetDescriptorSetLayoutSupportKHR( 16298 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); 16299 } 16300 16301 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16302 template <typename Dispatch> 16303 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const16304 Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 16305 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16306 { 16307 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16308 16309 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 16310 d.vkGetDescriptorSetLayoutSupportKHR( 16311 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 16312 16313 return support; 16314 } 16315 16316 template <typename X, typename Y, typename... 
Z, typename Dispatch> 16317 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,Dispatch const & d) const16318 Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, 16319 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16320 { 16321 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16322 16323 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 16324 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); 16325 d.vkGetDescriptorSetLayoutSupportKHR( 16326 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); 16327 16328 return structureChain; 16329 } 16330 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16331 16332 //=== VK_KHR_draw_indirect_count === 16333 16334 template <typename Dispatch> drawIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const16335 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 16336 VULKAN_HPP_NAMESPACE::DeviceSize offset, 16337 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 16338 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 16339 uint32_t maxDrawCount, 16340 uint32_t stride, 16341 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16342 { 16343 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16344 d.vkCmdDrawIndirectCountKHR( m_commandBuffer, 16345 static_cast<VkBuffer>( buffer ), 16346 static_cast<VkDeviceSize>( offset ), 16347 static_cast<VkBuffer>( countBuffer ), 16348 
static_cast<VkDeviceSize>( countBufferOffset ), 16349 maxDrawCount, 16350 stride ); 16351 } 16352 16353 template <typename Dispatch> drawIndexedIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const16354 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 16355 VULKAN_HPP_NAMESPACE::DeviceSize offset, 16356 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 16357 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 16358 uint32_t maxDrawCount, 16359 uint32_t stride, 16360 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16361 { 16362 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16363 d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, 16364 static_cast<VkBuffer>( buffer ), 16365 static_cast<VkDeviceSize>( offset ), 16366 static_cast<VkBuffer>( countBuffer ), 16367 static_cast<VkDeviceSize>( countBufferOffset ), 16368 maxDrawCount, 16369 stride ); 16370 } 16371 16372 //=== VK_EXT_external_memory_host === 16373 16374 template <typename Dispatch> 16375 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,Dispatch const & d) const16376 Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 16377 const void * pHostPointer, 16378 VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, 16379 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16380 { 16381 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16382 return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, 16383 
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 16384 pHostPointer, 16385 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) ); 16386 } 16387 16388 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16389 template <typename Dispatch> 16390 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,Dispatch const & d) const16391 Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 16392 const void * pHostPointer, 16393 Dispatch const & d ) const 16394 { 16395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16396 16397 VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; 16398 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16399 d.vkGetMemoryHostPointerPropertiesEXT( m_device, 16400 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 16401 pHostPointer, 16402 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) ); 16403 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); 16404 16405 return createResultValueType( result, memoryHostPointerProperties ); 16406 } 16407 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16408 16409 //=== VK_AMD_buffer_marker === 16410 16411 template <typename Dispatch> writeBufferMarkerAMD(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,uint32_t marker,Dispatch const & d) const16412 VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, 16413 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 16414 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 16415 
uint32_t marker, 16416 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16417 { 16418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16419 d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, 16420 static_cast<VkPipelineStageFlagBits>( pipelineStage ), 16421 static_cast<VkBuffer>( dstBuffer ), 16422 static_cast<VkDeviceSize>( dstOffset ), 16423 marker ); 16424 } 16425 16426 //=== VK_EXT_calibrated_timestamps === 16427 16428 template <typename Dispatch> getCalibrateableTimeDomainsEXT(uint32_t * pTimeDomainCount,VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,Dispatch const & d) const16429 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, 16430 VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, 16431 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16432 { 16433 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16434 return static_cast<Result>( 16435 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); 16436 } 16437 16438 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16439 template <typename TimeDomainKHRAllocator, typename Dispatch> 16440 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const & d) const16441 PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const 16442 { 16443 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16444 16445 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; 16446 uint32_t timeDomainCount; 16447 VULKAN_HPP_NAMESPACE::Result result; 16448 do 16449 { 16450 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); 16451 if ( ( result == 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 16452 { 16453 timeDomains.resize( timeDomainCount ); 16454 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16455 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 16456 } 16457 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 16458 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); 16459 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 16460 if ( timeDomainCount < timeDomains.size() ) 16461 { 16462 timeDomains.resize( timeDomainCount ); 16463 } 16464 return createResultValueType( result, timeDomains ); 16465 } 16466 16467 template <typename TimeDomainKHRAllocator, 16468 typename Dispatch, 16469 typename B1, 16470 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 16471 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsEXT(TimeDomainKHRAllocator & timeDomainKHRAllocator,Dispatch const & d) const16472 PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const 16473 { 16474 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16475 16476 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); 16477 uint32_t timeDomainCount; 16478 VULKAN_HPP_NAMESPACE::Result result; 16479 do 16480 { 16481 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); 16482 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 16483 { 16484 timeDomains.resize( timeDomainCount ); 16485 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 16486 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 16487 } 16488 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 16489 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); 16490 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 16491 if ( timeDomainCount < timeDomains.size() ) 16492 { 16493 timeDomains.resize( timeDomainCount ); 16494 } 16495 return createResultValueType( result, timeDomains ); 16496 } 16497 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16498 16499 template <typename Dispatch> getCalibratedTimestampsEXT(uint32_t timestampCount,const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation,Dispatch const & d) const16500 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, 16501 const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, 16502 uint64_t * pTimestamps, 16503 uint64_t * pMaxDeviation, 16504 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16505 { 16506 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16507 return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( 16508 m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); 16509 } 16510 16511 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16512 template <typename Uint64_tAllocator, typename Dispatch> 16513 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Dispatch const & d) const16514 Device::getCalibratedTimestampsEXT( 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 16515 Dispatch const & d ) const 16516 { 16517 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16518 16519 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 16520 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); 16521 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 16522 uint64_t & maxDeviation = data_.second; 16523 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT( 16524 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 16525 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); 16526 16527 return createResultValueType( result, data_ ); 16528 } 16529 16530 template <typename Uint64_tAllocator, 16531 typename Dispatch, 16532 typename B0, 16533 typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type> 16534 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Uint64_tAllocator & uint64_tAllocator,Dispatch const & d) const16535 Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 16536 Uint64_tAllocator & uint64_tAllocator, 16537 Dispatch const & d ) const 16538 { 16539 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16540 16541 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 16542 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), 
uint64_tAllocator ), std::forward_as_tuple( 0 ) ); 16543 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 16544 uint64_t & maxDeviation = data_.second; 16545 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsEXT( 16546 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 16547 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); 16548 16549 return createResultValueType( result, data_ ); 16550 } 16551 16552 template <typename Dispatch> 16553 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type getCalibratedTimestampEXT(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo,Dispatch const & d) const16554 Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const 16555 { 16556 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16557 16558 std::pair<uint64_t, uint64_t> data_; 16559 uint64_t & timestamp = data_.first; 16560 uint64_t & maxDeviation = data_.second; 16561 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16562 d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( ×tampInfo ), ×tamp, &maxDeviation ) ); 16563 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); 16564 16565 return createResultValueType( result, data_ ); 16566 } 16567 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16568 16569 //=== VK_NV_mesh_shader === 16570 16571 template <typename Dispatch> drawMeshTasksNV(uint32_t taskCount,uint32_t firstTask,Dispatch const & d) const16572 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
16573 { 16574 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16575 d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); 16576 } 16577 16578 template <typename Dispatch> drawMeshTasksIndirectNV(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const16579 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, 16580 VULKAN_HPP_NAMESPACE::DeviceSize offset, 16581 uint32_t drawCount, 16582 uint32_t stride, 16583 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16584 { 16585 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16586 d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 16587 } 16588 16589 template <typename Dispatch> drawMeshTasksIndirectCountNV(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const16590 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, 16591 VULKAN_HPP_NAMESPACE::DeviceSize offset, 16592 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 16593 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 16594 uint32_t maxDrawCount, 16595 uint32_t stride, 16596 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16597 { 16598 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16599 d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, 16600 static_cast<VkBuffer>( buffer ), 16601 static_cast<VkDeviceSize>( offset ), 16602 static_cast<VkBuffer>( countBuffer ), 16603 static_cast<VkDeviceSize>( countBufferOffset ), 16604 maxDrawCount, 16605 stride ); 16606 } 16607 16608 //=== VK_NV_scissor_exclusive === 16609 16610 template <typename Dispatch> 
setExclusiveScissorEnableNV(uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,Dispatch const & d) const16611 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, 16612 uint32_t exclusiveScissorCount, 16613 const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables, 16614 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16615 { 16616 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16617 d.vkCmdSetExclusiveScissorEnableNV( 16618 m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) ); 16619 } 16620 16621 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16622 template <typename Dispatch> 16623 VULKAN_HPP_INLINE void setExclusiveScissorEnableNV(uint32_t firstExclusiveScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables,Dispatch const & d) const16624 CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, 16625 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables, 16626 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16627 { 16628 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16629 16630 d.vkCmdSetExclusiveScissorEnableNV( 16631 m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) ); 16632 } 16633 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16634 16635 template <typename Dispatch> setExclusiveScissorNV(uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,Dispatch const & d) const16636 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, 16637 uint32_t exclusiveScissorCount, 16638 const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, 
16639 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16640 { 16641 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16642 d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) ); 16643 } 16644 16645 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16646 template <typename Dispatch> setExclusiveScissorNV(uint32_t firstExclusiveScissor,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,Dispatch const & d) const16647 VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, 16648 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, 16649 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16650 { 16651 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16652 16653 d.vkCmdSetExclusiveScissorNV( 16654 m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) ); 16655 } 16656 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16657 16658 //=== VK_NV_device_diagnostic_checkpoints === 16659 16660 template <typename Dispatch> setCheckpointNV(const void * pCheckpointMarker,Dispatch const & d) const16661 VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16662 { 16663 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16664 d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); 16665 } 16666 16667 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16668 template <typename CheckpointMarkerType, typename Dispatch> setCheckpointNV(CheckpointMarkerType const & checkpointMarker,Dispatch const & d) const16669 VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16670 { 16671 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16672 16673 d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) ); 16674 } 16675 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16676 16677 template <typename Dispatch> getCheckpointDataNV(uint32_t * pCheckpointDataCount,VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,Dispatch const & d) const16678 VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, 16679 VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, 16680 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16681 { 16682 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16683 d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) ); 16684 } 16685 16686 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16687 template <typename CheckpointDataNVAllocator, typename Dispatch> 16688 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV(Dispatch const & d) const16689 Queue::getCheckpointDataNV( Dispatch const & d ) const 16690 { 16691 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16692 16693 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData; 16694 uint32_t checkpointDataCount; 16695 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); 16696 checkpointData.resize( checkpointDataCount ); 16697 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); 16698 16699 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 16700 if ( checkpointDataCount < checkpointData.size() ) 16701 { 16702 checkpointData.resize( checkpointDataCount ); 16703 } 16704 return checkpointData; 16705 } 16706 16707 template <typename CheckpointDataNVAllocator, 16708 typename Dispatch, 16709 typename B1, 
16710 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type> 16711 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV(CheckpointDataNVAllocator & checkpointDataNVAllocator,Dispatch const & d) const16712 Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const 16713 { 16714 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16715 16716 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator ); 16717 uint32_t checkpointDataCount; 16718 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); 16719 checkpointData.resize( checkpointDataCount ); 16720 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); 16721 16722 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 16723 if ( checkpointDataCount < checkpointData.size() ) 16724 { 16725 checkpointData.resize( checkpointDataCount ); 16726 } 16727 return checkpointData; 16728 } 16729 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16730 16731 //=== VK_KHR_timeline_semaphore === 16732 16733 template <typename Dispatch> getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore,uint64_t * pValue,Dispatch const & d) const16734 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 16735 uint64_t * pValue, 16736 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16737 { 16738 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16739 return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) ); 16740 } 16741 16742 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16743 template <typename Dispatch> 16744 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Dispatch const & d) const16745 Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const 16746 { 16747 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16748 16749 uint64_t value; 16750 VULKAN_HPP_NAMESPACE::Result result = 16751 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); 16752 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" ); 16753 16754 return createResultValueType( result, value ); 16755 } 16756 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16757 16758 template <typename Dispatch> waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,uint64_t timeout,Dispatch const & d) const16759 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, 16760 uint64_t timeout, 16761 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16762 { 16763 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16764 return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); 16765 } 16766 16767 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16768 template <typename Dispatch> 16769 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,uint64_t timeout,Dispatch const & d) const16770 Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const 16771 { 16772 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16773 16774 VULKAN_HPP_NAMESPACE::Result result = 16775 static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); 16776 resultCheck( 16777 result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); 16778 16779 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 16780 } 16781 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16782 16783 template <typename Dispatch> signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,Dispatch const & d) const16784 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, 16785 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16786 { 16787 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16788 return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); 16789 } 16790 16791 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16792 template <typename Dispatch> 16793 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,Dispatch const & d) const16794 Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const 16795 { 16796 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16797 16798 VULKAN_HPP_NAMESPACE::Result result = 16799 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); 16800 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); 16801 16802 return createResultValueType( result ); 16803 } 16804 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16805 16806 //=== VK_INTEL_performance_query === 16807 16808 template <typename Dispatch> 
initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,Dispatch const & d) const16809 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( 16810 const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16811 { 16812 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16813 return static_cast<Result>( 16814 d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) ); 16815 } 16816 16817 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16818 template <typename Dispatch> 16819 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo,Dispatch const & d) const16820 Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const 16821 { 16822 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16823 16824 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16825 d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) ); 16826 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); 16827 16828 return createResultValueType( result ); 16829 } 16830 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16831 16832 template <typename Dispatch> uninitializePerformanceApiINTEL(Dispatch const & d) const16833 VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16834 { 16835 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16836 d.vkUninitializePerformanceApiINTEL( m_device ); 16837 } 16838 16839 template 
<typename Dispatch> setPerformanceMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,Dispatch const & d) const16840 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, 16841 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16842 { 16843 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16844 return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) ); 16845 } 16846 16847 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16848 template <typename Dispatch> 16849 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setPerformanceMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo,Dispatch const & d) const16850 CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const 16851 { 16852 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16853 16854 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16855 d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) ); 16856 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); 16857 16858 return createResultValueType( result ); 16859 } 16860 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16861 16862 template <typename Dispatch> setPerformanceStreamMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,Dispatch const & d) const16863 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( 16864 const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16865 { 16866 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16867 return static_cast<Result>( 16868 d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) ); 16869 } 16870 16871 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16872 template <typename Dispatch> 16873 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo,Dispatch const & d) const16874 CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const 16875 { 16876 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16877 16878 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16879 d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) ); 16880 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); 16881 16882 return createResultValueType( result ); 16883 } 16884 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16885 16886 template <typename Dispatch> setPerformanceOverrideINTEL(const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,Dispatch const & d) const16887 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( 16888 const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16889 { 16890 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16891 return static_cast<Result>( 16892 d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) ); 16893 } 16894 16895 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16896 template <typename 
Dispatch> 16897 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setPerformanceOverrideINTEL(const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo,Dispatch const & d) const16898 CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const 16899 { 16900 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16901 16902 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16903 d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) ); 16904 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); 16905 16906 return createResultValueType( result ); 16907 } 16908 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16909 16910 template <typename Dispatch> 16911 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result acquirePerformanceConfigurationINTEL(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,Dispatch const & d) const16912 Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, 16913 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, 16914 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16915 { 16916 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16917 return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, 16918 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ), 16919 reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) ); 16920 } 16921 16922 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16923 template <typename Dispatch> 16924 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,Dispatch const & d) const16925 Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const 16926 { 16927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16928 16929 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; 16930 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16931 d.vkAcquirePerformanceConfigurationINTEL( m_device, 16932 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), 16933 reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) ); 16934 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); 16935 16936 return createResultValueType( result, configuration ); 16937 } 16938 16939 # ifndef VULKAN_HPP_NO_SMART_HANDLE 16940 template <typename Dispatch> 16941 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type acquirePerformanceConfigurationINTELUnique(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,Dispatch const & d) const16942 Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, 16943 Dispatch const & d ) const 16944 { 16945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16946 16947 VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; 16948 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16949 d.vkAcquirePerformanceConfigurationINTEL( m_device, 16950 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), 
16951 reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) ); 16952 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); 16953 16954 return createResultValueType( 16955 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( configuration, ObjectRelease<Device, Dispatch>( *this, d ) ) ); 16956 } 16957 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 16958 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 16959 16960 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 16961 template <typename Dispatch> releasePerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const16962 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 16963 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16964 { 16965 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16966 return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 16967 } 16968 #else 16969 template <typename Dispatch> 16970 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type releasePerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const16971 Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 16972 { 16973 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16974 16975 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16976 d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 16977 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); 
16978 16979 return createResultValueType( result ); 16980 } 16981 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16982 16983 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 16984 template <typename Dispatch> release(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const16985 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 16986 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16987 { 16988 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16989 return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 16990 } 16991 #else 16992 template <typename Dispatch> 16993 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type release(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const16994 Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 16995 { 16996 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 16997 16998 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 16999 d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 17000 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); 17001 17002 return createResultValueType( result ); 17003 } 17004 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17005 17006 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 17007 template <typename Dispatch> setPerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const17008 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, 17009 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
17010 { 17011 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17012 return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 17013 } 17014 #else 17015 template <typename Dispatch> 17016 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setPerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,Dispatch const & d) const17017 Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const 17018 { 17019 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17020 17021 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17022 d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); 17023 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); 17024 17025 return createResultValueType( result ); 17026 } 17027 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17028 17029 template <typename Dispatch> getPerformanceParameterINTEL(VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,Dispatch const & d) const17030 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, 17031 VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, 17032 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17033 { 17034 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17035 return static_cast<Result>( d.vkGetPerformanceParameterINTEL( 17036 m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) ); 17037 } 17038 17039 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17040 template 
<typename Dispatch> 17041 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL(VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,Dispatch const & d) const17042 Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const 17043 { 17044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17045 17046 VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; 17047 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPerformanceParameterINTEL( 17048 m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) ); 17049 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); 17050 17051 return createResultValueType( result, value ); 17052 } 17053 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17054 17055 //=== VK_AMD_display_native_hdr === 17056 17057 template <typename Dispatch> setLocalDimmingAMD(VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,Dispatch const & d) const17058 VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, 17059 VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, 17060 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17061 { 17062 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17063 d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) ); 17064 } 17065 17066 #if defined( VK_USE_PLATFORM_FUCHSIA ) 17067 //=== VK_FUCHSIA_imagepipe_surface === 17068 17069 template <typename Dispatch> 17070 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createImagePipeSurfaceFUCHSIA(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const17071 Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, 17072 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17073 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 17074 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17075 { 17076 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17077 return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, 17078 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ), 17079 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17080 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 17081 } 17082 17083 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17084 template <typename Dispatch> 17085 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17086 Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, 17087 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17088 Dispatch const & d ) const 17089 { 17090 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17091 17092 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 17093 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA( 17094 m_instance, 17095 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), 17096 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17097 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 17098 resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); 17099 17100 return createResultValueType( result, surface ); 17101 } 17102 17103 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17104 template <typename Dispatch> 17105 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique(const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17106 Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, 17107 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17108 Dispatch const & d ) const 17109 { 17110 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17111 17112 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 17113 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateImagePipeSurfaceFUCHSIA( 17114 m_instance, 17115 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), 17116 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17117 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 17118 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); 17119 17120 return createResultValueType( 17121 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 17122 } 17123 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17124 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17125 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 17126 17127 #if defined( VK_USE_PLATFORM_METAL_EXT ) 17128 //=== VK_EXT_metal_surface === 17129 17130 template <typename Dispatch> createMetalSurfaceEXT(const 
VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const17131 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, 17132 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17133 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 17134 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17135 { 17136 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17137 return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, 17138 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ), 17139 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17140 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 17141 } 17142 17143 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17144 template <typename Dispatch> 17145 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT(const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17146 Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, 17147 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17148 Dispatch const & d ) const 17149 { 17150 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17151 17152 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 17153 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17154 d.vkCreateMetalSurfaceEXT( m_instance, 17155 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), 17156 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17157 reinterpret_cast<VkSurfaceKHR *>( 
&surface ) ) ); 17158 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); 17159 17160 return createResultValueType( result, surface ); 17161 } 17162 17163 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17164 template <typename Dispatch> 17165 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMetalSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17166 Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, 17167 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17168 Dispatch const & d ) const 17169 { 17170 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17171 17172 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 17173 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17174 d.vkCreateMetalSurfaceEXT( m_instance, 17175 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), 17176 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17177 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 17178 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); 17179 17180 return createResultValueType( 17181 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 17182 } 17183 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17184 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17185 #endif /*VK_USE_PLATFORM_METAL_EXT*/ 17186 17187 //=== VK_KHR_fragment_shading_rate === 17188 17189 template <typename Dispatch> 17190 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getFragmentShadingRatesKHR(uint32_t * 
pFragmentShadingRateCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,Dispatch const & d) const17191 PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, 17192 VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, 17193 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17194 { 17195 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17196 return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 17197 m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) ); 17198 } 17199 17200 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17201 template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch> 17202 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 17203 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR(Dispatch const & d) const17204 PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const 17205 { 17206 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17207 17208 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates; 17209 uint32_t fragmentShadingRateCount; 17210 VULKAN_HPP_NAMESPACE::Result result; 17211 do 17212 { 17213 result = 17214 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); 17215 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) 17216 { 17217 fragmentShadingRates.resize( fragmentShadingRateCount ); 17218 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 17219 m_physicalDevice, &fragmentShadingRateCount, 
reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) ); 17220 } 17221 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17222 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); 17223 VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); 17224 if ( fragmentShadingRateCount < fragmentShadingRates.size() ) 17225 { 17226 fragmentShadingRates.resize( fragmentShadingRateCount ); 17227 } 17228 return createResultValueType( result, fragmentShadingRates ); 17229 } 17230 17231 template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, 17232 typename Dispatch, 17233 typename B1, 17234 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, int>::type> 17235 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 17236 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR(PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,Dispatch const & d) const17237 PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, 17238 Dispatch const & d ) const 17239 { 17240 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17241 17242 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates( 17243 physicalDeviceFragmentShadingRateKHRAllocator ); 17244 uint32_t fragmentShadingRateCount; 17245 VULKAN_HPP_NAMESPACE::Result result; 17246 do 17247 { 17248 result = 17249 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); 17250 if ( ( result == 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) 17251 { 17252 fragmentShadingRates.resize( fragmentShadingRateCount ); 17253 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( 17254 m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) ); 17255 } 17256 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17257 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); 17258 VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); 17259 if ( fragmentShadingRateCount < fragmentShadingRates.size() ) 17260 { 17261 fragmentShadingRates.resize( fragmentShadingRateCount ); 17262 } 17263 return createResultValueType( result, fragmentShadingRates ); 17264 } 17265 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17266 17267 template <typename Dispatch> setFragmentShadingRateKHR(const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const17268 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, 17269 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 17270 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17271 { 17272 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17273 d.vkCmdSetFragmentShadingRateKHR( 17274 m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 17275 } 17276 17277 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17278 template <typename Dispatch> setFragmentShadingRateKHR(const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const17279 VULKAN_HPP_INLINE void 
CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, 17280 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 17281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17282 { 17283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17284 17285 d.vkCmdSetFragmentShadingRateKHR( 17286 m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 17287 } 17288 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17289 17290 //=== VK_EXT_buffer_device_address === 17291 17292 template <typename Dispatch> getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const17293 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 17294 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17295 { 17296 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17297 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 17298 } 17299 17300 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17301 template <typename Dispatch> getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const17302 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 17303 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17304 { 17305 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17306 17307 VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 17308 17309 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 17310 } 17311 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17312 17313 //=== VK_EXT_tooling_info === 17314 17315 
template <typename Dispatch> getToolPropertiesEXT(uint32_t * pToolCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,Dispatch const & d) const17316 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, 17317 VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, 17318 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17319 { 17320 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17321 return static_cast<Result>( 17322 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); 17323 } 17324 17325 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17326 template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch> 17327 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 17328 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT(Dispatch const & d) const17329 PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const 17330 { 17331 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17332 17333 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; 17334 uint32_t toolCount; 17335 VULKAN_HPP_NAMESPACE::Result result; 17336 do 17337 { 17338 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); 17339 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 17340 { 17341 toolProperties.resize( toolCount ); 17342 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17343 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 17344 } 17345 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17346 resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); 17347 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 17348 if ( toolCount < toolProperties.size() ) 17349 { 17350 toolProperties.resize( toolCount ); 17351 } 17352 return createResultValueType( result, toolProperties ); 17353 } 17354 17355 template <typename PhysicalDeviceToolPropertiesAllocator, 17356 typename Dispatch, 17357 typename B1, 17358 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, int>::type> 17359 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 17360 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type getToolPropertiesEXT(PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,Dispatch const & d) const17361 PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const 17362 { 17363 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17364 17365 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( 17366 physicalDeviceToolPropertiesAllocator ); 17367 uint32_t toolCount; 17368 VULKAN_HPP_NAMESPACE::Result result; 17369 do 17370 { 17371 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); 17372 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) 17373 { 17374 toolProperties.resize( toolCount ); 17375 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17376 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); 17377 } 17378 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17379 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getToolPropertiesEXT" ); 17380 VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); 17381 if ( toolCount < toolProperties.size() ) 17382 { 17383 toolProperties.resize( toolCount ); 17384 } 17385 return createResultValueType( result, toolProperties ); 17386 } 17387 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17388 17389 //=== VK_KHR_present_wait === 17390 17391 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 17392 template <typename Dispatch> waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t presentId,uint64_t timeout,Dispatch const & d) const17393 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 17394 uint64_t presentId, 17395 uint64_t timeout, 17396 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17397 { 17398 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17399 return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); 17400 } 17401 #else 17402 template <typename Dispatch> 17403 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint64_t presentId,uint64_t timeout,Dispatch const & d) const17404 Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const 17405 { 17406 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17407 17408 VULKAN_HPP_NAMESPACE::Result result = 17409 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); 17410 resultCheck( result, 17411 VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", 17412 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 17413 17414 return static_cast<VULKAN_HPP_NAMESPACE::Result>( 
result ); 17415 } 17416 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17417 17418 //=== VK_NV_cooperative_matrix === 17419 17420 template <typename Dispatch> getCooperativeMatrixPropertiesNV(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,Dispatch const & d) const17421 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( 17422 uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17423 { 17424 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17425 return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 17426 m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) ); 17427 } 17428 17429 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17430 template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch> 17431 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 17432 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const & d) const17433 PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const 17434 { 17435 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17436 17437 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties; 17438 uint32_t propertyCount; 17439 VULKAN_HPP_NAMESPACE::Result result; 17440 do 17441 { 17442 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 17443 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 17444 { 17445 properties.resize( propertyCount ); 17446 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 17447 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); 17448 } 17449 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17450 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); 17451 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 17452 if ( propertyCount < properties.size() ) 17453 { 17454 properties.resize( propertyCount ); 17455 } 17456 return createResultValueType( result, properties ); 17457 } 17458 17459 template <typename CooperativeMatrixPropertiesNVAllocator, 17460 typename Dispatch, 17461 typename B1, 17462 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, int>::type> 17463 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 17464 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV(CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,Dispatch const & d) const17465 PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, 17466 Dispatch const & d ) const 17467 { 17468 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17469 17470 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties( 17471 cooperativeMatrixPropertiesNVAllocator ); 17472 uint32_t propertyCount; 17473 VULKAN_HPP_NAMESPACE::Result result; 17474 do 17475 { 17476 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); 17477 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 17478 { 17479 properties.resize( propertyCount ); 17480 result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 17481 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); 17482 } 17483 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17484 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); 17485 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 17486 if ( propertyCount < properties.size() ) 17487 { 17488 properties.resize( propertyCount ); 17489 } 17490 return createResultValueType( result, properties ); 17491 } 17492 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17493 17494 //=== VK_NV_coverage_reduction_mode === 17495 17496 template <typename Dispatch> getSupportedFramebufferMixedSamplesCombinationsNV(uint32_t * pCombinationCount,VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,Dispatch const & d) const17497 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( 17498 uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17499 { 17500 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17501 return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 17502 m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) ); 17503 } 17504 17505 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17506 template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch> 17507 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 17508 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const & d) const17509 
PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const 17510 { 17511 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17512 17513 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations; 17514 uint32_t combinationCount; 17515 VULKAN_HPP_NAMESPACE::Result result; 17516 do 17517 { 17518 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17519 d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); 17520 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) 17521 { 17522 combinations.resize( combinationCount ); 17523 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 17524 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); 17525 } 17526 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17527 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); 17528 VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); 17529 if ( combinationCount < combinations.size() ) 17530 { 17531 combinations.resize( combinationCount ); 17532 } 17533 return createResultValueType( result, combinations ); 17534 } 17535 17536 template <typename FramebufferMixedSamplesCombinationNVAllocator, 17537 typename Dispatch, 17538 typename B1, 17539 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, int>::type> 17540 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 17541 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type 
getSupportedFramebufferMixedSamplesCombinationsNV(FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,Dispatch const & d) const17542 PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( 17543 FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const 17544 { 17545 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17546 17547 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations( 17548 framebufferMixedSamplesCombinationNVAllocator ); 17549 uint32_t combinationCount; 17550 VULKAN_HPP_NAMESPACE::Result result; 17551 do 17552 { 17553 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17554 d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); 17555 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) 17556 { 17557 combinations.resize( combinationCount ); 17558 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( 17559 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); 17560 } 17561 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17562 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); 17563 VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); 17564 if ( combinationCount < combinations.size() ) 17565 { 17566 combinations.resize( combinationCount ); 17567 } 17568 return createResultValueType( result, combinations ); 17569 } 17570 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17571 17572 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 17573 //=== VK_EXT_full_screen_exclusive === 17574 17575 template <typename Dispatch> 17576 VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pPresentModeCount,VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,Dispatch const & d) const17577 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 17578 uint32_t * pPresentModeCount, 17579 VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, 17580 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17581 { 17582 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17583 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 17584 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 17585 pPresentModeCount, 17586 reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); 17587 } 17588 17589 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17590 template <typename PresentModeKHRAllocator, typename Dispatch> 17591 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const17592 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 17593 { 17594 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17595 17596 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes; 17597 uint32_t presentModeCount; 17598 VULKAN_HPP_NAMESPACE::Result result; 17599 do 17600 { 17601 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( 17602 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); 17603 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
presentModeCount ) 17604 { 17605 presentModes.resize( presentModeCount ); 17606 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17607 d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 17608 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 17609 &presentModeCount, 17610 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 17611 } 17612 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17613 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); 17614 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 17615 if ( presentModeCount < presentModes.size() ) 17616 { 17617 presentModes.resize( presentModeCount ); 17618 } 17619 return createResultValueType( result, presentModes ); 17620 } 17621 17622 template <typename PresentModeKHRAllocator, 17623 typename Dispatch, 17624 typename B1, 17625 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PresentModeKHR>::value, int>::type> 17626 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,PresentModeKHRAllocator & presentModeKHRAllocator,Dispatch const & d) const17627 PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 17628 PresentModeKHRAllocator & presentModeKHRAllocator, 17629 Dispatch const & d ) const 17630 { 17631 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17632 17633 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); 17634 uint32_t presentModeCount; 17635 VULKAN_HPP_NAMESPACE::Result result; 17636 do 17637 { 17638 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( 17639 
m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); 17640 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) 17641 { 17642 presentModes.resize( presentModeCount ); 17643 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 17644 d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, 17645 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 17646 &presentModeCount, 17647 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); 17648 } 17649 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 17650 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); 17651 VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); 17652 if ( presentModeCount < presentModes.size() ) 17653 { 17654 presentModes.resize( presentModeCount ); 17655 } 17656 return createResultValueType( result, presentModes ); 17657 } 17658 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17659 17660 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 17661 template <typename Dispatch> acquireFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const17662 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 17663 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17664 { 17665 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17666 return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 17667 } 17668 # else 17669 template <typename Dispatch> 17670 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const17671 Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR 
swapchain, Dispatch const & d ) const 17672 { 17673 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17674 17675 VULKAN_HPP_NAMESPACE::Result result = 17676 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 17677 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); 17678 17679 return createResultValueType( result ); 17680 } 17681 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17682 17683 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 17684 template <typename Dispatch> releaseFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const17685 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 17686 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17687 { 17688 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17689 return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 17690 } 17691 # else 17692 template <typename Dispatch> 17693 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const17694 Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 17695 { 17696 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17697 17698 VULKAN_HPP_NAMESPACE::Result result = 17699 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 17700 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); 17701 17702 return createResultValueType( result ); 17703 } 17704 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17705 
17706 template <typename Dispatch> 17707 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getGroupSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,Dispatch const & d) const17708 Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 17709 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, 17710 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17711 { 17712 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17713 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( 17714 m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); 17715 } 17716 17717 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17718 template <typename Dispatch> 17719 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const17720 Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 17721 { 17722 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17723 17724 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; 17725 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( 17726 m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); 17727 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); 17728 17729 return createResultValueType( result, modes ); 17730 } 17731 # endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17732 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 17733 17734 //=== VK_EXT_headless_surface === 17735 17736 template <typename Dispatch> createHeadlessSurfaceEXT(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const17737 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, 17738 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17739 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 17740 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17741 { 17742 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17743 return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, 17744 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), 17745 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17746 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 17747 } 17748 17749 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17750 template <typename Dispatch> 17751 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17752 Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, 17753 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17754 Dispatch const & d ) const 17755 { 17756 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17757 17758 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 17759 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT( 17760 m_instance, 17761 reinterpret_cast<const 
VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), 17762 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17763 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 17764 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); 17765 17766 return createResultValueType( result, surface ); 17767 } 17768 17769 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17770 template <typename Dispatch> 17771 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createHeadlessSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const17772 Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, 17773 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 17774 Dispatch const & d ) const 17775 { 17776 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17777 17778 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 17779 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateHeadlessSurfaceEXT( 17780 m_instance, 17781 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), 17782 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17783 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 17784 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); 17785 17786 return createResultValueType( 17787 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 17788 } 17789 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 17790 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17791 17792 //=== 
VK_KHR_buffer_device_address === 17793 17794 template <typename Dispatch> getBufferAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const17795 VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 17796 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17797 { 17798 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17799 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); 17800 } 17801 17802 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17803 template <typename Dispatch> getBufferAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const17804 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 17805 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17806 { 17807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17808 17809 VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 17810 17811 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 17812 } 17813 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17814 17815 template <typename Dispatch> getBufferOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,Dispatch const & d) const17816 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, 17817 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17818 { 17819 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17820 return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); 17821 } 17822 17823 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17824 template <typename Dispatch> 
getBufferOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,Dispatch const & d) const17825 VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, 17826 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17827 { 17828 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17829 17830 uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); 17831 17832 return result; 17833 } 17834 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17835 17836 template <typename Dispatch> getMemoryOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,Dispatch const & d) const17837 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, 17838 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17839 { 17840 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17841 return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); 17842 } 17843 17844 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17845 template <typename Dispatch> getMemoryOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,Dispatch const & d) const17846 VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, 17847 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17848 { 17849 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17850 17851 uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); 17852 17853 return result; 17854 } 17855 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17856 17857 //=== 
VK_EXT_line_rasterization === 17858 17859 template <typename Dispatch> 17860 VULKAN_HPP_INLINE void setLineStippleEXT(uint32_t lineStippleFactor,uint16_t lineStipplePattern,Dispatch const & d) const17861 CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17862 { 17863 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17864 d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); 17865 } 17866 17867 //=== VK_EXT_host_query_reset === 17868 17869 template <typename Dispatch> resetQueryPoolEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,Dispatch const & d) const17870 VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 17871 uint32_t firstQuery, 17872 uint32_t queryCount, 17873 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17874 { 17875 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17876 d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); 17877 } 17878 17879 //=== VK_EXT_extended_dynamic_state === 17880 17881 template <typename Dispatch> setCullModeEXT(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,Dispatch const & d) const17882 VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17883 { 17884 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17885 d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) ); 17886 } 17887 17888 template <typename Dispatch> setFrontFaceEXT(VULKAN_HPP_NAMESPACE::FrontFace frontFace,Dispatch const & d) const17889 VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17890 { 17891 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17892 
d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) ); 17893 } 17894 17895 template <typename Dispatch> setPrimitiveTopologyEXT(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,Dispatch const & d) const17896 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, 17897 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17898 { 17899 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17900 d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) ); 17901 } 17902 17903 template <typename Dispatch> setViewportWithCountEXT(uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::Viewport * pViewports,Dispatch const & d) const17904 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, 17905 const VULKAN_HPP_NAMESPACE::Viewport * pViewports, 17906 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17907 { 17908 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17909 d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); 17910 } 17911 17912 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17913 template <typename Dispatch> setViewportWithCountEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,Dispatch const & d) const17914 VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, 17915 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17916 { 17917 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17918 17919 d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); 17920 } 17921 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17922 17923 template <typename Dispatch> 17924 VULKAN_HPP_INLINE void 
setScissorWithCountEXT(uint32_t scissorCount,const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,Dispatch const & d) const17925 CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17926 { 17927 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17928 d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); 17929 } 17930 17931 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17932 template <typename Dispatch> setScissorWithCountEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,Dispatch const & d) const17933 VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, 17934 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17935 { 17936 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17937 17938 d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); 17939 } 17940 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17941 17942 template <typename Dispatch> bindVertexBuffers2EXT(uint32_t firstBinding,uint32_t bindingCount,const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,Dispatch const & d) const17943 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, 17944 uint32_t bindingCount, 17945 const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, 17946 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 17947 const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, 17948 const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, 17949 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17950 { 17951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17952 
d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, 17953 firstBinding, 17954 bindingCount, 17955 reinterpret_cast<const VkBuffer *>( pBuffers ), 17956 reinterpret_cast<const VkDeviceSize *>( pOffsets ), 17957 reinterpret_cast<const VkDeviceSize *>( pSizes ), 17958 reinterpret_cast<const VkDeviceSize *>( pStrides ) ); 17959 } 17960 17961 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17962 template <typename Dispatch> bindVertexBuffers2EXT(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,Dispatch const & d) const17963 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, 17964 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, 17965 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 17966 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, 17967 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, 17968 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 17969 { 17970 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 17971 # ifdef VULKAN_HPP_NO_EXCEPTIONS 17972 VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); 17973 VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); 17974 VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); 17975 # else 17976 if ( buffers.size() != offsets.size() ) 17977 { 17978 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); 17979 } 17980 if ( !sizes.empty() && buffers.size() != sizes.size() ) 17981 { 17982 throw 
LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); 17983 } 17984 if ( !strides.empty() && buffers.size() != strides.size() ) 17985 { 17986 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); 17987 } 17988 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 17989 17990 d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, 17991 firstBinding, 17992 buffers.size(), 17993 reinterpret_cast<const VkBuffer *>( buffers.data() ), 17994 reinterpret_cast<const VkDeviceSize *>( offsets.data() ), 17995 reinterpret_cast<const VkDeviceSize *>( sizes.data() ), 17996 reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); 17997 } 17998 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 17999 18000 template <typename Dispatch> setDepthTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,Dispatch const & d) const18001 VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18002 { 18003 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18004 d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) ); 18005 } 18006 18007 template <typename Dispatch> setDepthWriteEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,Dispatch const & d) const18008 VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18009 { 18010 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18011 d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) ); 18012 } 18013 18014 template <typename Dispatch> setDepthCompareOpEXT(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,Dispatch const & d) const18015 VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp 
depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18016 { 18017 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18018 d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) ); 18019 } 18020 18021 template <typename Dispatch> setDepthBoundsTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,Dispatch const & d) const18022 VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, 18023 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18024 { 18025 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18026 d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) ); 18027 } 18028 18029 template <typename Dispatch> setStencilTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,Dispatch const & d) const18030 VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18031 { 18032 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18033 d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) ); 18034 } 18035 18036 template <typename Dispatch> setStencilOpEXT(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,VULKAN_HPP_NAMESPACE::StencilOp failOp,VULKAN_HPP_NAMESPACE::StencilOp passOp,VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,VULKAN_HPP_NAMESPACE::CompareOp compareOp,Dispatch const & d) const18037 VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, 18038 VULKAN_HPP_NAMESPACE::StencilOp failOp, 18039 VULKAN_HPP_NAMESPACE::StencilOp passOp, 18040 VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, 18041 VULKAN_HPP_NAMESPACE::CompareOp compareOp, 18042 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18043 { 18044 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18045 
d.vkCmdSetStencilOpEXT( m_commandBuffer, 18046 static_cast<VkStencilFaceFlags>( faceMask ), 18047 static_cast<VkStencilOp>( failOp ), 18048 static_cast<VkStencilOp>( passOp ), 18049 static_cast<VkStencilOp>( depthFailOp ), 18050 static_cast<VkCompareOp>( compareOp ) ); 18051 } 18052 18053 //=== VK_KHR_deferred_host_operations === 18054 18055 template <typename Dispatch> createDeferredOperationKHR(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,Dispatch const & d) const18056 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18057 VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, 18058 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18059 { 18060 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18061 return static_cast<Result>( d.vkCreateDeferredOperationKHR( 18062 m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) ); 18063 } 18064 18065 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18066 template <typename Dispatch> 18067 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18068 Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 18069 { 18070 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18071 18072 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; 18073 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR( 18074 m_device, 18075 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks 
*>( allocator ) ), 18076 reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) ); 18077 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); 18078 18079 return createResultValueType( result, deferredOperation ); 18080 } 18081 18082 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18083 template <typename Dispatch> 18084 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type createDeferredOperationKHRUnique(Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18085 Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const 18086 { 18087 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18088 18089 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; 18090 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDeferredOperationKHR( 18091 m_device, 18092 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18093 reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) ); 18094 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); 18095 18096 return createResultValueType( 18097 result, UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 18098 } 18099 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18100 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18101 18102 template <typename Dispatch> destroyDeferredOperationKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const18103 VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR 
operation, 18104 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18105 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18106 { 18107 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18108 d.vkDestroyDeferredOperationKHR( 18109 m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 18110 } 18111 18112 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18113 template <typename Dispatch> destroyDeferredOperationKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18114 VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 18115 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18116 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18117 { 18118 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18119 18120 d.vkDestroyDeferredOperationKHR( 18121 m_device, 18122 static_cast<VkDeferredOperationKHR>( operation ), 18123 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 18124 } 18125 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18126 18127 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const18128 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 18129 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18130 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18131 { 18132 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18133 d.vkDestroyDeferredOperationKHR( 18134 m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 18135 } 18136 18137 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 18138 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18139 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 18140 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18141 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18142 { 18143 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18144 18145 d.vkDestroyDeferredOperationKHR( 18146 m_device, 18147 static_cast<VkDeferredOperationKHR>( operation ), 18148 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 18149 } 18150 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18151 18152 template <typename Dispatch> getDeferredOperationMaxConcurrencyKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const18153 VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 18154 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18155 { 18156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18157 return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ); 18158 } 18159 18160 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 18161 template <typename Dispatch> getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const18162 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 18163 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18164 { 18165 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18166 return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 
18167 } 18168 #else 18169 template <typename Dispatch> 18170 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const18171 Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18172 { 18173 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18174 18175 VULKAN_HPP_NAMESPACE::Result result = 18176 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 18177 18178 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 18179 } 18180 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 18181 18182 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 18183 template <typename Dispatch> deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const18184 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 18185 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18186 { 18187 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18188 return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); 18189 } 18190 #else 18191 template <typename Dispatch> deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,Dispatch const & d) const18192 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, 18193 Dispatch const & d ) const 18194 { 18195 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18196 18197 VULKAN_HPP_NAMESPACE::Result result = 18198 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( 
operation ) ) ); 18199 resultCheck( result, 18200 VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", 18201 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); 18202 18203 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 18204 } 18205 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 18206 18207 //=== VK_KHR_pipeline_executable_properties === 18208 18209 template <typename Dispatch> getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,uint32_t * pExecutableCount,VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,Dispatch const & d) const18210 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, 18211 uint32_t * pExecutableCount, 18212 VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, 18213 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18214 { 18215 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18216 return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, 18217 reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ), 18218 pExecutableCount, 18219 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) ); 18220 } 18221 18222 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18223 template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch> 18224 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 18225 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,Dispatch const & d) const18226 Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const 18227 { 18228 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18229 18230 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties; 18231 uint32_t executableCount; 18232 VULKAN_HPP_NAMESPACE::Result result; 18233 do 18234 { 18235 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18236 d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) ); 18237 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) 18238 { 18239 properties.resize( executableCount ); 18240 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18241 d.vkGetPipelineExecutablePropertiesKHR( m_device, 18242 reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), 18243 &executableCount, 18244 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) ); 18245 } 18246 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18247 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); 18248 VULKAN_HPP_ASSERT( executableCount <= properties.size() ); 18249 if ( executableCount < properties.size() ) 18250 { 18251 properties.resize( executableCount ); 18252 } 18253 return createResultValueType( result, properties ); 18254 } 18255 18256 template <typename PipelineExecutablePropertiesKHRAllocator, 18257 typename Dispatch, 18258 typename B1, 18259 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, int>::type> 18260 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 18261 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,Dispatch const & d) const18262 
Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, 18263 PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, 18264 Dispatch const & d ) const 18265 { 18266 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18267 18268 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties( 18269 pipelineExecutablePropertiesKHRAllocator ); 18270 uint32_t executableCount; 18271 VULKAN_HPP_NAMESPACE::Result result; 18272 do 18273 { 18274 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18275 d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) ); 18276 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) 18277 { 18278 properties.resize( executableCount ); 18279 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18280 d.vkGetPipelineExecutablePropertiesKHR( m_device, 18281 reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), 18282 &executableCount, 18283 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) ); 18284 } 18285 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18286 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); 18287 VULKAN_HPP_ASSERT( executableCount <= properties.size() ); 18288 if ( executableCount < properties.size() ) 18289 { 18290 properties.resize( executableCount ); 18291 } 18292 return createResultValueType( result, properties ); 18293 } 18294 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18295 18296 template <typename Dispatch> 18297 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pStatisticCount,VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,Dispatch 
const & d) const18298 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, 18299 uint32_t * pStatisticCount, 18300 VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, 18301 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18302 { 18303 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18304 return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, 18305 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), 18306 pStatisticCount, 18307 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) ); 18308 } 18309 18310 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18311 template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch> 18312 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 18313 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,Dispatch const & d) const18314 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const 18315 { 18316 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18317 18318 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics; 18319 uint32_t statisticCount; 18320 VULKAN_HPP_NAMESPACE::Result result; 18321 do 18322 { 18323 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableStatisticsKHR( 18324 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) ); 18325 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) 18326 { 18327 statistics.resize( statisticCount ); 18328 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18329 
d.vkGetPipelineExecutableStatisticsKHR( m_device, 18330 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 18331 &statisticCount, 18332 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) ); 18333 } 18334 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18335 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); 18336 VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); 18337 if ( statisticCount < statistics.size() ) 18338 { 18339 statistics.resize( statisticCount ); 18340 } 18341 return createResultValueType( result, statistics ); 18342 } 18343 18344 template <typename PipelineExecutableStatisticKHRAllocator, 18345 typename Dispatch, 18346 typename B1, 18347 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, int>::type> 18348 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 18349 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,Dispatch const & d) const18350 Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, 18351 PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, 18352 Dispatch const & d ) const 18353 { 18354 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18355 18356 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics( 18357 pipelineExecutableStatisticKHRAllocator ); 18358 uint32_t statisticCount; 18359 VULKAN_HPP_NAMESPACE::Result result; 18360 do 18361 { 18362 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 
d.vkGetPipelineExecutableStatisticsKHR( 18363 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) ); 18364 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) 18365 { 18366 statistics.resize( statisticCount ); 18367 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18368 d.vkGetPipelineExecutableStatisticsKHR( m_device, 18369 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 18370 &statisticCount, 18371 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) ); 18372 } 18373 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18374 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); 18375 VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); 18376 if ( statisticCount < statistics.size() ) 18377 { 18378 statistics.resize( statisticCount ); 18379 } 18380 return createResultValueType( result, statistics ); 18381 } 18382 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18383 18384 template <typename Dispatch> 18385 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pInternalRepresentationCount,VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,Dispatch const & d) const18386 Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, 18387 uint32_t * pInternalRepresentationCount, 18388 VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, 18389 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18390 { 18391 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18392 return static_cast<Result>( 18393 d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, 18394 reinterpret_cast<const 
VkPipelineExecutableInfoKHR *>( pExecutableInfo ), 18395 pInternalRepresentationCount, 18396 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) ); 18397 } 18398 18399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18400 template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch> 18401 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 18402 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,Dispatch const & d) const18403 Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const 18404 { 18405 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18406 18407 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> 18408 internalRepresentations; 18409 uint32_t internalRepresentationCount; 18410 VULKAN_HPP_NAMESPACE::Result result; 18411 do 18412 { 18413 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 18414 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) ); 18415 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) 18416 { 18417 internalRepresentations.resize( internalRepresentationCount ); 18418 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 18419 m_device, 18420 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 18421 &internalRepresentationCount, 18422 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() 
) ) ); 18423 } 18424 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18425 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); 18426 VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); 18427 if ( internalRepresentationCount < internalRepresentations.size() ) 18428 { 18429 internalRepresentations.resize( internalRepresentationCount ); 18430 } 18431 return createResultValueType( result, internalRepresentations ); 18432 } 18433 18434 template <typename PipelineExecutableInternalRepresentationKHRAllocator, 18435 typename Dispatch, 18436 typename B1, 18437 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, int>::type> 18438 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 18439 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,Dispatch const & d) const18440 Device::getPipelineExecutableInternalRepresentationsKHR( 18441 const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, 18442 PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, 18443 Dispatch const & d ) const 18444 { 18445 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18446 18447 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> 18448 internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator ); 18449 uint32_t internalRepresentationCount; 18450 VULKAN_HPP_NAMESPACE::Result result; 18451 do 18452 { 18453 
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 18454 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) ); 18455 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) 18456 { 18457 internalRepresentations.resize( internalRepresentationCount ); 18458 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( 18459 m_device, 18460 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), 18461 &internalRepresentationCount, 18462 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) ); 18463 } 18464 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 18465 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); 18466 VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); 18467 if ( internalRepresentationCount < internalRepresentations.size() ) 18468 { 18469 internalRepresentations.resize( internalRepresentationCount ); 18470 } 18471 return createResultValueType( result, internalRepresentations ); 18472 } 18473 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18474 18475 //=== VK_EXT_host_image_copy === 18476 18477 template <typename Dispatch> copyMemoryToImageEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo,Dispatch const & d) const18478 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo, 18479 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18480 { 18481 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18482 return static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) 
); 18483 } 18484 18485 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18486 template <typename Dispatch> 18487 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyMemoryToImageEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo,Dispatch const & d) const18488 Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, Dispatch const & d ) const 18489 { 18490 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18491 18492 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18493 d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( ©MemoryToImageInfo ) ) ); 18494 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); 18495 18496 return createResultValueType( result ); 18497 } 18498 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18499 18500 template <typename Dispatch> copyImageToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo,Dispatch const & d) const18501 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo, 18502 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18503 { 18504 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18505 return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) ); 18506 } 18507 18508 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18509 template <typename Dispatch> 18510 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo,Dispatch const & d) const18511 Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & 
copyImageToMemoryInfo, Dispatch const & d ) const 18512 { 18513 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18514 18515 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18516 d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( ©ImageToMemoryInfo ) ) ); 18517 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); 18518 18519 return createResultValueType( result ); 18520 } 18521 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18522 18523 template <typename Dispatch> copyImageToImageEXT(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo,Dispatch const & d) const18524 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo, 18525 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18526 { 18527 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18528 return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) ); 18529 } 18530 18531 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18532 template <typename Dispatch> 18533 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type copyImageToImageEXT(const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo,Dispatch const & d) const18534 Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, Dispatch const & d ) const 18535 { 18536 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18537 18538 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18539 d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( ©ImageToImageInfo ) ) ); 18540 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); 18541 
18542 return createResultValueType( result ); 18543 } 18544 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18545 18546 template <typename Dispatch> transitionImageLayoutEXT(uint32_t transitionCount,const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions,Dispatch const & d) const18547 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, 18548 const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions, 18549 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18550 { 18551 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18552 return static_cast<Result>( 18553 d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) ); 18554 } 18555 18556 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18557 template <typename Dispatch> 18558 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type transitionImageLayoutEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions,Dispatch const & d) const18559 Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions, 18560 Dispatch const & d ) const 18561 { 18562 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18563 18564 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18565 d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) ) ); 18566 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); 18567 18568 return createResultValueType( result ); 18569 } 18570 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18571 18572 template <typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image 
image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,Dispatch const & d) const18573 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, 18574 const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, 18575 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, 18576 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18577 { 18578 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18579 d.vkGetImageSubresourceLayout2EXT( m_device, 18580 static_cast<VkImage>( image ), 18581 reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ), 18582 reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); 18583 } 18584 18585 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18586 template <typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,Dispatch const & d) const18587 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT( 18588 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18589 { 18590 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18591 18592 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; 18593 d.vkGetImageSubresourceLayout2EXT( m_device, 18594 static_cast<VkImage>( image ), 18595 reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ), 18596 reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 18597 18598 return layout; 18599 } 18600 18601 template <typename X, typename Y, typename... 
Z, typename Dispatch> getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,Dispatch const & d) const18602 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT( 18603 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18604 { 18605 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18606 18607 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 18608 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>(); 18609 d.vkGetImageSubresourceLayout2EXT( m_device, 18610 static_cast<VkImage>( image ), 18611 reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ), 18612 reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 18613 18614 return structureChain; 18615 } 18616 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18617 18618 //=== VK_KHR_map_memory2 === 18619 18620 template <typename Dispatch> mapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo,void ** ppData,Dispatch const & d) const18621 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo, 18622 void ** ppData, 18623 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18624 { 18625 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18626 return static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) ); 18627 } 18628 18629 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18630 template <typename Dispatch> 18631 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type mapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo,Dispatch const & d) const18632 
Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo, Dispatch const & d ) const 18633 { 18634 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18635 18636 void * pData; 18637 VULKAN_HPP_NAMESPACE::Result result = 18638 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( &memoryMapInfo ), &pData ) ); 18639 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); 18640 18641 return createResultValueType( result, pData ); 18642 } 18643 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18644 18645 template <typename Dispatch> unmapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,Dispatch const & d) const18646 VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo, 18647 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18648 { 18649 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18650 return static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) ); 18651 } 18652 18653 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18654 template <typename Dispatch> unmapMemory2KHR(const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,Dispatch const & d) const18655 VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo, 18656 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18657 { 18658 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18659 18660 d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) ); 18661 } 18662 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18663 18664 //=== VK_EXT_swapchain_maintenance1 === 18665 18666 template <typename Dispatch> releaseSwapchainImagesEXT(const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo,Dispatch const & d) 
const18667 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, 18668 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18669 { 18670 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18671 return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) ); 18672 } 18673 18674 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18675 template <typename Dispatch> 18676 VULKAN_HPP_INLINE typename ResultValueType<void>::type releaseSwapchainImagesEXT(const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo,Dispatch const & d) const18677 Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const 18678 { 18679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18680 18681 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 18682 d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) ) ); 18683 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); 18684 18685 return createResultValueType( result ); 18686 } 18687 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18688 18689 //=== VK_NV_device_generated_commands === 18690 18691 template <typename Dispatch> getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const18692 VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, 18693 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 18694 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18695 { 18696 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18697 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 18698 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), 18699 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 18700 } 18701 18702 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18703 template <typename Dispatch> 18704 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,Dispatch const & d) const18705 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, 18706 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18707 { 18708 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18709 18710 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 18711 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 18712 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), 18713 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 18714 18715 return memoryRequirements; 18716 } 18717 18718 template <typename X, typename Y, typename... 
Z, typename Dispatch> 18719 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,Dispatch const & d) const18720 Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, 18721 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18722 { 18723 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18724 18725 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 18726 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 18727 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, 18728 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), 18729 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 18730 18731 return structureChain; 18732 } 18733 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18734 18735 template <typename Dispatch> preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,Dispatch const & d) const18736 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, 18737 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18738 { 18739 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18740 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); 18741 } 18742 18743 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18744 template <typename Dispatch> preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,Dispatch const & d) const18745 VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const 
VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, 18746 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18747 { 18748 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18749 18750 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); 18751 } 18752 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18753 18754 template <typename Dispatch> executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,Dispatch const & d) const18755 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 18756 const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, 18757 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18758 { 18759 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18760 d.vkCmdExecuteGeneratedCommandsNV( 18761 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); 18762 } 18763 18764 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18765 template <typename Dispatch> executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,Dispatch const & d) const18766 VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, 18767 const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, 18768 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18769 { 18770 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18771 18772 d.vkCmdExecuteGeneratedCommandsNV( 18773 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); 18774 } 18775 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 
18776 18777 template <typename Dispatch> bindPipelineShaderGroupNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t groupIndex,Dispatch const & d) const18778 VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 18779 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 18780 uint32_t groupIndex, 18781 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18782 { 18783 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18784 d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex ); 18785 } 18786 18787 template <typename Dispatch> 18788 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,Dispatch const & d) const18789 Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, 18790 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18791 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, 18792 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18793 { 18794 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18795 return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, 18796 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), 18797 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 18798 reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) ); 18799 } 18800 18801 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18802 template <typename Dispatch> 18803 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18804 Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, 18805 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18806 Dispatch const & d ) const 18807 { 18808 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18809 18810 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; 18811 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV( 18812 m_device, 18813 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), 18814 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18815 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) ); 18816 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); 18817 18818 return createResultValueType( result, indirectCommandsLayout ); 18819 } 18820 18821 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18822 template <typename Dispatch> 18823 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type createIndirectCommandsLayoutNVUnique(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18824 Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, 18825 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18826 Dispatch const & d ) const 18827 { 18828 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18829 18830 VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; 18831 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateIndirectCommandsLayoutNV( 18832 m_device, 18833 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), 18834 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 18835 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) ); 18836 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); 18837 18838 return createResultValueType( result, 18839 UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>( 18840 indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 18841 } 18842 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18843 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18844 18845 template <typename Dispatch> destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const18846 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 18847 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18848 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18849 { 18850 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18851 d.vkDestroyIndirectCommandsLayoutNV( 18852 m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 18853 } 18854 18855 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18856 template <typename Dispatch> destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18857 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 18858 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18859 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18860 { 18861 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18862 18863 d.vkDestroyIndirectCommandsLayoutNV( 18864 m_device, 18865 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 18866 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 18867 } 18868 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18869 18870 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const18871 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 18872 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18873 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18874 { 18875 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18876 d.vkDestroyIndirectCommandsLayoutNV( 18877 m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 18878 } 18879 18880 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18881 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const18882 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 18883 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 18884 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18885 { 
18886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18887 18888 d.vkDestroyIndirectCommandsLayoutNV( 18889 m_device, 18890 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), 18891 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 18892 } 18893 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18894 18895 //=== VK_EXT_depth_bias_control === 18896 18897 template <typename Dispatch> setDepthBias2EXT(const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo,Dispatch const & d) const18898 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, 18899 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18900 { 18901 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18902 d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) ); 18903 } 18904 18905 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18906 template <typename Dispatch> setDepthBias2EXT(const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo,Dispatch const & d) const18907 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, 18908 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18909 { 18910 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18911 18912 d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) ); 18913 } 18914 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18915 18916 //=== VK_EXT_acquire_drm_display === 18917 18918 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 18919 template <typename Dispatch> acquireDrmDisplayEXT(int32_t drmFd,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const18920 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, 18921 VULKAN_HPP_NAMESPACE::DisplayKHR display, 
18922 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18923 { 18924 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18925 return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); 18926 } 18927 #else 18928 template <typename Dispatch> 18929 VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireDrmDisplayEXT(int32_t drmFd,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const18930 PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 18931 { 18932 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18933 18934 VULKAN_HPP_NAMESPACE::Result result = 18935 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); 18936 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); 18937 18938 return createResultValueType( result ); 18939 } 18940 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 18941 18942 template <typename Dispatch> getDrmDisplayEXT(int32_t drmFd,uint32_t connectorId,VULKAN_HPP_NAMESPACE::DisplayKHR * display,Dispatch const & d) const18943 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, 18944 uint32_t connectorId, 18945 VULKAN_HPP_NAMESPACE::DisplayKHR * display, 18946 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18947 { 18948 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18949 return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) ); 18950 } 18951 18952 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 18953 template <typename Dispatch> 18954 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getDrmDisplayEXT(int32_t drmFd,uint32_t connectorId,Dispatch const & d) const18955 
PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const 18956 { 18957 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18958 18959 VULKAN_HPP_NAMESPACE::DisplayKHR display; 18960 VULKAN_HPP_NAMESPACE::Result result = 18961 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 18962 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); 18963 18964 return createResultValueType( result, display ); 18965 } 18966 18967 # ifndef VULKAN_HPP_NO_SMART_HANDLE 18968 template <typename Dispatch> 18969 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getDrmDisplayEXTUnique(int32_t drmFd,uint32_t connectorId,Dispatch const & d) const18970 PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const 18971 { 18972 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18973 18974 VULKAN_HPP_NAMESPACE::DisplayKHR display; 18975 VULKAN_HPP_NAMESPACE::Result result = 18976 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 18977 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); 18978 18979 return createResultValueType( result, 18980 UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 18981 } 18982 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 18983 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 18984 18985 //=== VK_EXT_private_data === 18986 18987 template <typename Dispatch> createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,Dispatch const & d) const18988 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, 18989 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 18990 VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, 18991 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 18992 { 18993 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 18994 return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, 18995 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), 18996 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 18997 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); 18998 } 18999 19000 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19001 template <typename Dispatch> 19002 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19003 Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 19004 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19005 Dispatch const & d ) const 19006 { 19007 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19008 19009 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 19010 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT( 19011 m_device, 19012 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 19013 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 19014 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 19015 resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); 19016 19017 return createResultValueType( result, privateDataSlot ); 19018 } 19019 19020 # ifndef VULKAN_HPP_NO_SMART_HANDLE 19021 template <typename Dispatch> 19022 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotEXTUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19023 Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, 19024 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19025 Dispatch const & d ) const 19026 { 19027 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19028 19029 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; 19030 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreatePrivateDataSlotEXT( 19031 m_device, 19032 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), 19033 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 19034 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); 19035 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); 19036 19037 return createResultValueType( 19038 result, UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 19039 } 19040 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 19041 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19042 19043 template <typename Dispatch> destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const19044 VULKAN_HPP_INLINE void 
Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 19045 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 19046 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19047 { 19048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19049 d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 19050 } 19051 19052 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19053 template <typename Dispatch> destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const19054 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 19055 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 19056 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19057 { 19058 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19059 19060 d.vkDestroyPrivateDataSlotEXT( 19061 m_device, 19062 static_cast<VkPrivateDataSlot>( privateDataSlot ), 19063 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 19064 } 19065 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19066 19067 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 19068 template <typename Dispatch> setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const19069 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 19070 uint64_t objectHandle, 19071 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 19072 uint64_t data, 19073 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19074 { 19075 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
19076 return static_cast<Result>( 19077 d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 19078 } 19079 #else 19080 template <typename Dispatch> setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t data,Dispatch const & d) const19081 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 19082 uint64_t objectHandle, 19083 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 19084 uint64_t data, 19085 Dispatch const & d ) const 19086 { 19087 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19088 19089 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19090 d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); 19091 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); 19092 19093 return createResultValueType( result ); 19094 } 19095 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 19096 19097 template <typename Dispatch> getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,uint64_t * pData,Dispatch const & d) const19098 VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 19099 uint64_t objectHandle, 19100 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 19101 uint64_t * pData, 19102 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19103 { 19104 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19105 d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); 19106 } 19107 19108 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 19109 template <typename Dispatch> getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,Dispatch const & d) const19110 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, 19111 uint64_t objectHandle, 19112 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 19113 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19114 { 19115 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19116 19117 uint64_t data; 19118 d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); 19119 19120 return data; 19121 } 19122 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19123 19124 //=== VK_KHR_video_encode_queue === 19125 19126 template <typename Dispatch> 19127 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo,VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties,Dispatch const & d) const19128 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, 19129 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, 19130 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19131 { 19132 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19133 return static_cast<Result>( 19134 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, 19135 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ), 19136 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) ); 19137 } 19138 19139 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19140 
template <typename Dispatch> 19141 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::type getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,Dispatch const & d) const19142 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, 19143 Dispatch const & d ) const 19144 { 19145 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19146 19147 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; 19148 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19149 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, 19150 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ), 19151 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) ); 19152 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); 19153 19154 return createResultValueType( result, qualityLevelProperties ); 19155 } 19156 19157 template <typename X, typename Y, typename... 
Z, typename Dispatch> 19158 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getVideoEncodeQualityLevelPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,Dispatch const & d) const19159 PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, 19160 Dispatch const & d ) const 19161 { 19162 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19163 19164 StructureChain<X, Y, Z...> structureChain; 19165 VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = 19166 structureChain.template get<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>(); 19167 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19168 d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, 19169 reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ), 19170 reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) ); 19171 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); 19172 19173 return createResultValueType( result, structureChain ); 19174 } 19175 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19176 19177 template <typename Dispatch> 19178 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo,VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo,size_t * pDataSize,void * pData,Dispatch const & d) const19179 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, 19180 
VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, 19181 size_t * pDataSize, 19182 void * pData, 19183 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19184 { 19185 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19186 return static_cast<Result>( 19187 d.vkGetEncodedVideoSessionParametersKHR( m_device, 19188 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ), 19189 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ), 19190 pDataSize, 19191 pData ) ); 19192 } 19193 19194 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19195 template <typename Uint8_tAllocator, typename Dispatch> 19196 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19197 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Dispatch const & d) const19198 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 19199 Dispatch const & d ) const 19200 { 19201 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19202 19203 std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_; 19204 VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; 19205 std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; 19206 size_t dataSize; 19207 VULKAN_HPP_NAMESPACE::Result result; 19208 do 19209 { 19210 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19211 d.vkGetEncodedVideoSessionParametersKHR( m_device, 19212 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 19213 
reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 19214 &dataSize, 19215 nullptr ) ); 19216 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 19217 { 19218 data.resize( dataSize ); 19219 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 19220 d.vkGetEncodedVideoSessionParametersKHR( m_device, 19221 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), 19222 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), 19223 &dataSize, 19224 reinterpret_cast<void *>( data.data() ) ) ); 19225 } 19226 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 19227 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); 19228 19229 return createResultValueType( result, data_ ); 19230 } 19231 19232 template <typename Uint8_tAllocator, 19233 typename Dispatch, 19234 typename B2, 19235 typename std::enable_if<std::is_same<typename B2::value_type, uint8_t>::value, int>::type> 19236 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 19237 typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type getEncodedVideoSessionParametersKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const19238 Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, 19239 Uint8_tAllocator & uint8_tAllocator, 19240 Dispatch const & d ) const 19241 { 19242 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19243 19244 std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_( 19245 std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); 19246 
    // NOTE(review): continuation of Device::getEncodedVideoSessionParametersKHR (signature precedes
    // this chunk).  Standard Vulkan enumeration pattern: first call queries the required byte count
    // (pData == nullptr), second call fetches the payload; the loop retries while the implementation
    // reports eIncomplete.
    VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
    std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
    size_t dataSize;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      // size query only
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                 &dataSize,
                                                 nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        // fetch dataSize bytes of encoded parameter data
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                   reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                   reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                   &dataSize,
                                                   reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );

    return createResultValueType( result, data_ );
  }

  // Enhanced overload: the feedback info is returned as a StructureChain<X, Y, Z...> whose
  // VideoEncodeSessionParametersFeedbackInfoKHR link receives the driver's feedback, paired with
  // the encoded parameter bytes.  Same size-query / fetch / eIncomplete retry pattern as above.
  template <typename X, typename Y, typename... Z, typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type
    Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_;
    VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
      data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
    std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
    size_t dataSize;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                 &dataSize,
                                                 nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                   reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                   reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                   &dataSize,
                                                   reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );

    return createResultValueType( result, data_ );
  }

  // Same as the StructureChain overload above, but the returned byte vector is constructed from a
  // caller-provided allocator (B2 is SFINAE-constrained to allocators of uint8_t).
  template <typename X,
            typename Y,
            typename... Z,
            typename Uint8_tAllocator,
            typename Dispatch,
            typename B2,
            typename std::enable_if<std::is_same<typename B2::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type
    Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
                                                 Uint8_tAllocator & uint8_tAllocator,
                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    // piecewise construction: default-construct the chain, allocator-construct the vector
    std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_(
      std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) );
    VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
      data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
    std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
    size_t dataSize;
    VULKAN_HPP_NAMESPACE::Result result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                 reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                 reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                 &dataSize,
                                                 nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetEncodedVideoSessionParametersKHR( m_device,
                                                   reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
                                                   reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
                                                   &dataSize,
                                                   reinterpret_cast<void *>( data.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );

    return createResultValueType( result, data_ );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: records a video encode operation into this command buffer (thin forward to
  // vkCmdEncodeVideoKHR).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of encodeVideoKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_NV_cuda_kernel_launch ===

  // C-style overload: creates a CUDA module from the given create info; result handle is written
  // through pModule.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,
                                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                            VULKAN_HPP_NAMESPACE::CudaModuleNV *              pModule,
                                                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateCudaModuleNV( m_device,
                                                        reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ),
                                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                        reinterpret_cast<VkCudaModuleNV *>( pModule ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created CudaModuleNV by value; errors are routed through
  // resultCheck (throws, or asserts when exceptions are disabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaModuleNV>::type
    Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV &      createInfo,
                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::CudaModuleNV module;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCudaModuleNV( m_device,
                              reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkCudaModuleNV *>( &module ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" );

    return createResultValueType( result, module );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: the returned UniqueHandle destroys the module (with this device and
  // the given allocator) when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>>::type
    Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::CudaModuleNV module;
    VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCudaModuleNV( m_device,
                              reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkCudaModuleNV *>( &module ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  // C-style overload: queries the binary cache of a CUDA module.  Follows the Vulkan two-call
  // idiom: pCacheData == nullptr fills *pCacheSize with the required byte count.
  // (template <typename Dispatch> header emitted just above)
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
                                                                              size_t *                           pCacheSize,
                                                                              void *                             pCacheData,
                                                                              Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the module cache as a byte vector, looping on eIncomplete and
  // shrinking the vector if the final size came back smaller than the reservation.
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<uint8_t, Uint8_tAllocator> cacheData;
    size_t                                 cacheSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      // size query
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize )
      {
        // data fetch
        cacheData.resize( cacheSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
    VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
    if ( cacheSize < cacheData.size() )
    {
      cacheData.resize( cacheSize );
    }
    return createResultValueType( result, cacheData );
  }

  // Same as above, with a caller-provided uint8_t allocator (B1 SFINAE-constrained to uint8_t
  // allocators).
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<uint8_t, Uint8_tAllocator> cacheData( uint8_tAllocator );
    size_t                                 cacheSize;
    VULKAN_HPP_NAMESPACE::Result           result;
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize )
      {
        cacheData.resize( cacheSize );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
          d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
    VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
    if ( cacheSize < cacheData.size() )
    {
      cacheData.resize( cacheSize );
    }
    return createResultValueType( result, cacheData );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  // C-style overload: creates a CUDA function handle from the given create info.
  // (template <typename Dispatch> header emitted just above)
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,
                                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                                                              VULKAN_HPP_NAMESPACE::CudaFunctionNV *                 pFunction,
                                                                              Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateCudaFunctionNV( m_device,
                                                          reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created CudaFunctionNV by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::type
    Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV &    createInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
    VULKAN_HPP_NAMESPACE::Result         result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCudaFunctionNV( m_device,
                                reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkCudaFunctionNV *>( &function ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" );

    return createResultValueType( result, function );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: returned UniqueHandle destroys the function on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>>::type
    Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV &    createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
    VULKAN_HPP_NAMESPACE::Result         result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkCreateCudaFunctionNV( m_device,
                                reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkCudaFunctionNV *>( &function ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" );

    return createResultValueType(
      result, UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Destroys a CUDA module (C-style allocator pointer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV                module,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Destroys a CUDA module (Optional allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV                        module,
                                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyCudaModuleNV( m_device,
                             static_cast<VkCudaModuleNV>( module ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy() overload for CudaModuleNV; same behavior as destroyCudaModuleNV.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV                module,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for CudaModuleNV (Optional allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV                        module,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyCudaModuleNV( m_device,
                             static_cast<VkCudaModuleNV>( module ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Destroys a CUDA function (C-style allocator pointer).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV              function,
                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Destroys a CUDA function (Optional allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV                      function,
                                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                        Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyCudaFunctionNV( m_device,
                               static_cast<VkCudaFunctionNV>( function ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Generic destroy() overload for CudaFunctionNV; same behavior as destroyCudaFunctionNV.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV              function,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for CudaFunctionNV (Optional allocator).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV                      function,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyCudaFunctionNV( m_device,
                               static_cast<VkCudaFunctionNV>( function ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: records a CUDA kernel launch into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,
                                                            Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of cudaLaunchKernelNV.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,
                                                            Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  //=== VK_EXT_metal_objects ===

  // C-style overload: fills the caller's ExportMetalObjectsInfoEXT structure.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
                                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the ExportMetalObjectsInfoEXT structure by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT
    Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );

    return metalObjectsInfo;
  }

  template <typename X, typename
Y, typename... Z, typename Dispatch>
  // StructureChain overload: the ExportMetalObjectsInfoEXT link of the chain receives the exported
  // object info, allowing extension structures to be chained in.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>  structureChain;
    VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );

    return structureChain;
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif   /*VK_USE_PLATFORM_METAL_EXT*/

  //=== VK_KHR_synchronization2 ===

  // C-style overload: sets an event with synchronization2 dependency info.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event                  event,
                                                      const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
                                                      Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of setEvent2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event                  event,
                                                      const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
                                                      Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Resets an event at the given synchronization2 pipeline stage mask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event               event,
                                                        VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
                                                        Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
  }

  // C-style overload: waits on eventCount events, one DependencyInfo per event.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t                                     eventCount,
                                                        const VULKAN_HPP_NAMESPACE::Event *          pEvents,
                                                        const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
                                                        Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWaitEvents2KHR(
      m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload: the two proxies must have equal length (one DependencyInfo per event);
  // a mismatch throws LogicError, or asserts when exceptions are disabled.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &          events,
                                                        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
# else
    if ( events.size() != dependencyInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdWaitEvents2KHR( m_commandBuffer,
                           events.size(),
                           reinterpret_cast<const VkEvent *>( events.data() ),
                           reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: records a synchronization2 pipeline barrier.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
                                                             Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of pipelineBarrier2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
                                                             Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Writes a timestamp at the given synchronization2 stage into query `query` of `queryPool`.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
                                                            VULKAN_HPP_NAMESPACE::QueryPool           queryPool,
                                                            uint32_t                                  query,
                                                            Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
  }

  // C-style overload: submits submitCount batches to this queue, optionally signaling `fence`.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t                                  submitCount,
                                                                   const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
                                                                   VULKAN_HPP_NAMESPACE::Fence               fence,
                                                                   Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload: submits all batches in `submits`; errors are routed through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) );
    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );

    return createResultValueType( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Writes a 32-bit marker into dstBuffer at dstOffset after the given synchronization2 stage.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
                                                               VULKAN_HPP_NAMESPACE::Buffer              dstBuffer,
                                                               VULKAN_HPP_NAMESPACE::DeviceSize          dstOffset,
                                                               uint32_t                                  marker,
                                                               Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteBufferMarker2AMD(
      m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
  }

  // C-style overload: enumerates checkpoint data recorded on this queue (two-call idiom).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t *                                pCheckpointDataCount,
                                                      VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
                                                      Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count query, then fetch; shrinks the vector if fewer entries were returned.
  // NOTE(review): no eIncomplete loop here — vkGetQueueCheckpointData2NV returns void.
  template <typename CheckpointData2NVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
    Queue::getCheckpointData2NV( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
    uint32_t                                                                         checkpointDataCount;
    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );

    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    if ( checkpointDataCount < checkpointData.size() )
    {
      checkpointData.resize( checkpointDataCount );
    }
    return checkpointData;
  }

  // Same as above, with a caller-provided allocator (B1 SFINAE-constrained to CheckpointData2NV
  // allocators).
  template <typename CheckpointData2NVAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
    Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
    uint32_t                                                                         checkpointDataCount;
    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );

    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    if ( checkpointDataCount < checkpointData.size() )
    {
      checkpointData.resize( checkpointDataCount );
    }
    return checkpointData;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_descriptor_buffer ===

  // C-style overload: queries the size in bytes of a descriptor set layout.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
                                                                VULKAN_HPP_NAMESPACE::DeviceSize *        pLayoutSizeInBytes,
                                                                Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the layout size by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
    Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes;
    d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );

    return layoutSizeInBytes;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,uint32_t binding,VULKAN_HPP_NAMESPACE::DeviceSize * pOffset,Dispatch const & d) const19928 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, 19929 uint32_t binding, 19930 VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, 19931 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19932 { 19933 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19934 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) ); 19935 } 19936 19937 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19938 template <typename Dispatch> getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,uint32_t binding,Dispatch const & d) const19939 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( 19940 VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19941 { 19942 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19943 19944 VULKAN_HPP_NAMESPACE::DeviceSize offset; 19945 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) ); 19946 19947 return offset; 19948 } 19949 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19950 19951 template <typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo,size_t dataSize,void * pDescriptor,Dispatch const & d) const19952 VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, 19953 size_t dataSize, 19954 void * pDescriptor, 19955 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19956 { 19957 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 19958 d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor ); 19959 } 19960 19961 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19962 template <typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,size_t dataSize,void * pDescriptor,Dispatch const & d) const19963 VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, 19964 size_t dataSize, 19965 void * pDescriptor, 19966 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19967 { 19968 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19969 19970 d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor ); 19971 } 19972 19973 template <typename DescriptorType, typename Dispatch> getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,Dispatch const & d) const19974 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, 19975 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 19976 { 19977 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19978 19979 DescriptorType descriptor; 19980 d.vkGetDescriptorEXT( 19981 m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) ); 19982 19983 return descriptor; 19984 } 19985 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 19986 19987 template <typename Dispatch> bindDescriptorBuffersEXT(uint32_t bufferCount,const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos,Dispatch const & d) const19988 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, 19989 const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, 19990 Dispatch const & d 
) const VULKAN_HPP_NOEXCEPT 19991 { 19992 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 19993 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) ); 19994 } 19995 19996 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 19997 template <typename Dispatch> 19998 VULKAN_HPP_INLINE void bindDescriptorBuffersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos,Dispatch const & d) const19999 CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos, 20000 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20001 { 20002 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20003 20004 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) ); 20005 } 20006 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20007 20008 template <typename Dispatch> setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,uint32_t setCount,const uint32_t * pBufferIndices,const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,Dispatch const & d) const20009 VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 20010 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 20011 uint32_t firstSet, 20012 uint32_t setCount, 20013 const uint32_t * pBufferIndices, 20014 const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, 20015 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20016 { 20017 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20018 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, 20019 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 20020 
static_cast<VkPipelineLayout>( layout ), 20021 firstSet, 20022 setCount, 20023 pBufferIndices, 20024 reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); 20025 } 20026 20027 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20028 template <typename Dispatch> setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t firstSet,VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,Dispatch const & d) const20029 VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 20030 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 20031 uint32_t firstSet, 20032 VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices, 20033 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, 20034 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 20035 { 20036 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20037 # ifdef VULKAN_HPP_NO_EXCEPTIONS 20038 VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); 20039 # else 20040 if ( bufferIndices.size() != offsets.size() ) 20041 { 20042 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); 20043 } 20044 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 20045 20046 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, 20047 static_cast<VkPipelineBindPoint>( pipelineBindPoint ), 20048 static_cast<VkPipelineLayout>( layout ), 20049 firstSet, 20050 bufferIndices.size(), 20051 bufferIndices.data(), 20052 reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); 20053 } 20054 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20055 20056 template <typename Dispatch> bindDescriptorBufferEmbeddedSamplersEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint 
pipelineBindPoint,VULKAN_HPP_NAMESPACE::PipelineLayout layout,uint32_t set,Dispatch const & d) const20057 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 20058 VULKAN_HPP_NAMESPACE::PipelineLayout layout, 20059 uint32_t set, 20060 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20061 { 20062 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20063 d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( 20064 m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set ); 20065 } 20066 20067 template <typename Dispatch> getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const20068 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( 20069 const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20070 { 20071 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20072 return static_cast<Result>( 20073 d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 20074 } 20075 20076 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20077 template <typename DataType, typename Dispatch> 20078 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const20079 Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 20080 { 20081 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20082 20083 DataType data; 20084 VULKAN_HPP_NAMESPACE::Result result = 
static_cast<VULKAN_HPP_NAMESPACE::Result>( 20085 d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 20086 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); 20087 20088 return createResultValueType( result, data ); 20089 } 20090 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20091 20092 template <typename Dispatch> getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const20093 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( 20094 const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20095 { 20096 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20097 return static_cast<Result>( 20098 d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 20099 } 20100 20101 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20102 template <typename DataType, typename Dispatch> 20103 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const20104 Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 20105 { 20106 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20107 20108 DataType data; 20109 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20110 d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 20111 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getImageOpaqueCaptureDescriptorDataEXT" ); 20112 20113 return createResultValueType( result, data ); 20114 } 20115 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20116 20117 template <typename Dispatch> getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const20118 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( 20119 const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20120 { 20121 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20122 return static_cast<Result>( 20123 d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 20124 } 20125 20126 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20127 template <typename DataType, typename Dispatch> 20128 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const20129 Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 20130 { 20131 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20132 20133 DataType data; 20134 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20135 d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 20136 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); 20137 20138 return createResultValueType( result, data ); 20139 } 20140 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20141 20142 template <typename Dispatch> 
getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const20143 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( 20144 const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20145 { 20146 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20147 return static_cast<Result>( 20148 d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 20149 } 20150 20151 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20152 template <typename DataType, typename Dispatch> 20153 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const20154 Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const 20155 { 20156 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20157 20158 DataType data; 20159 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20160 d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 20161 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); 20162 20163 return createResultValueType( result, data ); 20164 } 20165 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20166 20167 template <typename Dispatch> getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo,void * pData,Dispatch const & d) const20168 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( 20169 const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20170 { 20171 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20172 return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( 20173 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); 20174 } 20175 20176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20177 template <typename DataType, typename Dispatch> 20178 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info,Dispatch const & d) const20179 Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, 20180 Dispatch const & d ) const 20181 { 20182 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20183 20184 DataType data; 20185 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( 20186 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); 20187 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); 20188 20189 return createResultValueType( result, data ); 20190 } 20191 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20192 20193 //=== VK_NV_fragment_shading_rate_enums === 20194 20195 template <typename Dispatch> setFragmentShadingRateEnumNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const20196 VULKAN_HPP_INLINE void 
CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, 20197 const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], 20198 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20199 { 20200 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20201 d.vkCmdSetFragmentShadingRateEnumNV( 20202 m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); 20203 } 20204 20205 //=== VK_EXT_mesh_shader === 20206 20207 template <typename Dispatch> 20208 VULKAN_HPP_INLINE void drawMeshTasksEXT(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const20209 CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20210 { 20211 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20212 d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); 20213 } 20214 20215 template <typename Dispatch> drawMeshTasksIndirectEXT(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,uint32_t drawCount,uint32_t stride,Dispatch const & d) const20216 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, 20217 VULKAN_HPP_NAMESPACE::DeviceSize offset, 20218 uint32_t drawCount, 20219 uint32_t stride, 20220 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20221 { 20222 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20223 d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); 20224 } 20225 20226 template <typename Dispatch> drawMeshTasksIndirectCountEXT(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::Buffer countBuffer,VULKAN_HPP_NAMESPACE::DeviceSize 
countBufferOffset,uint32_t maxDrawCount,uint32_t stride,Dispatch const & d) const20227 VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, 20228 VULKAN_HPP_NAMESPACE::DeviceSize offset, 20229 VULKAN_HPP_NAMESPACE::Buffer countBuffer, 20230 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 20231 uint32_t maxDrawCount, 20232 uint32_t stride, 20233 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20234 { 20235 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20236 d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer, 20237 static_cast<VkBuffer>( buffer ), 20238 static_cast<VkDeviceSize>( offset ), 20239 static_cast<VkBuffer>( countBuffer ), 20240 static_cast<VkDeviceSize>( countBufferOffset ), 20241 maxDrawCount, 20242 stride ); 20243 } 20244 20245 //=== VK_KHR_copy_commands2 === 20246 20247 template <typename Dispatch> copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,Dispatch const & d) const20248 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, 20249 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20250 { 20251 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20252 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); 20253 } 20254 20255 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20256 template <typename Dispatch> copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,Dispatch const & d) const20257 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, 20258 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20259 { 20260 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20261 20262 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) ); 20263 } 20264 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 
20265 20266 template <typename Dispatch> copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,Dispatch const & d) const20267 VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, 20268 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20269 { 20270 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20271 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); 20272 } 20273 20274 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20275 template <typename Dispatch> copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,Dispatch const & d) const20276 VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, 20277 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20278 { 20279 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20280 20281 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); 20282 } 20283 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20284 20285 template <typename Dispatch> copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,Dispatch const & d) const20286 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, 20287 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20288 { 20289 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20290 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); 20291 } 20292 20293 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20294 template <typename Dispatch> copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,Dispatch const & d) const20295 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const 
VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, 20296 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20297 { 20298 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20299 20300 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); 20301 } 20302 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20303 20304 template <typename Dispatch> copyImageToBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,Dispatch const & d) const20305 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, 20306 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20307 { 20308 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20309 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); 20310 } 20311 20312 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20313 template <typename Dispatch> copyImageToBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,Dispatch const & d) const20314 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, 20315 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20316 { 20317 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20318 20319 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); 20320 } 20321 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20322 20323 template <typename Dispatch> blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,Dispatch const & d) const20324 VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, 20325 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20326 { 20327 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20328 d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); 20329 } 20330 20331 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20332 template <typename Dispatch> blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,Dispatch const & d) const20333 VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, 20334 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20335 { 20336 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20337 20338 d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); 20339 } 20340 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20341 20342 template <typename Dispatch> resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,Dispatch const & d) const20343 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, 20344 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20345 { 20346 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20347 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); 20348 } 20349 20350 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20351 template <typename Dispatch> resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,Dispatch const & d) const20352 VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, 20353 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20354 { 20355 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20356 20357 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); 20358 } 20359 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20360 20361 //=== 
VK_EXT_device_fault === 20362 20363 template <typename Dispatch> getFaultInfoEXT(VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,Dispatch const & d) const20364 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, 20365 VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, 20366 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20367 { 20368 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20369 return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( 20370 m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); 20371 } 20372 20373 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20374 template <typename Dispatch> 20375 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>> getFaultInfoEXT(Dispatch const & d) const20376 Device::getFaultInfoEXT( Dispatch const & d ) const 20377 { 20378 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20379 20380 std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data_; 20381 VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first; 20382 VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second; 20383 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceFaultInfoEXT( 20384 m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) ) ); 20385 resultCheck( 20386 result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); 20387 20388 return ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>( 
20389 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ ); 20390 } 20391 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20392 20393 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 20394 //=== VK_NV_acquire_winrt_display === 20395 20396 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 20397 template <typename Dispatch> acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const20398 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, 20399 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20400 { 20401 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20402 return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 20403 } 20404 # else 20405 template <typename Dispatch> 20406 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const20407 PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 20408 { 20409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20410 20411 VULKAN_HPP_NAMESPACE::Result result = 20412 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 20413 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); 20414 20415 return createResultValueType( result ); 20416 } 20417 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 20418 20419 template <typename Dispatch> getWinrtDisplayNV(uint32_t deviceRelativeId,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const20420 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, 20421 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 20422 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
20423 { 20424 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20425 return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 20426 } 20427 20428 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20429 template <typename Dispatch> 20430 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getWinrtDisplayNV(uint32_t deviceRelativeId,Dispatch const & d) const20431 PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const 20432 { 20433 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20434 20435 VULKAN_HPP_NAMESPACE::DisplayKHR display; 20436 VULKAN_HPP_NAMESPACE::Result result = 20437 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 20438 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); 20439 20440 return createResultValueType( result, display ); 20441 } 20442 20443 # ifndef VULKAN_HPP_NO_SMART_HANDLE 20444 template <typename Dispatch> 20445 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getWinrtDisplayNVUnique(uint32_t deviceRelativeId,Dispatch const & d) const20446 PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const 20447 { 20448 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20449 20450 VULKAN_HPP_NAMESPACE::DisplayKHR display; 20451 VULKAN_HPP_NAMESPACE::Result result = 20452 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 20453 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); 20454 20455 return createResultValueType( result, 20456 
UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); 20457 } 20458 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 20459 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20460 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 20461 20462 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) 20463 //=== VK_EXT_directfb_surface === 20464 20465 template <typename Dispatch> createDirectFBSurfaceEXT(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const20466 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, 20467 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 20468 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 20469 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20470 { 20471 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20472 return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, 20473 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), 20474 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 20475 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 20476 } 20477 20478 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20479 template <typename Dispatch> 20480 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDirectFBSurfaceEXT(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20481 Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, 20482 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20483 Dispatch const & d ) const 20484 { 20485 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20486 20487 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 20488 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT( 20489 m_instance, 20490 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), 20491 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20492 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 20493 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); 20494 20495 return createResultValueType( result, surface ); 20496 } 20497 20498 # ifndef VULKAN_HPP_NO_SMART_HANDLE 20499 template <typename Dispatch> 20500 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDirectFBSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20501 Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, 20502 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20503 Dispatch const & d ) const 20504 { 20505 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20506 20507 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 20508 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT( 20509 m_instance, 20510 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), 20511 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20512 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 20513 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); 20514 20515 return createResultValueType( 20516 
result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 20517 } 20518 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 20519 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20520 20521 template <typename Dispatch> getDirectFBPresentationSupportEXT(uint32_t queueFamilyIndex,IDirectFB * dfb,Dispatch const & d) const20522 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, 20523 IDirectFB * dfb, 20524 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20525 { 20526 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20527 return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); 20528 } 20529 20530 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20531 template <typename Dispatch> 20532 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT(uint32_t queueFamilyIndex,IDirectFB & dfb,Dispatch const & d) const20533 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20534 { 20535 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20536 20537 VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); 20538 20539 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 20540 } 20541 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20542 #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ 20543 20544 //=== VK_EXT_vertex_input_dynamic_state === 20545 20546 template <typename Dispatch> setVertexInputEXT(uint32_t vertexBindingDescriptionCount,const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,uint32_t vertexAttributeDescriptionCount,const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,Dispatch const & d) const20547 VULKAN_HPP_INLINE 
void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, 20548 const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, 20549 uint32_t vertexAttributeDescriptionCount, 20550 const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, 20551 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20552 { 20553 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20554 d.vkCmdSetVertexInputEXT( m_commandBuffer, 20555 vertexBindingDescriptionCount, 20556 reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ), 20557 vertexAttributeDescriptionCount, 20558 reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) ); 20559 } 20560 20561 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20562 template <typename Dispatch> setVertexInputEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,Dispatch const & d) const20563 VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( 20564 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, 20565 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions, 20566 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20567 { 20568 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20569 20570 d.vkCmdSetVertexInputEXT( m_commandBuffer, 20571 vertexBindingDescriptions.size(), 20572 reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ), 20573 vertexAttributeDescriptions.size(), 20574 reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( 
vertexAttributeDescriptions.data() ) ); 20575 } 20576 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20577 20578 #if defined( VK_USE_PLATFORM_FUCHSIA ) 20579 //=== VK_FUCHSIA_external_memory === 20580 20581 template <typename Dispatch> 20582 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,zx_handle_t * pZirconHandle,Dispatch const & d) const20583 Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, 20584 zx_handle_t * pZirconHandle, 20585 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20586 { 20587 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20588 return static_cast<Result>( 20589 d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); 20590 } 20591 20592 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20593 template <typename Dispatch> 20594 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type getMemoryZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,Dispatch const & d) const20595 Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const 20596 { 20597 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20598 20599 zx_handle_t zirconHandle; 20600 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20601 d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) ); 20602 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); 20603 20604 return createResultValueType( result, zirconHandle ); 20605 } 20606 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ 20607 20608 template <typename Dispatch> 20609 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryZirconHandlePropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,zx_handle_t zirconHandle,VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,Dispatch const & d) const20610 Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 20611 zx_handle_t zirconHandle, 20612 VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, 20613 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20614 { 20615 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20616 return static_cast<Result>( 20617 d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, 20618 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 20619 zirconHandle, 20620 reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) ); 20621 } 20622 20623 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20624 template <typename Dispatch> 20625 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type getMemoryZirconHandlePropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,zx_handle_t zirconHandle,Dispatch const & d) const20626 Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 20627 zx_handle_t zirconHandle, 20628 Dispatch const & d ) const 20629 { 20630 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20631 20632 VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; 20633 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20634 d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, 20635 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 20636 zirconHandle, 
20637 reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) ); 20638 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); 20639 20640 return createResultValueType( result, memoryZirconHandleProperties ); 20641 } 20642 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20643 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 20644 20645 #if defined( VK_USE_PLATFORM_FUCHSIA ) 20646 //=== VK_FUCHSIA_external_semaphore === 20647 20648 template <typename Dispatch> importSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,Dispatch const & d) const20649 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( 20650 const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20651 { 20652 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20653 return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( 20654 m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) ); 20655 } 20656 20657 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20658 template <typename Dispatch> 20659 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,Dispatch const & d) const20660 Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, 20661 Dispatch const & d ) const 20662 { 20663 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20664 20665 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( 20666 m_device, 
reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) ); 20667 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); 20668 20669 return createResultValueType( result ); 20670 } 20671 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20672 20673 template <typename Dispatch> 20674 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,zx_handle_t * pZirconHandle,Dispatch const & d) const20675 Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, 20676 zx_handle_t * pZirconHandle, 20677 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20678 { 20679 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20680 return static_cast<Result>( 20681 d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); 20682 } 20683 20684 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20685 template <typename Dispatch> 20686 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type getSemaphoreZirconHandleFUCHSIA(const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,Dispatch const & d) const20687 Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const 20688 { 20689 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20690 20691 zx_handle_t zirconHandle; 20692 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 20693 d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) ); 20694 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getSemaphoreZirconHandleFUCHSIA" ); 20695 20696 return createResultValueType( result, zirconHandle ); 20697 } 20698 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20699 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 20700 20701 #if defined( VK_USE_PLATFORM_FUCHSIA ) 20702 //=== VK_FUCHSIA_buffer_collection === 20703 20704 template <typename Dispatch> 20705 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createBufferCollectionFUCHSIA(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,Dispatch const & d) const20706 Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, 20707 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 20708 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, 20709 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20710 { 20711 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20712 return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device, 20713 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ), 20714 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 20715 reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) ); 20716 } 20717 20718 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20719 template <typename Dispatch> 20720 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type createBufferCollectionFUCHSIA(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20721 Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, 20722 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20723 Dispatch const & d ) const 
20724 { 20725 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20726 20727 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; 20728 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA( 20729 m_device, 20730 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), 20731 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20732 reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) ); 20733 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); 20734 20735 return createResultValueType( result, collection ); 20736 } 20737 20738 # ifndef VULKAN_HPP_NO_SMART_HANDLE 20739 template <typename Dispatch> 20740 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type createBufferCollectionFUCHSIAUnique(const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20741 Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, 20742 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20743 Dispatch const & d ) const 20744 { 20745 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20746 20747 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; 20748 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateBufferCollectionFUCHSIA( 20749 m_device, 20750 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), 20751 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 20752 reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) ); 
20753 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); 20754 20755 return createResultValueType( 20756 result, UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 20757 } 20758 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 20759 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20760 20761 template <typename Dispatch> 20762 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result setBufferCollectionImageConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,Dispatch const & d) const20763 Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20764 const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, 20765 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20766 { 20767 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20768 return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA( 20769 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) ); 20770 } 20771 20772 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20773 template <typename Dispatch> 20774 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setBufferCollectionImageConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,Dispatch const & d) const20775 Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20776 const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, 20777 Dispatch const & d ) const 20778 { 20779 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); 20780 20781 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA( 20782 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) ); 20783 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); 20784 20785 return createResultValueType( result ); 20786 } 20787 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20788 20789 template <typename Dispatch> 20790 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result setBufferCollectionBufferConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,Dispatch const & d) const20791 Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20792 const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, 20793 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20794 { 20795 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20796 return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( 20797 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) ); 20798 } 20799 20800 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20801 template <typename Dispatch> 20802 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setBufferCollectionBufferConstraintsFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,Dispatch const & d) const20803 Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20804 const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & 
bufferConstraintsInfo, 20805 Dispatch const & d ) const 20806 { 20807 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20808 20809 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( 20810 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) ); 20811 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); 20812 20813 return createResultValueType( result ); 20814 } 20815 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20816 20817 template <typename Dispatch> destroyBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const20818 VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20819 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 20820 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20821 { 20822 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20823 d.vkDestroyBufferCollectionFUCHSIA( 20824 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 20825 } 20826 20827 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20828 template <typename Dispatch> destroyBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20829 VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20830 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20831 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20832 { 20833 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
20834 20835 d.vkDestroyBufferCollectionFUCHSIA( 20836 m_device, 20837 static_cast<VkBufferCollectionFUCHSIA>( collection ), 20838 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 20839 } 20840 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20841 20842 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const20843 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20844 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 20845 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20846 { 20847 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20848 d.vkDestroyBufferCollectionFUCHSIA( 20849 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 20850 } 20851 20852 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20853 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const20854 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20855 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 20856 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20857 { 20858 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20859 20860 d.vkDestroyBufferCollectionFUCHSIA( 20861 m_device, 20862 static_cast<VkBufferCollectionFUCHSIA>( collection ), 20863 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 20864 } 20865 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20866 20867 template <typename Dispatch> 20868 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
getBufferCollectionPropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,Dispatch const & d) const20869 Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, 20870 VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, 20871 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20872 { 20873 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20874 return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( 20875 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) ); 20876 } 20877 20878 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20879 template <typename Dispatch> 20880 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type getBufferCollectionPropertiesFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,Dispatch const & d) const20881 Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const 20882 { 20883 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20884 20885 VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; 20886 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( 20887 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) ) ); 20888 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); 20889 20890 return createResultValueType( result, properties ); 20891 } 20892 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20893 #endif /*VK_USE_PLATFORM_FUCHSIA*/ 20894 20895 //=== VK_HUAWEI_subpass_shading === 20896 20897 template 
<typename Dispatch> getSubpassShadingMaxWorkgroupSizeHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderpass,VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,Dispatch const & d) const20898 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, 20899 VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, 20900 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20901 { 20902 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20903 return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( 20904 m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) ); 20905 } 20906 20907 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20908 template <typename Dispatch> 20909 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Extent2D> getSubpassShadingMaxWorkgroupSizeHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderpass,Dispatch const & d) const20910 Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const 20911 { 20912 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20913 20914 VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; 20915 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( 20916 m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) ); 20917 resultCheck( result, 20918 VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI", 20919 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); 20920 20921 return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize ); 20922 } 20923 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20924 20925 template <typename Dispatch> 
subpassShadingHUAWEI(Dispatch const & d) const20926 VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20927 { 20928 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20929 d.vkCmdSubpassShadingHUAWEI( m_commandBuffer ); 20930 } 20931 20932 //=== VK_HUAWEI_invocation_mask === 20933 20934 template <typename Dispatch> bindInvocationMaskHUAWEI(VULKAN_HPP_NAMESPACE::ImageView imageView,VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,Dispatch const & d) const20935 VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, 20936 VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 20937 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20938 { 20939 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20940 d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); 20941 } 20942 20943 //=== VK_NV_external_memory_rdma === 20944 20945 template <typename Dispatch> 20946 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryRemoteAddressNV(const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,Dispatch const & d) const20947 Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, 20948 VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, 20949 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20950 { 20951 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20952 return static_cast<Result>( d.vkGetMemoryRemoteAddressNV( 20953 m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) ); 20954 } 20955 20956 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20957 template <typename Dispatch> 20958 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type getMemoryRemoteAddressNV(const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo,Dispatch const & d) const20959 Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const 20960 { 20961 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20962 20963 VULKAN_HPP_NAMESPACE::RemoteAddressNV address; 20964 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryRemoteAddressNV( 20965 m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) ) ); 20966 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); 20967 20968 return createResultValueType( result, address ); 20969 } 20970 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20971 20972 //=== VK_EXT_pipeline_properties === 20973 20974 template <typename Dispatch> getPipelinePropertiesEXT(const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,Dispatch const & d) const20975 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, 20976 VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, 20977 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 20978 { 20979 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20980 return static_cast<Result>( d.vkGetPipelinePropertiesEXT( 20981 m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) ); 20982 } 20983 20984 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 20985 template <typename Dispatch> 20986 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type getPipelinePropertiesEXT(const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo,Dispatch const & d) const20987 Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const 20988 { 20989 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 20990 20991 VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; 20992 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPipelinePropertiesEXT( 20993 m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) ) ); 20994 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); 20995 20996 return createResultValueType( result, pipelineProperties ); 20997 } 20998 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 20999 21000 //=== VK_EXT_extended_dynamic_state2 === 21001 21002 template <typename Dispatch> setPatchControlPointsEXT(uint32_t patchControlPoints,Dispatch const & d) const21003 VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21004 { 21005 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21006 d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints ); 21007 } 21008 21009 template <typename Dispatch> setRasterizerDiscardEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,Dispatch const & d) const21010 VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, 21011 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21012 { 21013 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21014 d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); 21015 } 21016 21017 template <typename Dispatch> 
setDepthBiasEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,Dispatch const & d) const21018 VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21019 { 21020 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21021 d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); 21022 } 21023 21024 template <typename Dispatch> setLogicOpEXT(VULKAN_HPP_NAMESPACE::LogicOp logicOp,Dispatch const & d) const21025 VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21026 { 21027 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21028 d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) ); 21029 } 21030 21031 template <typename Dispatch> setPrimitiveRestartEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,Dispatch const & d) const21032 VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, 21033 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21034 { 21035 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21036 d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); 21037 } 21038 21039 #if defined( VK_USE_PLATFORM_SCREEN_QNX ) 21040 //=== VK_QNX_screen_surface === 21041 21042 template <typename Dispatch> createScreenSurfaceQNX(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const21043 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, 21044 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21045 
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 21046 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21047 { 21048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21049 return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance, 21050 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ), 21051 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 21052 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 21053 } 21054 21055 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21056 template <typename Dispatch> 21057 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createScreenSurfaceQNX(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21058 Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, 21059 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21060 Dispatch const & d ) const 21061 { 21062 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21063 21064 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 21065 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX( 21066 m_instance, 21067 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), 21068 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21069 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 21070 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); 21071 21072 return createResultValueType( result, surface ); 21073 } 21074 21075 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21076 template <typename Dispatch> 21077 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type 
createScreenSurfaceQNXUnique(const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21078 Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, 21079 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21080 Dispatch const & d ) const 21081 { 21082 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21083 21084 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 21085 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateScreenSurfaceQNX( 21086 m_instance, 21087 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), 21088 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21089 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 21090 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); 21091 21092 return createResultValueType( 21093 result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); 21094 } 21095 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21096 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21097 21098 template <typename Dispatch> getScreenPresentationSupportQNX(uint32_t queueFamilyIndex,struct _screen_window * window,Dispatch const & d) const21099 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, 21100 struct _screen_window * window, 21101 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21102 { 21103 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21104 return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); 21105 } 21106 21107 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21108 template <typename 
Dispatch> 21109 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX(uint32_t queueFamilyIndex,struct _screen_window & window,Dispatch const & d) const21110 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21111 { 21112 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21113 21114 VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); 21115 21116 return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); 21117 } 21118 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21119 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ 21120 21121 //=== VK_EXT_color_write_enable === 21122 21123 template <typename Dispatch> setColorWriteEnableEXT(uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,Dispatch const & d) const21124 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, 21125 const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, 21126 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21127 { 21128 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21129 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) ); 21130 } 21131 21132 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21133 template <typename Dispatch> setColorWriteEnableEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,Dispatch const & d) const21134 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables, 21135 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21136 { 21137 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21138 21139 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), 
reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) ); 21140 } 21141 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21142 21143 //=== VK_KHR_ray_tracing_maintenance1 === 21144 21145 template <typename Dispatch> traceRaysIndirect2KHR(VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,Dispatch const & d) const21146 VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, 21147 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21148 { 21149 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21150 d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); 21151 } 21152 21153 //=== VK_EXT_multi_draw === 21154 21155 template <typename Dispatch> drawMultiEXT(uint32_t drawCount,const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,uint32_t instanceCount,uint32_t firstInstance,uint32_t stride,Dispatch const & d) const21156 VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, 21157 const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, 21158 uint32_t instanceCount, 21159 uint32_t firstInstance, 21160 uint32_t stride, 21161 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21162 { 21163 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21164 d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride ); 21165 } 21166 21167 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21168 template <typename Dispatch> drawMultiEXT(VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,uint32_t instanceCount,uint32_t firstInstance,Dispatch const & d) const21169 VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, 21170 uint32_t instanceCount, 21171 uint32_t firstInstance, 21172 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21173 { 21174 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21175 21176 d.vkCmdDrawMultiEXT( m_commandBuffer, 21177 vertexInfo.size(), 21178 reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ), 21179 instanceCount, 21180 firstInstance, 21181 vertexInfo.stride() ); 21182 } 21183 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21184 21185 template <typename Dispatch> drawMultiIndexedEXT(uint32_t drawCount,const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,uint32_t instanceCount,uint32_t firstInstance,uint32_t stride,const int32_t * pVertexOffset,Dispatch const & d) const21186 VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, 21187 const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, 21188 uint32_t instanceCount, 21189 uint32_t firstInstance, 21190 uint32_t stride, 21191 const int32_t * pVertexOffset, 21192 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21193 { 21194 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21195 d.vkCmdDrawMultiIndexedEXT( 21196 m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset ); 21197 } 21198 21199 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21200 template <typename Dispatch> 21201 VULKAN_HPP_INLINE void drawMultiIndexedEXT(VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,uint32_t instanceCount,uint32_t firstInstance,Optional<const int32_t> vertexOffset,Dispatch const & d) const21202 CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, 21203 uint32_t instanceCount, 21204 uint32_t firstInstance, 21205 Optional<const int32_t> vertexOffset, 21206 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21207 { 21208 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); 21209 21210 d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, 21211 indexInfo.size(), 21212 reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ), 21213 instanceCount, 21214 firstInstance, 21215 indexInfo.stride(), 21216 static_cast<const int32_t *>( vertexOffset ) ); 21217 } 21218 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21219 21220 //=== VK_EXT_opacity_micromap === 21221 21222 template <typename Dispatch> createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,Dispatch const & d) const21223 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, 21224 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21225 VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, 21226 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21227 { 21228 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21229 return static_cast<Result>( d.vkCreateMicromapEXT( m_device, 21230 reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ), 21231 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 21232 reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) ); 21233 } 21234 21235 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21236 template <typename Dispatch> 21237 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21238 Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, 21239 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21240 Dispatch const & d ) const 21241 { 21242 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
21243 21244 VULKAN_HPP_NAMESPACE::MicromapEXT micromap; 21245 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21246 d.vkCreateMicromapEXT( m_device, 21247 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), 21248 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21249 reinterpret_cast<VkMicromapEXT *>( µmap ) ) ); 21250 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" ); 21251 21252 return createResultValueType( result, micromap ); 21253 } 21254 21255 # ifndef VULKAN_HPP_NO_SMART_HANDLE 21256 template <typename Dispatch> 21257 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type createMicromapEXTUnique(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21258 Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, 21259 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21260 Dispatch const & d ) const 21261 { 21262 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21263 21264 VULKAN_HPP_NAMESPACE::MicromapEXT micromap; 21265 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21266 d.vkCreateMicromapEXT( m_device, 21267 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ), 21268 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 21269 reinterpret_cast<VkMicromapEXT *>( µmap ) ) ); 21270 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); 21271 21272 return createResultValueType( 21273 result, UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, ObjectDestroy<Device, Dispatch>( *this, allocator, 
d ) ) ); 21274 } 21275 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 21276 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21277 21278 template <typename Dispatch> destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21279 VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 21280 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21282 { 21283 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21284 d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21285 } 21286 21287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21288 template <typename Dispatch> destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21289 VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 21290 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21291 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21292 { 21293 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21294 21295 d.vkDestroyMicromapEXT( m_device, 21296 static_cast<VkMicromapEXT>( micromap ), 21297 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21298 } 21299 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21300 21301 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const21302 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 21303 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 21304 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21305 { 
21306 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21307 d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 21308 } 21309 21310 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21311 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::MicromapEXT micromap,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const21312 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, 21313 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 21314 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21315 { 21316 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21317 21318 d.vkDestroyMicromapEXT( m_device, 21319 static_cast<VkMicromapEXT>( micromap ), 21320 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 21321 } 21322 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21323 21324 template <typename Dispatch> buildMicromapsEXT(uint32_t infoCount,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,Dispatch const & d) const21325 VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount, 21326 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, 21327 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21328 { 21329 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21330 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ); 21331 } 21332 21333 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21334 template <typename Dispatch> buildMicromapsEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,Dispatch const & d) const21335 VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & 
infos, 21336 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21337 { 21338 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21339 21340 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ); 21341 } 21342 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21343 21344 template <typename Dispatch> buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,uint32_t infoCount,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,Dispatch const & d) const21345 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 21346 uint32_t infoCount, 21347 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, 21348 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21349 { 21350 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21351 return static_cast<Result>( d.vkBuildMicromapsEXT( 21352 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) ); 21353 } 21354 21355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21356 template <typename Dispatch> 21357 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,Dispatch const & d) const21358 Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 21359 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos, 21360 Dispatch const & d ) const 21361 { 21362 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21363 21364 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBuildMicromapsEXT( 21365 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation 
// ------------------------------------------------------------------------------------------------
// VK_EXT_opacity_micromap — host-side copy / query wrappers (generated code).
// NOTE(review): this region of the generated vulkan.hpp source has been mangled by text
// extraction — original file line numbers and ctags-style signature tags are fused into the
// statements below.  The code is left byte-identical; only these free-standing line comments
// were added (C++ allows a line comment between any two tokens).  TODO confirm against the
// pristine generated header before relying on exact formatting.
// The line below is the tail of the enhanced Device::buildMicromapsEXT overload (its head lies
// above this region): it checks `result` against the three success codes visible here
// (eSuccess / eOperationDeferredKHR / eOperationNotDeferredKHR) and returns the raw Result.
// It is followed by the plain-pointer Device::copyMicromapEXT (a cast-and-call around
// d.vkCopyMicromapEXT) and the head of the enhanced reference overload, whose call continues
// on the next physical line.
), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ) ); 21366 resultCheck( 21367 result, 21368 VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", 21369 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 21370 21371 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 21372 } 21373 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21374 21375 template <typename Dispatch> copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,Dispatch const & d) const21376 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 21377 const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, 21378 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21379 { 21380 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21381 return static_cast<Result>( 21382 d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) ); 21383 } 21384 21385 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21386 template <typename Dispatch> copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,Dispatch const & d) const21387 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 21388 const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, 21389 Dispatch const & d ) const 21390 { 21391 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21392 21393 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 21394 d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ),
// Continuation of the enhanced Device::copyMicromapEXT: finishes the vkCopyMicromapEXT call,
// accepts the two deferred-operation result codes as success, and returns the Result.
// Then: Device::copyMicromapToMemoryEXT — pointer overload (noexcept, returns raw Result) and
// the head of the enhanced reference overload; its vkCopyMicromapToMemoryEXT call continues
// on the next physical line.
reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ) ); 21395 resultCheck( 21396 result, 21397 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", 21398 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 21399 21400 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 21401 } 21402 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21403 21404 template <typename Dispatch> copyMicromapToMemoryEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,Dispatch const & d) const21405 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 21406 const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, 21407 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21408 { 21409 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21410 return static_cast<Result>( d.vkCopyMicromapToMemoryEXT( 21411 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) ); 21412 } 21413 21414 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21415 template <typename Dispatch> copyMicromapToMemoryEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,Dispatch const & d) const21416 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT( 21417 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const 21418 { 21419 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21420 21421 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMicromapToMemoryEXT( 21422 m_device,
// Continuation of enhanced Device::copyMicromapToMemoryEXT (same success-code set), followed
// by Device::copyMemoryToMicromapEXT — pointer overload plus the head of the enhanced
// reference overload; its `result` initializer continues on the next physical line.
static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ) ); 21423 resultCheck( 21424 result, 21425 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", 21426 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 21427 21428 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 21429 } 21430 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21431 21432 template <typename Dispatch> copyMemoryToMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,Dispatch const & d) const21433 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 21434 const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, 21435 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21436 { 21437 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21438 return static_cast<Result>( d.vkCopyMemoryToMicromapEXT( 21439 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) ); 21440 } 21441 21442 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21443 template <typename Dispatch> copyMemoryToMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,Dispatch const & d) const21444 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT( 21445 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const 21446 { 21447 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21448 21449 VULKAN_HPP_NAMESPACE::Result result =
// Continuation of enhanced Device::copyMemoryToMicromapEXT, then the raw-pointer
// Device::writeMicromapsPropertiesEXT (forwards micromapCount/dataSize/pData/stride verbatim
// to vkWriteMicromapsPropertiesEXT) and the head of the vector-returning enhanced overload,
// whose signature continues on the next physical line.
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCopyMemoryToMicromapEXT( 21450 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ) ); 21451 resultCheck( 21452 result, 21453 VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", 21454 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); 21455 21456 return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); 21457 } 21458 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21459 21460 template <typename Dispatch> writeMicromapsPropertiesEXT(uint32_t micromapCount,const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,void * pData,size_t stride,Dispatch const & d) const21461 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount, 21462 const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, 21463 VULKAN_HPP_NAMESPACE::QueryType queryType, 21464 size_t dataSize, 21465 void * pData, 21466 size_t stride, 21467 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21468 { 21469 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21470 return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( 21471 m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) ); 21472 } 21473 21474 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21475 template <typename DataType, typename DataTypeAllocator, typename Dispatch> 21476 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeMicromapsPropertiesEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t dataSize,size_t stride,Dispatch const & d) const21477
// Vector overload of Device::writeMicromapsPropertiesEXT: asserts dataSize is a multiple of
// sizeof(DataType), sizes a std::vector<DataType> accordingly, queries into it, and wraps it
// in a ResultValueType.  Followed by the single-value writeMicromapsPropertyEXT variant, whose
// vkWriteMicromapsPropertiesEXT call continues on the next physical line with sizeof(DataType).
Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 21478 VULKAN_HPP_NAMESPACE::QueryType queryType, 21479 size_t dataSize, 21480 size_t stride, 21481 Dispatch const & d ) const 21482 { 21483 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21484 21485 VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); 21486 std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); 21487 VULKAN_HPP_NAMESPACE::Result result = 21488 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device, 21489 micromaps.size(), 21490 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 21491 static_cast<VkQueryType>( queryType ), 21492 data.size() * sizeof( DataType ), 21493 reinterpret_cast<void *>( data.data() ), 21494 stride ) ); 21495 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); 21496 21497 return createResultValueType( result, data ); 21498 } 21499 21500 template <typename DataType, typename Dispatch> 21501 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type writeMicromapsPropertyEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,size_t stride,Dispatch const & d) const21502 Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 21503 VULKAN_HPP_NAMESPACE::QueryType queryType, 21504 size_t stride, 21505 Dispatch const & d ) const 21506 { 21507 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21508 21509 DataType data; 21510 VULKAN_HPP_NAMESPACE::Result result = 21511 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWriteMicromapsPropertiesEXT( m_device, 21512 micromaps.size(), 21513 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 21514 static_cast<VkQueryType>( queryType ), 21515
// End of writeMicromapsPropertyEXT (writes into a single stack DataType), then the
// CommandBuffer::copyMicromapEXT pointer/reference overloads (void wrappers around
// vkCmdCopyMicromapEXT) and the pointer overload of CommandBuffer::copyMicromapToMemoryEXT;
// the trailing #ifndef opens the enhanced-mode region continued below.
sizeof( DataType ), 21516 reinterpret_cast<void *>( &data ), 21517 stride ) ); 21518 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); 21519 21520 return createResultValueType( result, data ); 21521 } 21522 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21523 21524 template <typename Dispatch> copyMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,Dispatch const & d) const21525 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21526 { 21527 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21528 d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ); 21529 } 21530 21531 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21532 template <typename Dispatch> copyMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,Dispatch const & d) const21533 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21534 { 21535 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21536 21537 d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ); 21538 } 21539 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21540 21541 template <typename Dispatch> copyMicromapToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,Dispatch const & d) const21542 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, 21543 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21544 { 21545 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21546 d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ); 21547 } 21548 21549 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ------------------------------------------------------------------------------------------------
// CommandBuffer micromap recording, micromap queries, HUAWEI cluster draws, pageable-memory
// priority, KHR_maintenance4 memory-requirement queries, VALVE host mapping, and NV
// indirect-copy / memory-decompression wrappers (generated code).
// NOTE(review): extraction has fused original line numbers and ctags-style signature tags into
// the statements below.  Code is left byte-identical; only these free-standing line comments
// were added at token-safe positions.  TODO confirm against the pristine generated header.
// Next line: CommandBuffer::copyMicromapToMemoryEXT (reference overload) and both
// CommandBuffer::copyMemoryToMicromapEXT overloads — void, noexcept wrappers that cast and
// forward to the vkCmd* entry points; ends with the head of the raw
// CommandBuffer::writeMicromapsPropertiesEXT signature, continued on the following line.
21550 template <typename Dispatch> copyMicromapToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,Dispatch const & d) const21551 VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, 21552 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21553 { 21554 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21555 21556 d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ); 21557 } 21558 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21559 21560 template <typename Dispatch> copyMemoryToMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,Dispatch const & d) const21561 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, 21562 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21563 { 21564 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21565 d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ); 21566 } 21567 21568 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21569 template <typename Dispatch> copyMemoryToMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,Dispatch const & d) const21570 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, 21571 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21572 { 21573 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21574 21575 d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ); 21576 } 21577 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21578 21579 template <typename Dispatch> writeMicromapsPropertiesEXT(uint32_t micromapCount,const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,VULKAN_HPP_NAMESPACE::QueryType
// Both CommandBuffer::writeMicromapsPropertiesEXT overloads: record
// vkCmdWriteMicromapsPropertiesEXT with a count+pointer pair (raw) or an ArrayProxy
// (enhanced, size/data derived from the proxy).  Ends with the head of
// Device::getMicromapCompatibilityEXT (pointer form), continued on the next line.
queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const21580 VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount, 21581 const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, 21582 VULKAN_HPP_NAMESPACE::QueryType queryType, 21583 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 21584 uint32_t firstQuery, 21585 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21586 { 21587 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21588 d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, 21589 micromapCount, 21590 reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), 21591 static_cast<VkQueryType>( queryType ), 21592 static_cast<VkQueryPool>( queryPool ), 21593 firstQuery ); 21594 } 21595 21596 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21597 template <typename Dispatch> 21598 VULKAN_HPP_INLINE void writeMicromapsPropertiesEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,VULKAN_HPP_NAMESPACE::QueryType queryType,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,Dispatch const & d) const21599 CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps, 21600 VULKAN_HPP_NAMESPACE::QueryType queryType, 21601 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 21602 uint32_t firstQuery, 21603 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21604 { 21605 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21606 21607 d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, 21608 micromaps.size(), 21609 reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), 21610 static_cast<VkQueryType>( queryType ), 21611 static_cast<VkQueryPool>( queryPool ), 21612 firstQuery ); 21613 } 21614 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21615 21616 template <typename Dispatch> getMicromapCompatibilityEXT(const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT *
// Device::getMicromapCompatibilityEXT overloads: pointer form writes through pCompatibility;
// enhanced form returns an AccelerationStructureCompatibilityKHR by value.  Ends with the
// opening of Device::getMicromapBuildSizesEXT, whose parameter list starts on the next line.
pVersionInfo,VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,Dispatch const & d) const21617 VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, 21618 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, 21619 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21620 { 21621 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21622 d.vkGetDeviceMicromapCompatibilityEXT( m_device, 21623 reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ), 21624 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) ); 21625 } 21626 21627 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21628 template <typename Dispatch> 21629 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getMicromapCompatibilityEXT(const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo,Dispatch const & d) const21630 Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21631 { 21632 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21633 21634 VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; 21635 d.vkGetDeviceMicromapCompatibilityEXT( m_device, 21636 reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ), 21637 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) ); 21638 21639 return compatibility; 21640 } 21641 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21642 21643 template <typename Dispatch> getMicromapBuildSizesEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,Dispatch const & d) const21644 VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT(
// Device::getMicromapBuildSizesEXT overloads (pointer out-param vs. returned
// MicromapBuildSizesInfoEXT), then the VK_HUAWEI_cluster_culling_shader section begins with
// CommandBuffer::drawClusterHUAWEI, whose last parameter continues on the next line.
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 21645 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, 21646 VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, 21647 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21648 { 21649 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21650 d.vkGetMicromapBuildSizesEXT( m_device, 21651 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 21652 reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ), 21653 reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) ); 21654 } 21655 21656 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21657 template <typename Dispatch> 21658 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT getMicromapBuildSizesEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,Dispatch const & d) const21659 Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, 21660 const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, 21661 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21662 { 21663 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21664 21665 VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo; 21666 d.vkGetMicromapBuildSizesEXT( m_device, 21667 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), 21668 reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ), 21669 reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) ); 21670 21671 return sizeInfo; 21672 } 21673 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21674 21675 //=== VK_HUAWEI_cluster_culling_shader === 21676 21677 template <typename Dispatch> 21678 VULKAN_HPP_INLINE void drawClusterHUAWEI(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const21679 CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY,
// Rest of drawClusterHUAWEI and drawClusterIndirectHUAWEI (record vkCmdDrawCluster*HUAWEI),
// Device::setMemoryPriorityEXT (VK_EXT_pageable_device_local_memory), and the start of the
// VK_KHR_maintenance4 section: Device::getBufferMemoryRequirementsKHR (pointer form), whose
// header-version assert completes on the next line.
uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21680 { 21681 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21682 d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); 21683 } 21684 21685 template <typename Dispatch> drawClusterIndirectHUAWEI(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,Dispatch const & d) const21686 VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, 21687 VULKAN_HPP_NAMESPACE::DeviceSize offset, 21688 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21689 { 21690 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21691 d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); 21692 } 21693 21694 //=== VK_EXT_pageable_device_local_memory === 21695 21696 template <typename Dispatch> setMemoryPriorityEXT(VULKAN_HPP_NAMESPACE::DeviceMemory memory,float priority,Dispatch const & d) const21697 VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21698 { 21699 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21700 d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority ); 21701 } 21702 21703 //=== VK_KHR_maintenance4 === 21704 21705 template <typename Dispatch> getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const21706 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, 21707 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 21708 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21709 { 21710 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() ==
// Completion of the pointer-form getBufferMemoryRequirementsKHR, the value-returning enhanced
// overload, and the opening of the StructureChain variant (its template parameter pack
// continues on the next line).
VK_HEADER_VERSION ); 21711 d.vkGetDeviceBufferMemoryRequirementsKHR( 21712 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 21713 } 21714 21715 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21716 template <typename Dispatch> 21717 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const21718 Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21719 { 21720 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21721 21722 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 21723 d.vkGetDeviceBufferMemoryRequirementsKHR( 21724 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 21725 21726 return memoryRequirements; 21727 } 21728 21729 template <typename X, typename Y, typename...
// StructureChain variant of getBufferMemoryRequirementsKHR: fills the chain's embedded
// MemoryRequirements2 member and returns the whole chain.  Followed by the pointer form of
// Device::getImageMemoryRequirementsKHR and the head of its value-returning overload
// (signature continues on the next line).
Z, typename Dispatch> 21730 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,Dispatch const & d) const21731 Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21732 { 21733 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21734 21735 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 21736 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 21737 d.vkGetDeviceBufferMemoryRequirementsKHR( 21738 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 21739 21740 return structureChain; 21741 } 21742 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21743 21744 template <typename Dispatch> getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const21745 VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 21746 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 21747 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21748 { 21749 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21750 d.vkGetDeviceImageMemoryRequirementsKHR( 21751 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 21752 } 21753 21754 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21755 template <typename Dispatch> 21756 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR(const
// Value-returning and StructureChain overloads of Device::getImageMemoryRequirementsKHR,
// then the pointer form of Device::getImageSparseMemoryRequirementsKHR begins on the next line
// (classic count/fill two-phase enumeration pattern).
VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const21757 Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21758 { 21759 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21760 21761 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 21762 d.vkGetDeviceImageMemoryRequirementsKHR( 21763 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 21764 21765 return memoryRequirements; 21766 } 21767 21768 template <typename X, typename Y, typename... Z, typename Dispatch> 21769 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const21770 Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21771 { 21772 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21773 21774 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 21775 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 21776 d.vkGetDeviceImageMemoryRequirementsKHR( 21777 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 21778 21779 return structureChain; 21780 } 21781 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21782 21783 template <typename Dispatch> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,Dispatch const & d) const21784
// Pointer form of getImageSparseMemoryRequirementsKHR plus the default-allocator vector
// overload: query count with nullptr, resize, query again; the fill call's last argument
// continues on the next physical line.
VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, 21785 uint32_t * pSparseMemoryRequirementCount, 21786 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, 21787 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21788 { 21789 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21790 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, 21791 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), 21792 pSparseMemoryRequirementCount, 21793 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); 21794 } 21795 21796 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21797 template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> 21798 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,Dispatch const & d) const21799 Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const 21800 { 21801 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21802 21803 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; 21804 uint32_t sparseMemoryRequirementCount; 21805 d.vkGetDeviceImageSparseMemoryRequirementsKHR( 21806 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 21807 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 21808 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, 21809 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 21810 &sparseMemoryRequirementCount, 21811 reinterpret_cast<VkSparseImageMemoryRequirements2 *>(
// End of the default-allocator overload (shrinks the vector if the second query returned
// fewer entries), then the custom-allocator overload with the same two-phase pattern; its
// fill call continues on the next physical line.
sparseMemoryRequirements.data() ) ); 21812 21813 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 21814 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 21815 { 21816 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 21817 } 21818 return sparseMemoryRequirements; 21819 } 21820 21821 template <typename SparseImageMemoryRequirements2Allocator, 21822 typename Dispatch, 21823 typename B1, 21824 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, int>::type> 21825 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,Dispatch const & d) const21826 Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, 21827 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, 21828 Dispatch const & d ) const 21829 { 21830 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21831 21832 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( 21833 sparseImageMemoryRequirements2Allocator ); 21834 uint32_t sparseMemoryRequirementCount; 21835 d.vkGetDeviceImageSparseMemoryRequirementsKHR( 21836 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); 21837 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 21838 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, 21839 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), 21840 &sparseMemoryRequirementCount, 21841
// End of the custom-allocator sparse-requirements overload, start of the
// VK_VALVE_descriptor_set_host_mapping section: pointer form of
// Device::getDescriptorSetLayoutHostMappingInfoVALVE and the head of its value-returning
// overload (the header-version assert completes on the next line).
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); 21842 21843 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 21844 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) 21845 { 21846 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 21847 } 21848 return sparseMemoryRequirements; 21849 } 21850 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21851 21852 //=== VK_VALVE_descriptor_set_host_mapping === 21853 21854 template <typename Dispatch> getDescriptorSetLayoutHostMappingInfoVALVE(const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,Dispatch const & d) const21855 VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, 21856 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, 21857 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21858 { 21859 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21860 d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, 21861 reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ), 21862 reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) ); 21863 } 21864 21865 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21866 template <typename Dispatch> 21867 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE(const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,Dispatch const & d) const21868 Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, 21869 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21870 { 21871 VULKAN_HPP_ASSERT(
// Remainder of the VALVE host-mapping overloads (value-returning layout query and both
// getDescriptorSetHostMappingVALVE forms — out-param void** vs. returned void*), followed by
// the VK_NV_copy_memory_indirect section; CommandBuffer::copyMemoryIndirectNV's qualified
// name continues on the next physical line.
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21872 21873 VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; 21874 d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, 21875 reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ), 21876 reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) ); 21877 21878 return hostMapping; 21879 } 21880 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21881 21882 template <typename Dispatch> 21883 VULKAN_HPP_INLINE void getDescriptorSetHostMappingVALVE(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,void ** ppData,Dispatch const & d) const21884 Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21885 { 21886 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21887 d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData ); 21888 } 21889 21890 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21891 template <typename Dispatch> getDescriptorSetHostMappingVALVE(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,Dispatch const & d) const21892 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 21893 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21894 { 21895 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21896 21897 void * pData; 21898 d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData ); 21899 21900 return pData; 21901 } 21902 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21903 21904 //=== VK_NV_copy_memory_indirect === 21905 21906 template <typename Dispatch> copyMemoryIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t copyCount,uint32_t stride,Dispatch const & d) const21907 VULKAN_HPP_INLINE void
// CommandBuffer::copyMemoryIndirectNV and the raw copyMemoryToImageIndirectNV (explicit
// copyCount + pointer array), plus the head of the ArrayProxy enhanced overload whose
// signature completes on the next physical line.
CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 21908 uint32_t copyCount, 21909 uint32_t stride, 21910 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21911 { 21912 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21913 d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride ); 21914 } 21915 21916 template <typename Dispatch> copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t copyCount,uint32_t stride,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources,Dispatch const & d) const21917 VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 21918 uint32_t copyCount, 21919 uint32_t stride, 21920 VULKAN_HPP_NAMESPACE::Image dstImage, 21921 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 21922 const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, 21923 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21924 { 21925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21926 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, 21927 static_cast<VkDeviceAddress>( copyBufferAddress ), 21928 copyCount, 21929 stride, 21930 static_cast<VkImage>( dstImage ), 21931 static_cast<VkImageLayout>( dstImageLayout ), 21932 reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) ); 21933 } 21934 21935 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21936 template <typename Dispatch> 21937 VULKAN_HPP_INLINE void copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,uint32_t stride,VULKAN_HPP_NAMESPACE::Image dstImage,VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources,Dispatch const & d)
// Enhanced copyMemoryToImageIndirectNV (count derived from imageSubresources.size()),
// the VK_NV_memory_decompression section's raw decompressMemoryNV, and the head of its
// ArrayProxy overload, continued on the next physical line.
const21938 CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, 21939 uint32_t stride, 21940 VULKAN_HPP_NAMESPACE::Image dstImage, 21941 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, 21942 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources, 21943 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21944 { 21945 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21946 21947 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, 21948 static_cast<VkDeviceAddress>( copyBufferAddress ), 21949 imageSubresources.size(), 21950 stride, 21951 static_cast<VkImage>( dstImage ), 21952 static_cast<VkImageLayout>( dstImageLayout ), 21953 reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) ); 21954 } 21955 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21956 21957 //=== VK_NV_memory_decompression === 21958 21959 template <typename Dispatch> decompressMemoryNV(uint32_t decompressRegionCount,const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions,Dispatch const & d) const21960 VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, 21961 const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, 21962 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21963 { 21964 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21965 d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) ); 21966 } 21967 21968 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 21969 template <typename Dispatch> 21970 VULKAN_HPP_INLINE void decompressMemoryNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions,Dispatch const & d) const21971 CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const
// Completion of the ArrayProxy decompressMemoryNV, CommandBuffer::decompressMemoryIndirectCountNV,
// and the head of Device::getPipelineIndirectMemoryRequirementsNV
// (VK_NV_device_generated_commands_compute) — its body continues past this region.
VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions, 21972 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21973 { 21974 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21975 21976 d.vkCmdDecompressMemoryNV( 21977 m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) ); 21978 } 21979 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 21980 21981 template <typename Dispatch> decompressMemoryIndirectCountNV(VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,uint32_t stride,Dispatch const & d) const21982 VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, 21983 VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, 21984 uint32_t stride, 21985 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21986 { 21987 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 21988 d.vkCmdDecompressMemoryIndirectCountNV( 21989 m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride ); 21990 } 21991 21992 //=== VK_NV_device_generated_commands_compute === 21993 21994 template <typename Dispatch> getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const21995 VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, 21996 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 21997 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 21998 { 21999 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22000 d.vkGetPipelineIndirectMemoryRequirementsNV( 22001 m_device,
reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 22002 } 22003 22004 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22005 template <typename Dispatch> 22006 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Dispatch const & d) const22007 Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 22008 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22009 { 22010 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22011 22012 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 22013 d.vkGetPipelineIndirectMemoryRequirementsNV( 22014 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 22015 22016 return memoryRequirements; 22017 } 22018 22019 template <typename X, typename Y, typename... 
Z, typename Dispatch> 22020 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getPipelineIndirectMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Dispatch const & d) const22021 Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 22022 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22023 { 22024 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22025 22026 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 22027 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 22028 d.vkGetPipelineIndirectMemoryRequirementsNV( 22029 m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 22030 22031 return structureChain; 22032 } 22033 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22034 22035 template <typename Dispatch> updatePipelineIndirectBufferNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,VULKAN_HPP_NAMESPACE::Pipeline pipeline,Dispatch const & d) const22036 VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 22037 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 22038 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22039 { 22040 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22041 d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); 22042 } 22043 22044 template <typename Dispatch> getPipelineIndirectAddressNV(const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,Dispatch const & d) const22045 VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const 
VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, 22046 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22047 { 22048 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22049 return static_cast<DeviceAddress>( 22050 d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) ); 22051 } 22052 22053 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22054 template <typename Dispatch> 22055 VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV(const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info,Dispatch const & d) const22056 Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22057 { 22058 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22059 22060 VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) ); 22061 22062 return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); 22063 } 22064 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22065 22066 //=== VK_EXT_extended_dynamic_state3 === 22067 22068 template <typename Dispatch> setTessellationDomainOriginEXT(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,Dispatch const & d) const22069 VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, 22070 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22071 { 22072 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22073 d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) ); 22074 } 22075 22076 template <typename Dispatch> setDepthClampEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable,Dispatch const & d) const22077 VULKAN_HPP_INLINE void 
CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22078 { 22079 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22080 d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) ); 22081 } 22082 22083 template <typename Dispatch> setPolygonModeEXT(VULKAN_HPP_NAMESPACE::PolygonMode polygonMode,Dispatch const & d) const22084 VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22085 { 22086 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22087 d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) ); 22088 } 22089 22090 template <typename Dispatch> setRasterizationSamplesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,Dispatch const & d) const22091 VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, 22092 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22093 { 22094 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22095 d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) ); 22096 } 22097 22098 template <typename Dispatch> setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,Dispatch const & d) const22099 VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 22100 const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, 22101 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22102 { 22103 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22104 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) ); 22105 } 22106 
22107 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22108 template <typename Dispatch> setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,Dispatch const & d) const22109 VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 22110 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask, 22111 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 22112 { 22113 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22114 # ifdef VULKAN_HPP_NO_EXCEPTIONS 22115 VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 ); 22116 # else 22117 if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 ) 22118 { 22119 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" ); 22120 } 22121 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 22122 22123 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) ); 22124 } 22125 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22126 22127 template <typename Dispatch> setAlphaToCoverageEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,Dispatch const & d) const22128 VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, 22129 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22130 { 22131 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22132 d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) ); 22133 } 22134 22135 template <typename Dispatch> setAlphaToOneEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable,Dispatch const & d) const22136 VULKAN_HPP_INLINE void 
CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22137 { 22138 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22139 d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) ); 22140 } 22141 22142 template <typename Dispatch> setLogicOpEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable,Dispatch const & d) const22143 VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22144 { 22145 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22146 d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) ); 22147 } 22148 22149 template <typename Dispatch> setColorBlendEnableEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,Dispatch const & d) const22150 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, 22151 uint32_t attachmentCount, 22152 const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, 22153 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22154 { 22155 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22156 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) ); 22157 } 22158 22159 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22160 template <typename Dispatch> setColorBlendEnableEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,Dispatch const & d) const22161 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, 22162 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables, 22163 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22164 { 22165 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22166 22167 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) ); 22168 } 22169 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22170 22171 template <typename Dispatch> setColorBlendEquationEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,Dispatch const & d) const22172 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, 22173 uint32_t attachmentCount, 22174 const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, 22175 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22176 { 22177 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22178 d.vkCmdSetColorBlendEquationEXT( 22179 m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) ); 22180 } 22181 22182 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22183 template <typename Dispatch> 22184 VULKAN_HPP_INLINE void setColorBlendEquationEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,Dispatch const & d) const22185 CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, 22186 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations, 22187 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22188 { 22189 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22190 22191 d.vkCmdSetColorBlendEquationEXT( 22192 m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) ); 22193 } 22194 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22195 22196 template <typename Dispatch> setColorWriteMaskEXT(uint32_t firstAttachment,uint32_t 
attachmentCount,const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,Dispatch const & d) const22197 VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, 22198 uint32_t attachmentCount, 22199 const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, 22200 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22201 { 22202 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22203 d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) ); 22204 } 22205 22206 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22207 template <typename Dispatch> 22208 VULKAN_HPP_INLINE void setColorWriteMaskEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,Dispatch const & d) const22209 CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, 22210 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks, 22211 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22212 { 22213 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22214 22215 d.vkCmdSetColorWriteMaskEXT( 22216 m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) ); 22217 } 22218 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22219 22220 template <typename Dispatch> setRasterizationStreamEXT(uint32_t rasterizationStream,Dispatch const & d) const22221 VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22222 { 22223 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22224 d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream ); 22225 } 22226 22227 template <typename Dispatch> 22228 VULKAN_HPP_INLINE void 
setConservativeRasterizationModeEXT(VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,Dispatch const & d) const22229 CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, 22230 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22231 { 22232 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22233 d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) ); 22234 } 22235 22236 template <typename Dispatch> setExtraPrimitiveOverestimationSizeEXT(float extraPrimitiveOverestimationSize,Dispatch const & d) const22237 VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, 22238 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22239 { 22240 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22241 d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize ); 22242 } 22243 22244 template <typename Dispatch> setDepthClipEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable,Dispatch const & d) const22245 VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22246 { 22247 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22248 d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) ); 22249 } 22250 22251 template <typename Dispatch> setSampleLocationsEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,Dispatch const & d) const22252 VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, 22253 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22254 { 22255 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22256 
d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) ); 22257 } 22258 22259 template <typename Dispatch> setColorBlendAdvancedEXT(uint32_t firstAttachment,uint32_t attachmentCount,const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,Dispatch const & d) const22260 VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, 22261 uint32_t attachmentCount, 22262 const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, 22263 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22264 { 22265 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22266 d.vkCmdSetColorBlendAdvancedEXT( 22267 m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) ); 22268 } 22269 22270 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22271 template <typename Dispatch> 22272 VULKAN_HPP_INLINE void setColorBlendAdvancedEXT(uint32_t firstAttachment,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,Dispatch const & d) const22273 CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, 22274 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced, 22275 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22276 { 22277 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22278 22279 d.vkCmdSetColorBlendAdvancedEXT( 22280 m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) ); 22281 } 22282 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22283 22284 template <typename Dispatch> setProvokingVertexModeEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,Dispatch const & d) const22285 VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT 
provokingVertexMode, 22286 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22287 { 22288 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22289 d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) ); 22290 } 22291 22292 template <typename Dispatch> setLineRasterizationModeEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,Dispatch const & d) const22293 VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, 22294 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22295 { 22296 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22297 d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) ); 22298 } 22299 22300 template <typename Dispatch> setLineStippleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable,Dispatch const & d) const22301 VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22302 { 22303 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22304 d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) ); 22305 } 22306 22307 template <typename Dispatch> setDepthClipNegativeOneToOneEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,Dispatch const & d) const22308 VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, 22309 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22310 { 22311 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22312 d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) ); 22313 } 22314 22315 template <typename Dispatch> setViewportWScalingEnableNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,Dispatch 
const & d) const22316 VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, 22317 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22318 { 22319 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22320 d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) ); 22321 } 22322 22323 template <typename Dispatch> setViewportSwizzleNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,Dispatch const & d) const22324 VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, 22325 uint32_t viewportCount, 22326 const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, 22327 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22328 { 22329 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22330 d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) ); 22331 } 22332 22333 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22334 template <typename Dispatch> 22335 VULKAN_HPP_INLINE void setViewportSwizzleNV(uint32_t firstViewport,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,Dispatch const & d) const22336 CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, 22337 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles, 22338 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22339 { 22340 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22341 22342 d.vkCmdSetViewportSwizzleNV( 22343 m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) ); 22344 } 22345 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22346 22347 template <typename Dispatch> 
setCoverageToColorEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,Dispatch const & d) const22348 VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, 22349 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22350 { 22351 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22352 d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) ); 22353 } 22354 22355 template <typename Dispatch> setCoverageToColorLocationNV(uint32_t coverageToColorLocation,Dispatch const & d) const22356 VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22357 { 22358 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22359 d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation ); 22360 } 22361 22362 template <typename Dispatch> setCoverageModulationModeNV(VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,Dispatch const & d) const22363 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, 22364 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22365 { 22366 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22367 d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) ); 22368 } 22369 22370 template <typename Dispatch> setCoverageModulationTableEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,Dispatch const & d) const22371 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, 22372 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22373 { 22374 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22375 d.vkCmdSetCoverageModulationTableEnableNV( 
m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) ); 22376 } 22377 22378 template <typename Dispatch> setCoverageModulationTableNV(uint32_t coverageModulationTableCount,const float * pCoverageModulationTable,Dispatch const & d) const22379 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount, 22380 const float * pCoverageModulationTable, 22381 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22382 { 22383 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22384 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); 22385 } 22386 22387 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22388 template <typename Dispatch> setCoverageModulationTableNV(VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,Dispatch const & d) const22389 VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable, 22390 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22391 { 22392 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22393 22394 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() ); 22395 } 22396 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22397 22398 template <typename Dispatch> setShadingRateImageEnableNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,Dispatch const & d) const22399 VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, 22400 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22401 { 22402 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22403 d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) ); 22404 } 22405 22406 template <typename Dispatch> 
setRepresentativeFragmentTestEnableNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,Dispatch const & d) const22407 VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, 22408 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22409 { 22410 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22411 d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) ); 22412 } 22413 22414 template <typename Dispatch> setCoverageReductionModeNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,Dispatch const & d) const22415 VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, 22416 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22417 { 22418 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22419 d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) ); 22420 } 22421 22422 //=== VK_EXT_shader_module_identifier === 22423 22424 template <typename Dispatch> getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,Dispatch const & d) const22425 VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 22426 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, 22427 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22428 { 22429 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22430 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); 22431 } 22432 22433 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22434 template <typename Dispatch> 22435 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Dispatch const & d) const22436 Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22437 { 22438 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22439 22440 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; 22441 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); 22442 22443 return identifier; 22444 } 22445 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22446 22447 template <typename Dispatch> getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,Dispatch const & d) const22448 VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, 22449 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, 22450 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22451 { 22452 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22453 d.vkGetShaderModuleCreateInfoIdentifierEXT( 22454 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); 22455 } 22456 22457 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22458 template <typename Dispatch> 22459 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,Dispatch const & d) const22460 Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, 22461 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22462 { 22463 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22464 22465 VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; 22466 d.vkGetShaderModuleCreateInfoIdentifierEXT( 22467 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); 22468 22469 return identifier; 22470 } 22471 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22472 22473 //=== VK_NV_optical_flow === 22474 22475 template <typename Dispatch> 22476 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,uint32_t * pFormatCount,VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,Dispatch const & d) const22477 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, 22478 uint32_t * pFormatCount, 22479 VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, 22480 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22481 { 22482 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22483 return static_cast<Result>( 22484 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 22485 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ), 22486 pFormatCount, 22487 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) ); 22488 } 22489 22490 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22491 template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch> 22492 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22493 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,Dispatch const & d) const22494 
PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, 22495 Dispatch const & d ) const 22496 { 22497 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22498 22499 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties; 22500 uint32_t formatCount; 22501 VULKAN_HPP_NAMESPACE::Result result; 22502 do 22503 { 22504 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( 22505 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); 22506 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) 22507 { 22508 imageFormatProperties.resize( formatCount ); 22509 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22510 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 22511 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), 22512 &formatCount, 22513 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); 22514 } 22515 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22516 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); 22517 VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); 22518 if ( formatCount < imageFormatProperties.size() ) 22519 { 22520 imageFormatProperties.resize( formatCount ); 22521 } 22522 return createResultValueType( result, imageFormatProperties ); 22523 } 22524 22525 template <typename OpticalFlowImageFormatPropertiesNVAllocator, 22526 typename Dispatch, 22527 typename B1, 22528 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value, int>::type> 22529 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22530 typename 
ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,Dispatch const & d) const22531 PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, 22532 OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, 22533 Dispatch const & d ) const 22534 { 22535 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22536 22537 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties( 22538 opticalFlowImageFormatPropertiesNVAllocator ); 22539 uint32_t formatCount; 22540 VULKAN_HPP_NAMESPACE::Result result; 22541 do 22542 { 22543 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( 22544 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); 22545 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) 22546 { 22547 imageFormatProperties.resize( formatCount ); 22548 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22549 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, 22550 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), 22551 &formatCount, 22552 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); 22553 } 22554 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 22555 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); 22556 VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); 
22557 if ( formatCount < imageFormatProperties.size() ) 22558 { 22559 imageFormatProperties.resize( formatCount ); 22560 } 22561 return createResultValueType( result, imageFormatProperties ); 22562 } 22563 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22564 22565 template <typename Dispatch> createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,Dispatch const & d) const22566 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, 22567 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22568 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, 22569 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22570 { 22571 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22572 return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device, 22573 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ), 22574 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 22575 reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) ); 22576 } 22577 22578 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22579 template <typename Dispatch> 22580 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22581 Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, 22582 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22583 Dispatch const & d ) const 22584 { 22585 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22586 22587 
VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; 22588 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV( 22589 m_device, 22590 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), 22591 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22592 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); 22593 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); 22594 22595 return createResultValueType( result, session ); 22596 } 22597 22598 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22599 template <typename Dispatch> 22600 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type createOpticalFlowSessionNVUnique(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22601 Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, 22602 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22603 Dispatch const & d ) const 22604 { 22605 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22606 22607 VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; 22608 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateOpticalFlowSessionNV( 22609 m_device, 22610 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), 22611 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22612 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); 22613 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); 22614 22615 return createResultValueType( 22616 
result, UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>( session, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 22617 } 22618 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 22619 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22620 22621 template <typename Dispatch> destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const22622 VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 22623 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22624 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22625 { 22626 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22627 d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 22628 } 22629 22630 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22631 template <typename Dispatch> destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22632 VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 22633 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22634 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22635 { 22636 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22637 22638 d.vkDestroyOpticalFlowSessionNV( 22639 m_device, 22640 static_cast<VkOpticalFlowSessionNV>( session ), 22641 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 22642 } 22643 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22644 22645 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,Dispatch const & d) const22646 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 22647 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22648 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22649 { 22650 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22651 d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 22652 } 22653 22654 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22655 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22656 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 22657 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22658 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22659 { 22660 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22661 22662 d.vkDestroyOpticalFlowSessionNV( 22663 m_device, 22664 static_cast<VkOpticalFlowSessionNV>( session ), 22665 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 22666 } 22667 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22668 22669 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 22670 template <typename Dispatch> bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,VULKAN_HPP_NAMESPACE::ImageView view,VULKAN_HPP_NAMESPACE::ImageLayout layout,Dispatch const & d) const22671 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 22672 VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, 22673 VULKAN_HPP_NAMESPACE::ImageView view, 22674 
VULKAN_HPP_NAMESPACE::ImageLayout layout, 22675 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22676 { 22677 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22678 return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device, 22679 static_cast<VkOpticalFlowSessionNV>( session ), 22680 static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), 22681 static_cast<VkImageView>( view ), 22682 static_cast<VkImageLayout>( layout ) ) ); 22683 } 22684 #else 22685 template <typename Dispatch> 22686 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,VULKAN_HPP_NAMESPACE::ImageView view,VULKAN_HPP_NAMESPACE::ImageLayout layout,Dispatch const & d) const22687 Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 22688 VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, 22689 VULKAN_HPP_NAMESPACE::ImageView view, 22690 VULKAN_HPP_NAMESPACE::ImageLayout layout, 22691 Dispatch const & d ) const 22692 { 22693 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22694 22695 VULKAN_HPP_NAMESPACE::Result result = 22696 static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindOpticalFlowSessionImageNV( m_device, 22697 static_cast<VkOpticalFlowSessionNV>( session ), 22698 static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), 22699 static_cast<VkImageView>( view ), 22700 static_cast<VkImageLayout>( layout ) ) ); 22701 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); 22702 22703 return createResultValueType( result ); 22704 } 22705 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 22706 22707 template <typename Dispatch> opticalFlowExecuteNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * 
pExecuteInfo,Dispatch const & d) const22708 VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 22709 const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, 22710 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22711 { 22712 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22713 d.vkCmdOpticalFlowExecuteNV( 22714 m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) ); 22715 } 22716 22717 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22718 template <typename Dispatch> opticalFlowExecuteNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,Dispatch const & d) const22719 VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, 22720 const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, 22721 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22722 { 22723 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22724 22725 d.vkCmdOpticalFlowExecuteNV( 22726 m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) ); 22727 } 22728 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22729 22730 //=== VK_KHR_maintenance5 === 22731 22732 template <typename Dispatch> bindIndexBuffer2KHR(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::IndexType indexType,Dispatch const & d) const22733 VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, 22734 VULKAN_HPP_NAMESPACE::DeviceSize offset, 22735 VULKAN_HPP_NAMESPACE::DeviceSize size, 22736 VULKAN_HPP_NAMESPACE::IndexType indexType, 22737 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22738 { 22739 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22740 d.vkCmdBindIndexBuffer2KHR( m_commandBuffer, 22741 static_cast<VkBuffer>( buffer ), 22742 static_cast<VkDeviceSize>( offset ), 22743 static_cast<VkDeviceSize>( size ), 22744 static_cast<VkIndexType>( indexType ) ); 22745 } 22746 22747 template <typename Dispatch> getRenderingAreaGranularityKHR(const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const22748 VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo, 22749 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 22750 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22751 { 22752 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22753 d.vkGetRenderingAreaGranularityKHR( 22754 m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); 22755 } 22756 22757 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22758 template <typename Dispatch> 22759 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D getRenderingAreaGranularityKHR(const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo,Dispatch const & d) const22760 Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22761 { 22762 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22763 22764 VULKAN_HPP_NAMESPACE::Extent2D granularity; 22765 d.vkGetRenderingAreaGranularityKHR( 22766 m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) ); 22767 22768 return granularity; 22769 } 22770 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22771 22772 template <typename Dispatch> getImageSubresourceLayoutKHR(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * 
pInfo,VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,Dispatch const & d) const22773 VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo, 22774 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, 22775 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22776 { 22777 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22778 d.vkGetDeviceImageSubresourceLayoutKHR( 22779 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); 22780 } 22781 22782 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22783 template <typename Dispatch> 22784 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR getImageSubresourceLayoutKHR(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info,Dispatch const & d) const22785 Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22786 { 22787 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22788 22789 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; 22790 d.vkGetDeviceImageSubresourceLayoutKHR( 22791 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 22792 22793 return layout; 22794 } 22795 22796 template <typename X, typename Y, typename... 
Z, typename Dispatch> 22797 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageSubresourceLayoutKHR(const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info,Dispatch const & d) const22798 Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22799 { 22800 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22801 22802 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 22803 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>(); 22804 d.vkGetDeviceImageSubresourceLayoutKHR( 22805 m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 22806 22807 return structureChain; 22808 } 22809 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22810 22811 template <typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,Dispatch const & d) const22812 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, 22813 const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, 22814 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, 22815 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22816 { 22817 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22818 d.vkGetImageSubresourceLayout2KHR( m_device, 22819 static_cast<VkImage>( image ), 22820 reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ), 22821 reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) ); 22822 } 22823 22824 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22825 template <typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const 
VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,Dispatch const & d) const22826 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR( 22827 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22828 { 22829 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22830 22831 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; 22832 d.vkGetImageSubresourceLayout2KHR( m_device, 22833 static_cast<VkImage>( image ), 22834 reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ), 22835 reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 22836 22837 return layout; 22838 } 22839 22840 template <typename X, typename Y, typename... Z, typename Dispatch> getImageSubresourceLayout2KHR(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,Dispatch const & d) const22841 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR( 22842 VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22843 { 22844 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22845 22846 VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain; 22847 VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>(); 22848 d.vkGetImageSubresourceLayout2KHR( m_device, 22849 static_cast<VkImage>( image ), 22850 reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ), 22851 reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) ); 22852 22853 return structureChain; 22854 } 22855 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 22856 22857 //=== VK_EXT_shader_object === 22858 22859 template <typename Dispatch> 
createShadersEXT(uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,Dispatch const & d) const22860 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount, 22861 const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, 22862 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 22863 VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, 22864 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 22865 { 22866 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22867 return static_cast<Result>( d.vkCreateShadersEXT( m_device, 22868 createInfoCount, 22869 reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ), 22870 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 22871 reinterpret_cast<VkShaderEXT *>( pShaders ) ) ); 22872 } 22873 22874 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 22875 template <typename ShaderEXTAllocator, typename Dispatch> 22876 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type createShadersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22877 Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 22878 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22879 Dispatch const & d ) const 22880 { 22881 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22882 22883 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() ); 22884 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22885 d.vkCreateShadersEXT( m_device, 22886 
createInfos.size(), 22887 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 22888 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22889 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 22890 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" ); 22891 22892 return createResultValueType( result, shaders ); 22893 } 22894 22895 template <typename ShaderEXTAllocator, 22896 typename Dispatch, 22897 typename B0, 22898 typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type> 22899 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type createShadersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,ShaderEXTAllocator & shaderEXTAllocator,Dispatch const & d) const22900 Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 22901 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22902 ShaderEXTAllocator & shaderEXTAllocator, 22903 Dispatch const & d ) const 22904 { 22905 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22906 22907 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator ); 22908 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22909 d.vkCreateShadersEXT( m_device, 22910 createInfos.size(), 22911 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 22912 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22913 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 22914 resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" ); 22915 22916 return createResultValueType( result, shaders ); 22917 } 22918 22919 template <typename Dispatch> 22920 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderEXT>::type createShaderEXT(const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22921 Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, 22922 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22923 Dispatch const & d ) const 22924 { 22925 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22926 22927 VULKAN_HPP_NAMESPACE::ShaderEXT shader; 22928 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22929 d.vkCreateShadersEXT( m_device, 22930 1, 22931 reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ), 22932 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22933 reinterpret_cast<VkShaderEXT *>( &shader ) ) ); 22934 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" ); 22935 22936 return createResultValueType( result, shader ); 22937 } 22938 22939 # ifndef VULKAN_HPP_NO_SMART_HANDLE 22940 template <typename Dispatch, typename ShaderEXTAllocator> 22941 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22942 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type createShadersEXTUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const22943 Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 22944 Optional<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22945 Dispatch const & d ) const 22946 { 22947 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22948 22949 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() ); 22950 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22951 d.vkCreateShadersEXT( m_device, 22952 createInfos.size(), 22953 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 22954 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22955 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 22956 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" ); 22957 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders; 22958 uniqueShaders.reserve( createInfos.size() ); 22959 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 22960 for ( auto const & shader : shaders ) 22961 { 22962 uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) ); 22963 } 22964 return createResultValueType( result, std::move( uniqueShaders ) ); 22965 } 22966 22967 template <typename Dispatch, 22968 typename ShaderEXTAllocator, 22969 typename B0, 22970 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::value, int>::type> 22971 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 22972 typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type createShadersEXTUnique(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,ShaderEXTAllocator & shaderEXTAllocator,Dispatch const & d) const22973 Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const 
VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos, 22974 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 22975 ShaderEXTAllocator & shaderEXTAllocator, 22976 Dispatch const & d ) const 22977 { 22978 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 22979 22980 std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() ); 22981 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 22982 d.vkCreateShadersEXT( m_device, 22983 createInfos.size(), 22984 reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), 22985 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 22986 reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); 22987 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" ); 22988 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); 22989 uniqueShaders.reserve( createInfos.size() ); 22990 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 22991 for ( auto const & shader : shaders ) 22992 { 22993 uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) ); 22994 } 22995 return createResultValueType( result, std::move( uniqueShaders ) ); 22996 } 22997 22998 template <typename Dispatch> 22999 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::type createShaderEXTUnique(const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23000 Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, 23001 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23002 Dispatch const & d ) const 23003 { 23004 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); 23005 23006 VULKAN_HPP_NAMESPACE::ShaderEXT shader; 23007 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23008 d.vkCreateShadersEXT( m_device, 23009 1, 23010 reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ), 23011 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 23012 reinterpret_cast<VkShaderEXT *>( &shader ) ) ); 23013 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" ); 23014 23015 return createResultValueType( result, 23016 UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); 23017 } 23018 # endif /* VULKAN_HPP_NO_SMART_HANDLE */ 23019 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23020 23021 template <typename Dispatch> destroyShaderEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const23022 VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 23023 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23024 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23025 { 23026 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23027 d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23028 } 23029 23030 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23031 template <typename Dispatch> destroyShaderEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23032 VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 23033 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23034 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23035 { 23036 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); 23037 23038 d.vkDestroyShaderEXT( m_device, 23039 static_cast<VkShaderEXT>( shader ), 23040 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23041 } 23042 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23043 23044 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderEXT shader,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const23045 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 23046 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23047 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23048 { 23049 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23050 d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 23051 } 23052 23053 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23054 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const23055 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, 23056 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, 23057 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23058 { 23059 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23060 23061 d.vkDestroyShaderEXT( m_device, 23062 static_cast<VkShaderEXT>( shader ), 23063 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 23064 } 23065 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23066 23067 template <typename Dispatch> 23068 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,size_t * pDataSize,void * pData,Dispatch const & d) const23069 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT 
shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23070 { 23071 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23072 return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) ); 23073 } 23074 23075 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23076 template <typename Uint8_tAllocator, typename Dispatch> 23077 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Dispatch const & d) const23078 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const 23079 { 23080 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23081 23082 std::vector<uint8_t, Uint8_tAllocator> data; 23083 size_t dataSize; 23084 VULKAN_HPP_NAMESPACE::Result result; 23085 do 23086 { 23087 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) ); 23088 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 23089 { 23090 data.resize( dataSize ); 23091 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23092 d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 23093 } 23094 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 23095 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); 23096 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 23097 if ( dataSize < data.size() ) 23098 { 23099 data.resize( dataSize ); 23100 } 23101 return createResultValueType( result, data ); 23102 } 23103 23104 template <typename Uint8_tAllocator, 23105 typename Dispatch, 23106 typename B1, 23107 typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type> 23108 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderBinaryDataEXT(VULKAN_HPP_NAMESPACE::ShaderEXT shader,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const23109 Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const 23110 { 23111 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23112 23113 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 23114 size_t dataSize; 23115 VULKAN_HPP_NAMESPACE::Result result; 23116 do 23117 { 23118 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) ); 23119 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) 23120 { 23121 data.resize( dataSize ); 23122 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23123 d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); 23124 } 23125 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 23126 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); 23127 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 23128 if ( dataSize < data.size() ) 23129 { 23130 data.resize( dataSize ); 23131 } 23132 return createResultValueType( result, data ); 23133 } 23134 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23135 23136 template <typename Dispatch> bindShadersEXT(uint32_t stageCount,const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,Dispatch const & d) const23137 VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount, 23138 const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, 23139 const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, 23140 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23141 { 23142 VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23143 d.vkCmdBindShadersEXT( 23144 m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) ); 23145 } 23146 23147 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23148 template <typename Dispatch> bindShadersEXT(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,Dispatch const & d) const23149 VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages, 23150 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders, 23151 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 23152 { 23153 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23154 # ifdef VULKAN_HPP_NO_EXCEPTIONS 23155 VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); 23156 # else 23157 if ( stages.size() != shaders.size() ) 23158 { 23159 throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); 23160 } 23161 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 23162 23163 d.vkCmdBindShadersEXT( m_commandBuffer, 23164 stages.size(), 23165 reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ), 23166 reinterpret_cast<const VkShaderEXT *>( shaders.data() ) ); 23167 } 23168 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23169 23170 //=== VK_QCOM_tile_properties === 23171 23172 template <typename Dispatch> getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,uint32_t * pPropertiesCount,VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,Dispatch const & d) const23173 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 23174 uint32_t * 
pPropertiesCount, 23175 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, 23176 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23177 { 23178 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23179 return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( 23180 m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); 23181 } 23182 23183 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23184 template <typename TilePropertiesQCOMAllocator, typename Dispatch> 23185 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Dispatch const & d) const23186 Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const 23187 { 23188 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23189 23190 std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties; 23191 uint32_t propertiesCount; 23192 VULKAN_HPP_NAMESPACE::Result result; 23193 do 23194 { 23195 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23196 d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) ); 23197 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) 23198 { 23199 properties.resize( propertiesCount ); 23200 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM( 23201 m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) ); 23202 } 23203 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 23204 23205 VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); 23206 if ( propertiesCount < properties.size() ) 23207 { 23208 
properties.resize( propertiesCount ); 23209 } 23210 return properties; 23211 } 23212 23213 template <typename TilePropertiesQCOMAllocator, 23214 typename Dispatch, 23215 typename B1, 23216 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, int>::type> 23217 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,Dispatch const & d) const23218 Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 23219 TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, 23220 Dispatch const & d ) const 23221 { 23222 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23223 23224 std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator ); 23225 uint32_t propertiesCount; 23226 VULKAN_HPP_NAMESPACE::Result result; 23227 do 23228 { 23229 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23230 d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) ); 23231 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) 23232 { 23233 properties.resize( propertiesCount ); 23234 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetFramebufferTilePropertiesQCOM( 23235 m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) ); 23236 } 23237 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 23238 23239 VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); 23240 if ( propertiesCount < properties.size() ) 23241 { 23242 properties.resize( propertiesCount ); 23243 } 23244 return properties; 23245 } 23246 
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23247 23248 template <typename Dispatch> getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,Dispatch const & d) const23249 VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, 23250 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, 23251 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23252 { 23253 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23254 return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( 23255 m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); 23256 } 23257 23258 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23259 template <typename Dispatch> 23260 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,Dispatch const & d) const23261 Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23262 { 23263 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23264 23265 VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; 23266 d.vkGetDynamicRenderingTilePropertiesQCOM( 23267 m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) ); 23268 23269 return properties; 23270 } 23271 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23272 23273 //=== VK_NV_low_latency2 === 23274 23275 template <typename Dispatch> setLatencySleepModeNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo,Dispatch const & d) const23276 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 23277 const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, 23278 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23279 { 23280 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23281 return static_cast<Result>( 23282 d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) ); 23283 } 23284 23285 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23286 template <typename Dispatch> setLatencySleepModeNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo,Dispatch const & d) const23287 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 23288 const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, 23289 Dispatch const & d ) const 23290 { 23291 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23292 23293 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23294 d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) ) ); 23295 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); 23296 23297 return createResultValueType( result ); 23298 } 23299 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23300 23301 template <typename Dispatch> latencySleepNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo,Dispatch const & d) const23302 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 23303 const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, 23304 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23305 { 23306 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); 23307 return static_cast<Result>( 23308 d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) ); 23309 } 23310 23311 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23312 template <typename Dispatch> 23313 VULKAN_HPP_INLINE typename ResultValueType<void>::type latencySleepNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo,Dispatch const & d) const23314 Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, Dispatch const & d ) const 23315 { 23316 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23317 23318 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23319 d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) ) ); 23320 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::latencySleepNV" ); 23321 23322 return createResultValueType( result ); 23323 } 23324 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23325 23326 template <typename Dispatch> setLatencyMarkerNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo,Dispatch const & d) const23327 VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 23328 const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, 23329 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23330 { 23331 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23332 d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); 23333 } 23334 23335 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23336 template <typename Dispatch> 
setLatencyMarkerNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo,Dispatch const & d) const23337 VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 23338 const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, 23339 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23340 { 23341 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23342 23343 d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 23344 } 23345 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23346 23347 template <typename Dispatch> getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo,Dispatch const & d) const23348 VULKAN_HPP_INLINE void Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 23349 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, 23350 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23351 { 23352 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23353 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); 23354 } 23355 23356 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23357 template <typename Dispatch> 23358 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV getLatencyTimingsNV(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const23359 Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23360 { 23361 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23362 23363 VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; 23364 d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain 
), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); 23365 23366 return latencyMarkerInfo; 23367 } 23368 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23369 23370 template <typename Dispatch> notifyOutOfBandNV(const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo,Dispatch const & d) const23371 VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, 23372 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23373 { 23374 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23375 d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) ); 23376 } 23377 23378 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23379 template <typename Dispatch> notifyOutOfBandNV(const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo,Dispatch const & d) const23380 VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, 23381 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23382 { 23383 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23384 23385 d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) ); 23386 } 23387 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23388 23389 //=== VK_KHR_cooperative_matrix === 23390 23391 template <typename Dispatch> getCooperativeMatrixPropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties,Dispatch const & d) const23392 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR( 23393 uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23394 { 23395 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23396 return static_cast<Result>( 
d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 23397 m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) ); 23398 } 23399 23400 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23401 template <typename CooperativeMatrixPropertiesKHRAllocator, typename Dispatch> 23402 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 23403 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type getCooperativeMatrixPropertiesKHR(Dispatch const & d) const23404 PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const 23405 { 23406 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23407 23408 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties; 23409 uint32_t propertyCount; 23410 VULKAN_HPP_NAMESPACE::Result result; 23411 do 23412 { 23413 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 23414 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 23415 { 23416 properties.resize( propertyCount ); 23417 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 23418 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); 23419 } 23420 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 23421 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 23422 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 23423 if ( propertyCount < properties.size() ) 23424 { 23425 properties.resize( propertyCount ); 23426 } 23427 return createResultValueType( result, properties ); 23428 } 23429 23430 template <typename CooperativeMatrixPropertiesKHRAllocator, 23431 typename Dispatch, 23432 
typename B1, 23433 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value, int>::type> 23434 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 23435 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type getCooperativeMatrixPropertiesKHR(CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator,Dispatch const & d) const23436 PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, 23437 Dispatch const & d ) const 23438 { 23439 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23440 23441 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties( 23442 cooperativeMatrixPropertiesKHRAllocator ); 23443 uint32_t propertyCount; 23444 VULKAN_HPP_NAMESPACE::Result result; 23445 do 23446 { 23447 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 23448 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 23449 { 23450 properties.resize( propertyCount ); 23451 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( 23452 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); 23453 } 23454 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 23455 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 23456 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 23457 if ( propertyCount < properties.size() ) 23458 { 23459 properties.resize( propertyCount ); 23460 } 23461 return createResultValueType( result, properties ); 23462 } 23463 #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23464 23465 //=== VK_EXT_attachment_feedback_loop_dynamic_state === 23466 23467 template <typename Dispatch> setAttachmentFeedbackLoopEnableEXT(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask,Dispatch const & d) const23468 VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, 23469 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23470 { 23471 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23472 d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast<VkImageAspectFlags>( aspectMask ) ); 23473 } 23474 23475 #if defined( VK_USE_PLATFORM_SCREEN_QNX ) 23476 //=== VK_QNX_external_memory_screen_buffer === 23477 23478 template <typename Dispatch> getScreenBufferPropertiesQNX(const struct _screen_buffer * buffer,VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties,Dispatch const & d) const23479 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, 23480 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, 23481 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23482 { 23483 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23484 return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) ); 23485 } 23486 23487 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23488 template <typename Dispatch> 23489 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::type getScreenBufferPropertiesQNX(const struct _screen_buffer & buffer,Dispatch const & d) const23490 Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const 23491 { 23492 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23493 23494 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX 
properties; 23495 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23496 d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); 23497 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); 23498 23499 return createResultValueType( result, properties ); 23500 } 23501 23502 template <typename X, typename Y, typename... Z, typename Dispatch> 23503 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type getScreenBufferPropertiesQNX(const struct _screen_buffer & buffer,Dispatch const & d) const23504 Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const 23505 { 23506 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23507 23508 StructureChain<X, Y, Z...> structureChain; 23509 VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>(); 23510 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23511 d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); 23512 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); 23513 23514 return createResultValueType( result, structureChain ); 23515 } 23516 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23517 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ 23518 23519 //=== VK_KHR_calibrated_timestamps === 23520 23521 template <typename Dispatch> getCalibrateableTimeDomainsKHR(uint32_t * pTimeDomainCount,VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains,Dispatch const & d) const23522 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, 23523 VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, 23524 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23525 { 23526 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23527 return static_cast<Result>( 23528 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); 23529 } 23530 23531 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23532 template <typename TimeDomainKHRAllocator, typename Dispatch> 23533 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsKHR(Dispatch const & d) const23534 PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const 23535 { 23536 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23537 23538 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; 23539 uint32_t timeDomainCount; 23540 VULKAN_HPP_NAMESPACE::Result result; 23541 do 23542 { 23543 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); 23544 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 23545 { 23546 timeDomains.resize( timeDomainCount ); 23547 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23548 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 23549 } 23550 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 23551 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); 23552 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 23553 if ( timeDomainCount < timeDomains.size() ) 23554 { 23555 timeDomains.resize( timeDomainCount ); 23556 } 23557 return createResultValueType( result, timeDomains ); 23558 } 23559 23560 template <typename TimeDomainKHRAllocator, 23561 
typename Dispatch, 23562 typename B1, 23563 typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> 23564 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type getCalibrateableTimeDomainsKHR(TimeDomainKHRAllocator & timeDomainKHRAllocator,Dispatch const & d) const23565 PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const 23566 { 23567 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23568 23569 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); 23570 uint32_t timeDomainCount; 23571 VULKAN_HPP_NAMESPACE::Result result; 23572 do 23573 { 23574 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); 23575 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) 23576 { 23577 timeDomains.resize( timeDomainCount ); 23578 result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23579 d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); 23580 } 23581 } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); 23582 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); 23583 VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); 23584 if ( timeDomainCount < timeDomains.size() ) 23585 { 23586 timeDomains.resize( timeDomainCount ); 23587 } 23588 return createResultValueType( result, timeDomains ); 23589 } 23590 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23591 23592 template <typename Dispatch> getCalibratedTimestampsKHR(uint32_t timestampCount,const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * 
pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation,Dispatch const & d) const23593 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsKHR( uint32_t timestampCount, 23594 const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, 23595 uint64_t * pTimestamps, 23596 uint64_t * pMaxDeviation, 23597 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23598 { 23599 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23600 return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( 23601 m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); 23602 } 23603 23604 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23605 template <typename Uint64_tAllocator, typename Dispatch> 23606 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Dispatch const & d) const23607 Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 23608 Dispatch const & d ) const 23609 { 23610 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23611 23612 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 23613 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); 23614 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 23615 uint64_t & maxDeviation = data_.second; 23616 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR( 23617 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 23618 resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); 23619 23620 return createResultValueType( result, data_ ); 23621 } 23622 23623 template <typename Uint64_tAllocator, 23624 typename Dispatch, 23625 typename B0, 23626 typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type> 23627 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsKHR(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,Uint64_tAllocator & uint64_tAllocator,Dispatch const & d) const23628 Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, 23629 Uint64_tAllocator & uint64_tAllocator, 23630 Dispatch const & d ) const 23631 { 23632 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23633 23634 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( 23635 std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); 23636 std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; 23637 uint64_t & maxDeviation = data_.second; 23638 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetCalibratedTimestampsKHR( 23639 m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); 23640 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); 23641 23642 return createResultValueType( result, data_ ); 23643 } 23644 23645 template <typename Dispatch> 23646 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type getCalibratedTimestampKHR(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & 
timestampInfo,Dispatch const & d) const23647 Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const 23648 { 23649 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23650 23651 std::pair<uint64_t, uint64_t> data_; 23652 uint64_t & timestamp = data_.first; 23653 uint64_t & maxDeviation = data_.second; 23654 VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( 23655 d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( ×tampInfo ), ×tamp, &maxDeviation ) ); 23656 resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); 23657 23658 return createResultValueType( result, data_ ); 23659 } 23660 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23661 23662 //=== VK_KHR_maintenance6 === 23663 23664 template <typename Dispatch> bindDescriptorSets2KHR(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo,Dispatch const & d) const23665 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo, 23666 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23667 { 23668 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23669 d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( pBindDescriptorSetsInfo ) ); 23670 } 23671 23672 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23673 template <typename Dispatch> bindDescriptorSets2KHR(const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo,Dispatch const & d) const23674 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo, 23675 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23676 { 23677 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23678 23679 
d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( &bindDescriptorSetsInfo ) ); 23680 } 23681 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23682 23683 template <typename Dispatch> pushConstants2KHR(const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo,Dispatch const & d) const23684 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo, 23685 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23686 { 23687 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23688 d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( pPushConstantsInfo ) ); 23689 } 23690 23691 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 23692 template <typename Dispatch> pushConstants2KHR(const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo,Dispatch const & d) const23693 VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo, 23694 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23695 { 23696 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23697 23698 d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( &pushConstantsInfo ) ); 23699 } 23700 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 23701 23702 template <typename Dispatch> pushDescriptorSet2KHR(const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * pPushDescriptorSetInfo,Dispatch const & d) const23703 VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * pPushDescriptorSetInfo, 23704 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 23705 { 23706 VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 23707 d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( pPushDescriptorSetInfo ) ); 23708 } 23709 23710 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-taking variant of pushDescriptorSet2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( &pushDescriptorSetInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: forwards a PushDescriptorSetWithTemplateInfoKHR pointer to the driver
  // via the dispatcher d.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer,
                                              reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( pPushDescriptorSetWithTemplateInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-taking variant of pushDescriptorSetWithTemplate2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer,
                                              reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( &pushDescriptorSetWithTemplateInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: forwards a SetDescriptorBufferOffsetsInfoEXT pointer to the driver.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-taking variant of setDescriptorBufferOffsets2EXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // C-style overload: forwards a BindDescriptorBufferEmbeddedSamplersInfoEXT pointer to the driver.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT(
    const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo,
    Dispatch const &                                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(
      m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-taking variant of bindDescriptorBufferEmbeddedSamplers2EXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT(
    const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo,
    Dispatch const &                                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(
      m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( &bindDescriptorBufferEmbeddedSamplersInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

}  // namespace VULKAN_HPP_NAMESPACE
#endif