1 // Copyright 2015-2021 The Khronos Group Inc. 2 // 3 // SPDX-License-Identifier: Apache-2.0 OR MIT 4 // 5 6 // This header is generated from the Khronos Vulkan XML API Registry. 7 8 #ifndef VULKAN_FUNCS_HPP 9 #define VULKAN_FUNCS_HPP 10 11 namespace VULKAN_HPP_NAMESPACE 12 { 13 //=========================== 14 //=== COMMAND Definitions === 15 //=========================== 16 17 //=== VK_VERSION_1_0 === 18 19 template <typename Dispatch> 20 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Instance * pInstance,Dispatch const & d)21 createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, 22 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 23 VULKAN_HPP_NAMESPACE::Instance * pInstance, 24 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 25 { 26 return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), 27 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 28 reinterpret_cast<VkInstance *>( pInstance ) ) ); 29 } 30 31 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 32 template <typename Dispatch> 33 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 34 typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance(const InstanceCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d)35 createInstance( const InstanceCreateInfo & createInfo, 36 Optional<const AllocationCallbacks> allocator, 37 Dispatch const & d ) 38 { 39 VULKAN_HPP_NAMESPACE::Instance instance; 40 Result result = static_cast<Result>( 41 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 42 reinterpret_cast<const VkAllocationCallbacks *>( 43 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 44 reinterpret_cast<VkInstance *>( &instance ) ) ); 45 return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); 46 } 47 48 # ifndef VULKAN_HPP_NO_SMART_HANDLE 49 template <typename Dispatch> 50 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 51 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique(const InstanceCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d)52 createInstanceUnique( const InstanceCreateInfo & createInfo, 53 Optional<const AllocationCallbacks> allocator, 54 Dispatch const & d ) 55 { 56 VULKAN_HPP_NAMESPACE::Instance instance; 57 Result result = static_cast<Result>( 58 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), 59 reinterpret_cast<const VkAllocationCallbacks *>( 60 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 61 reinterpret_cast<VkInstance *>( &instance ) ) ); 62 ObjectDestroy<NoParent, Dispatch> deleter( allocator, d ); 63 return createResultValue<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( 64 result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter ); 65 } 66 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 67 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 68 69 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const70 VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 71 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 72 { 73 
d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 74 } 75 76 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 77 template <typename Dispatch> destroy(Optional<const AllocationCallbacks> allocator,Dispatch const & d) const78 VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator, 79 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 80 { 81 d.vkDestroyInstance( m_instance, 82 reinterpret_cast<const VkAllocationCallbacks *>( 83 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 84 } 85 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 86 87 template <typename Dispatch> 88 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumeratePhysicalDevices(uint32_t * pPhysicalDeviceCount,VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,Dispatch const & d) const89 Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, 90 VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, 91 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 92 { 93 return static_cast<Result>( d.vkEnumeratePhysicalDevices( 94 m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) ); 95 } 96 97 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 98 template <typename PhysicalDeviceAllocator, typename Dispatch> 99 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 100 typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(Dispatch const & d) const101 Instance::enumeratePhysicalDevices( Dispatch const & d ) const 102 { 103 std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices; 104 uint32_t physicalDeviceCount; 105 Result result; 106 do 107 { 108 result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 109 if ( ( result == Result::eSuccess ) && physicalDeviceCount ) 110 { 111 physicalDevices.resize( physicalDeviceCount ); 112 result = static_cast<Result>( d.vkEnumeratePhysicalDevices( 113 m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 114 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 115 } 116 } while ( result == Result::eIncomplete ); 117 if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) ) 118 { 119 physicalDevices.resize( physicalDeviceCount ); 120 } 121 return createResultValue( 122 result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 123 } 124 125 template <typename PhysicalDeviceAllocator, 126 typename Dispatch, 127 typename B, 128 typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type> 129 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 130 typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices(PhysicalDeviceAllocator & physicalDeviceAllocator,Dispatch const & d) const131 Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const 132 { 133 std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator ); 134 uint32_t physicalDeviceCount; 135 Result result; 136 do 137 { 138 result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); 139 if ( ( result == Result::eSuccess ) && physicalDeviceCount ) 140 { 141 physicalDevices.resize( physicalDeviceCount ); 142 result = static_cast<Result>( d.vkEnumeratePhysicalDevices( 143 
m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) ); 144 VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); 145 } 146 } while ( result == Result::eIncomplete ); 147 if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) ) 148 { 149 physicalDevices.resize( physicalDeviceCount ); 150 } 151 return createResultValue( 152 result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); 153 } 154 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 155 156 template <typename Dispatch> getFeatures(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,Dispatch const & d) const157 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, 158 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 159 { 160 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) ); 161 } 162 163 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 164 template <typename Dispatch> 165 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const & d) const166 PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 167 { 168 VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; 169 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) ); 170 return features; 171 } 172 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 173 174 template <typename Dispatch> 175 VULKAN_HPP_INLINE void getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,Dispatch const & d) const176 PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 177 VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, 178 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 179 { 180 d.vkGetPhysicalDeviceFormatProperties( 181 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) ); 182 } 183 184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 185 template <typename Dispatch> 186 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const187 PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 188 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 189 { 190 VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; 191 d.vkGetPhysicalDeviceFormatProperties( 192 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) ); 193 return formatProperties; 194 } 195 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 196 197 template <typename Dispatch> 198 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,Dispatch const & d) const199 PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 200 VULKAN_HPP_NAMESPACE::ImageType type, 201 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 202 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 203 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 204 VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, 205 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 206 { 207 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( 208 m_physicalDevice, 209 static_cast<VkFormat>( format ), 210 static_cast<VkImageType>( type ), 211 static_cast<VkImageTiling>( tiling ), 212 static_cast<VkImageUsageFlags>( usage ), 213 static_cast<VkImageCreateFlags>( flags ), 214 reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) ); 215 } 216 217 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 218 template <typename Dispatch> 219 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 220 typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::ImageTiling tiling,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,Dispatch const & d) const221 PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 222 VULKAN_HPP_NAMESPACE::ImageType type, 223 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 224 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 225 VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, 226 Dispatch const & d ) const 227 { 228 VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; 229 Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( 230 m_physicalDevice, 231 static_cast<VkFormat>( format ), 232 static_cast<VkImageType>( type ), 233 static_cast<VkImageTiling>( tiling ), 234 static_cast<VkImageUsageFlags>( usage ), 235 static_cast<VkImageCreateFlags>( flags ), 236 reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) ); 237 return createResultValue( 238 result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); 239 } 240 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 241 242 template <typename Dispatch> getProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,Dispatch const & d) const243 VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, 244 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 245 { 246 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) ); 247 } 248 249 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 250 template <typename Dispatch> 251 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const & d) const252 PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 253 { 254 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; 255 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) ); 256 return properties; 257 } 258 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 259 260 template <typename Dispatch> 261 VULKAN_HPP_INLINE void getQueueFamilyProperties(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,Dispatch const & d) const262 PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, 263 VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, 264 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 265 { 266 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, 267 pQueueFamilyPropertyCount, 268 reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) ); 269 } 270 271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 272 template 
<typename QueueFamilyPropertiesAllocator, typename Dispatch> 273 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(Dispatch const & d) const274 PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const 275 { 276 std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties; 277 uint32_t queueFamilyPropertyCount; 278 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 279 queueFamilyProperties.resize( queueFamilyPropertyCount ); 280 d.vkGetPhysicalDeviceQueueFamilyProperties( 281 m_physicalDevice, 282 &queueFamilyPropertyCount, 283 reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 284 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 285 return queueFamilyProperties; 286 } 287 288 template <typename QueueFamilyPropertiesAllocator, 289 typename Dispatch, 290 typename B, 291 typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type> 292 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties(QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,Dispatch const & d) const293 PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, 294 Dispatch const & d ) const 295 { 296 std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( 297 queueFamilyPropertiesAllocator ); 298 uint32_t queueFamilyPropertyCount; 299 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 300 queueFamilyProperties.resize( queueFamilyPropertyCount ); 301 d.vkGetPhysicalDeviceQueueFamilyProperties( 302 m_physicalDevice, 303 &queueFamilyPropertyCount, 304 reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); 305 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 306 return queueFamilyProperties; 307 } 308 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 309 310 template <typename Dispatch> 311 VULKAN_HPP_INLINE void getMemoryProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,Dispatch const & d) const312 PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, 313 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 314 { 315 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, 316 reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) ); 317 } 318 319 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 320 template <typename Dispatch> 321 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const & d) const322 PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 323 { 324 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; 325 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, 326 reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) ); 327 return memoryProperties; 328 } 329 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 330 331 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const332 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, 333 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 334 { 335 return 
d.vkGetInstanceProcAddr( m_instance, pName ); 336 } 337 338 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 339 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const340 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, 341 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 342 { 343 return d.vkGetInstanceProcAddr( m_instance, name.c_str() ); 344 } 345 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 346 347 template <typename Dispatch> getProcAddr(const char * pName,Dispatch const & d) const348 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, 349 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 350 { 351 return d.vkGetDeviceProcAddr( m_device, pName ); 352 } 353 354 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 355 template <typename Dispatch> getProcAddr(const std::string & name,Dispatch const & d) const356 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, 357 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 358 { 359 return d.vkGetDeviceProcAddr( m_device, name.c_str() ); 360 } 361 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 362 363 template <typename Dispatch> 364 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Device * pDevice,Dispatch const & d) const365 PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, 366 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 367 VULKAN_HPP_NAMESPACE::Device * pDevice, 368 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 369 { 370 return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, 371 reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), 372 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 373 reinterpret_cast<VkDevice *>( pDevice ) ) ); 374 } 375 376 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 377 template <typename Dispatch> 378 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice(const DeviceCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const379 PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, 380 Optional<const AllocationCallbacks> allocator, 381 Dispatch const & d ) const 382 { 383 VULKAN_HPP_NAMESPACE::Device device; 384 Result result = static_cast<Result>( 385 d.vkCreateDevice( m_physicalDevice, 386 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 387 reinterpret_cast<const VkAllocationCallbacks *>( 388 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 389 reinterpret_cast<VkDevice *>( &device ) ) ); 390 return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); 391 } 392 393 # ifndef VULKAN_HPP_NO_SMART_HANDLE 394 template <typename Dispatch> 395 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 396 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type createDeviceUnique(const DeviceCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const397 PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, 398 Optional<const AllocationCallbacks> allocator, 399 Dispatch const & d ) const 400 { 401 VULKAN_HPP_NAMESPACE::Device device; 402 Result result = static_cast<Result>( 403 d.vkCreateDevice( 
m_physicalDevice, 404 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), 405 reinterpret_cast<const VkAllocationCallbacks *>( 406 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 407 reinterpret_cast<VkDevice *>( &device ) ) ); 408 ObjectDestroy<NoParent, Dispatch> deleter( allocator, d ); 409 return createResultValue<VULKAN_HPP_NAMESPACE::Device, Dispatch>( 410 result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter ); 411 } 412 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 413 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 414 415 template <typename Dispatch> destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const416 VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 417 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 418 { 419 d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 420 } 421 422 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 423 template <typename Dispatch> destroy(Optional<const AllocationCallbacks> allocator,Dispatch const & d) const424 VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, 425 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 426 { 427 d.vkDestroyDevice( m_device, 428 reinterpret_cast<const VkAllocationCallbacks *>( 429 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 430 } 431 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 432 433 template <typename Dispatch> 434 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d)435 enumerateInstanceExtensionProperties( const char * pLayerName, 436 uint32_t * pPropertyCount, 437 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 438 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 439 { 440 return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( 441 pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 442 } 443 444 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 445 template <typename ExtensionPropertiesAllocator, typename Dispatch> 446 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 447 typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d)448 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) 449 { 450 std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties; 451 uint32_t propertyCount; 452 Result result; 453 do 454 { 455 result = static_cast<Result>( 456 d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 457 if ( ( result == Result::eSuccess ) && propertyCount ) 458 { 459 properties.resize( propertyCount ); 460 result = static_cast<Result>( 461 d.vkEnumerateInstanceExtensionProperties( layerName ? 
layerName->c_str() : nullptr, 462 &propertyCount, 463 reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 464 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 465 } 466 } while ( result == Result::eIncomplete ); 467 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 468 { 469 properties.resize( propertyCount ); 470 } 471 return createResultValue( 472 result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); 473 } 474 475 template <typename ExtensionPropertiesAllocator, 476 typename Dispatch, 477 typename B, 478 typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type> 479 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 480 typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties(Optional<const std::string> layerName,ExtensionPropertiesAllocator & extensionPropertiesAllocator,Dispatch const & d)481 enumerateInstanceExtensionProperties( Optional<const std::string> layerName, 482 ExtensionPropertiesAllocator & extensionPropertiesAllocator, 483 Dispatch const & d ) 484 { 485 std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); 486 uint32_t propertyCount; 487 Result result; 488 do 489 { 490 result = static_cast<Result>( 491 d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 492 if ( ( result == Result::eSuccess ) && propertyCount ) 493 { 494 properties.resize( propertyCount ); 495 result = static_cast<Result>( 496 d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, 497 &propertyCount, 498 reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 499 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 500 } 501 } while ( result == Result::eIncomplete ); 502 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 503 { 504 properties.resize( propertyCount ); 505 } 506 return createResultValue( 507 result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); 508 } 509 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 510 511 template <typename Dispatch> 512 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateDeviceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,Dispatch const & d) const513 PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, 514 uint32_t * pPropertyCount, 515 VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, 516 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 517 { 518 return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( 519 m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); 520 } 521 522 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 523 template <typename ExtensionPropertiesAllocator, typename Dispatch> 524 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 525 typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties(Optional<const std::string> layerName,Dispatch const & d) const526 PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, 527 Dispatch const & d ) const 528 { 529 std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties; 530 uint32_t propertyCount; 531 Result result; 532 do 533 { 
534 result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( 535 m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 536 if ( ( result == Result::eSuccess ) && propertyCount ) 537 { 538 properties.resize( propertyCount ); 539 result = static_cast<Result>( 540 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, 541 layerName ? layerName->c_str() : nullptr, 542 &propertyCount, 543 reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 544 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 545 } 546 } while ( result == Result::eIncomplete ); 547 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 548 { 549 properties.resize( propertyCount ); 550 } 551 return createResultValue( 552 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); 553 } 554 555 template <typename ExtensionPropertiesAllocator, 556 typename Dispatch, 557 typename B, 558 typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type> 559 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 560 typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties(Optional<const std::string> layerName,ExtensionPropertiesAllocator & extensionPropertiesAllocator,Dispatch const & d) const561 PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, 562 ExtensionPropertiesAllocator & extensionPropertiesAllocator, 563 Dispatch const & d ) const 564 { 565 std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); 566 uint32_t propertyCount; 567 Result result; 568 do 569 { 570 result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( 571 m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); 572 if ( ( result == Result::eSuccess ) && propertyCount ) 573 { 574 properties.resize( propertyCount ); 575 result = static_cast<Result>( 576 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, 577 layerName ? 
layerName->c_str() : nullptr, 578 &propertyCount, 579 reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); 580 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 581 } 582 } while ( result == Result::eIncomplete ); 583 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 584 { 585 properties.resize( propertyCount ); 586 } 587 return createResultValue( 588 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); 589 } 590 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 591 592 template <typename Dispatch> 593 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,Dispatch const & d)594 enumerateInstanceLayerProperties( uint32_t * pPropertyCount, 595 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, 596 Dispatch const & d ) VULKAN_HPP_NOEXCEPT 597 { 598 return static_cast<Result>( 599 d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); 600 } 601 602 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 603 template <typename LayerPropertiesAllocator, typename Dispatch> 604 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 605 typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties(Dispatch const & d)606 enumerateInstanceLayerProperties( Dispatch const & d ) 607 { 608 std::vector<LayerProperties, LayerPropertiesAllocator> properties; 609 uint32_t propertyCount; 610 Result result; 611 do 612 { 613 result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); 614 if ( ( result == Result::eSuccess ) && propertyCount ) 615 { 616 properties.resize( propertyCount ); 617 result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( 618 &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 619 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 620 } 621 } while ( result == Result::eIncomplete ); 622 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 623 { 624 properties.resize( propertyCount ); 625 } 626 return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); 627 } 628 629 template <typename LayerPropertiesAllocator, 630 typename Dispatch, 631 typename B, 632 typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type> 633 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 634 typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties(LayerPropertiesAllocator & layerPropertiesAllocator,Dispatch const & d)635 enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) 636 { 637 std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); 638 uint32_t propertyCount; 639 Result result; 640 do 641 { 642 result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); 643 if ( ( result == Result::eSuccess ) && propertyCount ) 644 { 645 properties.resize( propertyCount ); 646 result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( 647 &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 648 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 649 } 650 } while ( result == Result::eIncomplete ); 651 if ( ( result == 
Result::eSuccess ) && ( propertyCount < properties.size() ) ) 652 { 653 properties.resize( propertyCount ); 654 } 655 return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); 656 } 657 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 658 659 template <typename Dispatch> 660 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateDeviceLayerProperties(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,Dispatch const & d) const661 PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, 662 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, 663 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 664 { 665 return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( 666 m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); 667 } 668 669 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 670 template <typename LayerPropertiesAllocator, typename Dispatch> 671 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 672 typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties(Dispatch const & d) const673 PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const 674 { 675 std::vector<LayerProperties, LayerPropertiesAllocator> properties; 676 uint32_t propertyCount; 677 Result result; 678 do 679 { 680 result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); 681 if ( ( result == Result::eSuccess ) && propertyCount ) 682 { 683 properties.resize( propertyCount ); 684 result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( 685 m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 686 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 687 } 688 } while ( result == Result::eIncomplete ); 689 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 690 { 691 properties.resize( propertyCount ); 692 } 693 return createResultValue( 694 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); 695 } 696 697 template <typename LayerPropertiesAllocator, 698 typename Dispatch, 699 typename B, 700 typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type> 701 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 702 typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties(LayerPropertiesAllocator & layerPropertiesAllocator,Dispatch const & d) const703 PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, 704 Dispatch const & d ) const 705 { 706 std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); 707 uint32_t propertyCount; 708 Result result; 709 do 710 { 711 result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); 712 if ( ( result == Result::eSuccess ) && propertyCount ) 713 { 714 properties.resize( propertyCount ); 715 result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( 716 m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); 717 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 718 } 719 } while ( result == Result::eIncomplete ); 720 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 721 { 722 properties.resize( propertyCount ); 
723 } 724 return createResultValue( 725 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); 726 } 727 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 728 729 template <typename Dispatch> getQueue(uint32_t queueFamilyIndex,uint32_t queueIndex,VULKAN_HPP_NAMESPACE::Queue * pQueue,Dispatch const & d) const730 VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, 731 uint32_t queueIndex, 732 VULKAN_HPP_NAMESPACE::Queue * pQueue, 733 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 734 { 735 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) ); 736 } 737 738 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 739 template <typename Dispatch> 740 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue getQueue(uint32_t queueFamilyIndex,uint32_t queueIndex,Dispatch const & d) const741 Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 742 { 743 VULKAN_HPP_NAMESPACE::Queue queue; 744 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) ); 745 return queue; 746 } 747 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 748 749 template <typename Dispatch> submit(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const750 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, 751 const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, 752 VULKAN_HPP_NAMESPACE::Fence fence, 753 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 754 { 755 return static_cast<Result>( d.vkQueueSubmit( 756 m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 757 } 758 759 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 760 template <typename Dispatch> 761 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type submit(ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const762 Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, 763 VULKAN_HPP_NAMESPACE::Fence fence, 764 Dispatch const & d ) const 765 { 766 Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, 767 submits.size(), 768 reinterpret_cast<const VkSubmitInfo *>( submits.data() ), 769 static_cast<VkFence>( fence ) ) ); 770 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); 771 } 772 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 773 774 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 775 template <typename Dispatch> waitIdle(Dispatch const & d) const776 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 777 { 778 return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) ); 779 } 780 #else 781 template <typename Dispatch> 782 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type waitIdle(Dispatch const & d) const783 Queue::waitIdle( Dispatch const & d ) const 784 { 785 Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) ); 786 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); 787 } 788 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 789 790 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 791 template <typename Dispatch> waitIdle(Dispatch const & d) const792 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 793 { 794 return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) ); 795 } 796 #else 797 template <typename Dispatch> 798 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type waitIdle(Dispatch const & d) const799 Device::waitIdle( Dispatch const & d ) const 800 { 801 Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) ); 802 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); 803 } 804 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 805 806 template <typename Dispatch> 807 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,Dispatch const & d) const808 Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, 809 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 810 VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, 811 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 812 { 813 return static_cast<Result>( d.vkAllocateMemory( m_device, 814 reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), 815 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 816 reinterpret_cast<VkDeviceMemory *>( pMemory ) ) ); 817 } 818 819 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 820 template <typename Dispatch> 821 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 822 typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory(const MemoryAllocateInfo & allocateInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const823 Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, 824 Optional<const AllocationCallbacks> allocator, 825 Dispatch const & d ) const 826 { 827 VULKAN_HPP_NAMESPACE::DeviceMemory memory; 828 Result result = static_cast<Result>( 829 d.vkAllocateMemory( m_device, 830 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), 831 reinterpret_cast<const VkAllocationCallbacks *>( 832 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 833 reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); 834 return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); 835 } 836 837 # ifndef VULKAN_HPP_NO_SMART_HANDLE 838 template <typename Dispatch> 839 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 840 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type allocateMemoryUnique(const MemoryAllocateInfo & allocateInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const841 Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, 842 Optional<const AllocationCallbacks> allocator, 843 Dispatch const & d ) const 844 { 845 VULKAN_HPP_NAMESPACE::DeviceMemory memory; 846 Result result = static_cast<Result>( 847 d.vkAllocateMemory( m_device, 848 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), 849 reinterpret_cast<const VkAllocationCallbacks *>( 850 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 851 reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); 852 ObjectFree<Device, Dispatch> deleter( *this, allocator, d ); 853 return createResultValue<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( 854 result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter ); 855 } 856 # endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ 857 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 858 859 template <typename Dispatch> freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const860 VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 861 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 862 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 863 { 864 d.vkFreeMemory( 865 m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 866 } 867 868 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 869 template <typename Dispatch> freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const870 VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 871 Optional<const AllocationCallbacks> allocator, 872 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 873 { 874 d.vkFreeMemory( m_device, 875 static_cast<VkDeviceMemory>( memory ), 876 reinterpret_cast<const VkAllocationCallbacks *>( 877 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 878 } 879 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 880 881 template <typename Dispatch> free(VULKAN_HPP_NAMESPACE::DeviceMemory memory,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const882 VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 883 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 884 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 885 { 886 d.vkFreeMemory( 887 m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 888 } 889 890 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 891 template <typename Dispatch> free(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const892 VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 893 Optional<const AllocationCallbacks> allocator, 894 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 895 { 896 d.vkFreeMemory( m_device, 897 static_cast<VkDeviceMemory>( memory ), 898 reinterpret_cast<const VkAllocationCallbacks *>( 899 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 900 } 901 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 902 903 template <typename Dispatch> mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,void ** ppData,Dispatch const & d) const904 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 905 VULKAN_HPP_NAMESPACE::DeviceSize offset, 906 VULKAN_HPP_NAMESPACE::DeviceSize size, 907 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, 908 void ** ppData, 909 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 910 { 911 return static_cast<Result>( d.vkMapMemory( m_device, 912 static_cast<VkDeviceMemory>( memory ), 913 static_cast<VkDeviceSize>( offset ), 914 static_cast<VkDeviceSize>( size ), 915 static_cast<VkMemoryMapFlags>( flags ), 916 ppData ) ); 917 } 918 919 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 920 template <typename Dispatch> 921 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void *>::type mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize 
offset,VULKAN_HPP_NAMESPACE::DeviceSize size,VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,Dispatch const & d) const922 Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 923 VULKAN_HPP_NAMESPACE::DeviceSize offset, 924 VULKAN_HPP_NAMESPACE::DeviceSize size, 925 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, 926 Dispatch const & d ) const 927 { 928 void * pData; 929 Result result = static_cast<Result>( d.vkMapMemory( m_device, 930 static_cast<VkDeviceMemory>( memory ), 931 static_cast<VkDeviceSize>( offset ), 932 static_cast<VkDeviceSize>( size ), 933 static_cast<VkMemoryMapFlags>( flags ), 934 &pData ) ); 935 return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); 936 } 937 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 938 939 template <typename Dispatch> unmapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const940 VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 941 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 942 { 943 d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) ); 944 } 945 946 template <typename Dispatch> 947 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result flushMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const948 Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, 949 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, 950 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 951 { 952 return static_cast<Result>( d.vkFlushMappedMemoryRanges( 953 m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); 954 } 955 956 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 957 template <typename Dispatch> 958 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type flushMappedMemoryRanges(ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const959 Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, 960 Dispatch const & d ) const 961 { 962 Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( 963 m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 964 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); 965 } 966 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 967 968 template <typename Dispatch> 969 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result invalidateMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const970 Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, 971 const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, 972 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 973 { 974 return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( 975 m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); 976 } 977 978 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 979 template <typename Dispatch> 980 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type invalidateMappedMemoryRanges(ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const981 Device::invalidateMappedMemoryRanges( 982 ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const 
983 { 984 Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( 985 m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); 986 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); 987 } 988 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 989 990 template <typename Dispatch> getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,Dispatch const & d) const991 VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 992 VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, 993 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 994 { 995 d.vkGetDeviceMemoryCommitment( 996 m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) ); 997 } 998 999 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1000 template <typename Dispatch> 1001 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory,Dispatch const & d) const1002 Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1003 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1004 { 1005 VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; 1006 d.vkGetDeviceMemoryCommitment( 1007 m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) ); 1008 return committedMemoryInBytes; 1009 } 1010 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1011 1012 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1013 template <typename Dispatch> bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1014 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, 1015 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1016 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1017 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1018 { 1019 return static_cast<Result>( d.vkBindBufferMemory( m_device, 1020 static_cast<VkBuffer>( buffer ), 1021 static_cast<VkDeviceMemory>( memory ), 1022 static_cast<VkDeviceSize>( memoryOffset ) ) ); 1023 } 1024 #else 1025 template <typename Dispatch> 1026 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1027 Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, 1028 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1029 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1030 Dispatch const & d ) const 1031 { 1032 Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, 1033 static_cast<VkBuffer>( buffer ), 1034 static_cast<VkDeviceMemory>( memory ), 1035 static_cast<VkDeviceSize>( memoryOffset ) ) ); 1036 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); 1037 } 1038 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1039 1040 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1041 template <typename Dispatch> bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1042 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( 
VULKAN_HPP_NAMESPACE::Image image, 1043 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1044 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1045 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1046 { 1047 return static_cast<Result>( d.vkBindImageMemory( m_device, 1048 static_cast<VkImage>( image ), 1049 static_cast<VkDeviceMemory>( memory ), 1050 static_cast<VkDeviceSize>( memoryOffset ) ) ); 1051 } 1052 #else 1053 template <typename Dispatch> 1054 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type bindImageMemory(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::DeviceMemory memory,VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,Dispatch const & d) const1055 Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, 1056 VULKAN_HPP_NAMESPACE::DeviceMemory memory, 1057 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, 1058 Dispatch const & d ) const 1059 { 1060 Result result = static_cast<Result>( d.vkBindImageMemory( m_device, 1061 static_cast<VkImage>( image ), 1062 static_cast<VkDeviceMemory>( memory ), 1063 static_cast<VkDeviceSize>( memoryOffset ) ) ); 1064 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); 1065 } 1066 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1067 1068 template <typename Dispatch> 1069 VULKAN_HPP_INLINE void getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1070 Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, 1071 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1072 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1073 { 1074 d.vkGetBufferMemoryRequirements( 1075 m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1076 } 1077 1078 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1079 template <typename Dispatch> 1080 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer,Dispatch const & d) const1081 Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, 1082 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1083 { 1084 VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1085 d.vkGetBufferMemoryRequirements( 1086 m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1087 return memoryRequirements; 1088 } 1089 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1090 1091 template <typename Dispatch> 1092 VULKAN_HPP_INLINE void getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,Dispatch const & d) const1093 Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1094 VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, 1095 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1096 { 1097 d.vkGetImageMemoryRequirements( 1098 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); 1099 } 1100 1101 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1102 template <typename Dispatch> 1103 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1104 Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, 1105 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1106 { 1107 
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; 1108 d.vkGetImageMemoryRequirements( 1109 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); 1110 return memoryRequirements; 1111 } 1112 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1113 1114 template <typename Dispatch> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,uint32_t * pSparseMemoryRequirementCount,VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,Dispatch const & d) const1115 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( 1116 VULKAN_HPP_NAMESPACE::Image image, 1117 uint32_t * pSparseMemoryRequirementCount, 1118 VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, 1119 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1120 { 1121 d.vkGetImageSparseMemoryRequirements( 1122 m_device, 1123 static_cast<VkImage>( image ), 1124 pSparseMemoryRequirementCount, 1125 reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) ); 1126 } 1127 1128 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1129 template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch> 1130 VULKAN_HPP_NODISCARD 1131 VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,Dispatch const & d) const1132 Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const 1133 { 1134 std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements; 1135 uint32_t sparseMemoryRequirementCount; 1136 d.vkGetImageSparseMemoryRequirements( 1137 m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1138 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1139 d.vkGetImageSparseMemoryRequirements( 1140 m_device, 1141 static_cast<VkImage>( image ), 1142 &sparseMemoryRequirementCount, 1143 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1144 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1145 return sparseMemoryRequirements; 1146 } 1147 1148 template < 1149 typename SparseImageMemoryRequirementsAllocator, 1150 typename Dispatch, 1151 typename B, 1152 typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type> 1153 VULKAN_HPP_NODISCARD 1154 VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image,SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,Dispatch const & d) const1155 Device::getImageSparseMemoryRequirements( 1156 VULKAN_HPP_NAMESPACE::Image image, 1157 SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, 1158 Dispatch const & d ) const 1159 { 1160 std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( 1161 sparseImageMemoryRequirementsAllocator ); 1162 uint32_t sparseMemoryRequirementCount; 1163 d.vkGetImageSparseMemoryRequirements( 1164 m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); 1165 sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); 1166 d.vkGetImageSparseMemoryRequirements( 1167 m_device, 1168 static_cast<VkImage>( image ), 1169 
&sparseMemoryRequirementCount, 1170 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); 1171 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); 1172 return sparseMemoryRequirements; 1173 } 1174 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1175 1176 template <typename Dispatch> 1177 VULKAN_HPP_INLINE void getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,Dispatch const & d) const1178 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1179 VULKAN_HPP_NAMESPACE::ImageType type, 1180 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1181 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1182 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1183 uint32_t * pPropertyCount, 1184 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, 1185 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1186 { 1187 d.vkGetPhysicalDeviceSparseImageFormatProperties( 1188 m_physicalDevice, 1189 static_cast<VkFormat>( format ), 1190 static_cast<VkImageType>( type ), 1191 static_cast<VkSampleCountFlagBits>( samples ), 1192 static_cast<VkImageUsageFlags>( usage ), 1193 static_cast<VkImageTiling>( tiling ), 1194 pPropertyCount, 1195 reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) ); 1196 } 1197 1198 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1199 template <typename SparseImageFormatPropertiesAllocator, typename Dispatch> 1200 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::ImageType type,VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,VULKAN_HPP_NAMESPACE::ImageTiling tiling,Dispatch const & d) const1201 PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, 1202 VULKAN_HPP_NAMESPACE::ImageType type, 1203 VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, 1204 VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, 1205 VULKAN_HPP_NAMESPACE::ImageTiling tiling, 1206 Dispatch const & d ) const 1207 { 1208 std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties; 1209 uint32_t propertyCount; 1210 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, 1211 static_cast<VkFormat>( format ), 1212 static_cast<VkImageType>( type ), 1213 static_cast<VkSampleCountFlagBits>( samples ), 1214 static_cast<VkImageUsageFlags>( usage ), 1215 static_cast<VkImageTiling>( tiling ), 1216 &propertyCount, 1217 nullptr ); 1218 properties.resize( propertyCount ); 1219 d.vkGetPhysicalDeviceSparseImageFormatProperties( 1220 m_physicalDevice, 1221 static_cast<VkFormat>( format ), 1222 static_cast<VkImageType>( type ), 1223 static_cast<VkSampleCountFlagBits>( samples ), 1224 static_cast<VkImageUsageFlags>( usage ), 1225 static_cast<VkImageTiling>( tiling ), 1226 &propertyCount, 1227 reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); 1228 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 1229 return properties; 1230 } 1231 1232 template < 1233 typename SparseImageFormatPropertiesAllocator, 1234 typename Dispatch, 1235 typename B, 1236 typename 
      std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
    PhysicalDevice::getSparseImageFormatProperties(
      VULKAN_HPP_NAMESPACE::Format format,
      VULKAN_HPP_NAMESPACE::ImageType type,
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
      VULKAN_HPP_NAMESPACE::ImageTiling tiling,
      SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
      Dispatch const & d ) const
  {
    std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties(
      sparseImageFormatPropertiesAllocator );
    uint32_t propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
                                                      static_cast<VkFormat>( format ),
                                                      static_cast<VkImageType>( type ),
                                                      static_cast<VkSampleCountFlagBits>( samples ),
                                                      static_cast<VkImageUsageFlags>( usage ),
                                                      static_cast<VkImageTiling>( tiling ),
                                                      &propertyCount,
                                                      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkSampleCountFlagBits>( samples ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageTiling>( tiling ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Queue::bindSparse( uint32_t bindInfoCount,
                       const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
                       VULKAN_HPP_NAMESPACE::Fence fence,
                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkQueueBindSparse( m_queue,
                                                     bindInfoCount,
                                                     reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ),
                                                     static_cast<VkFence>( fence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
                       VULKAN_HPP_NAMESPACE::Fence fence,
                       Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkQueueBindSparse( m_queue,
                                                bindInfo.size(),
                                                reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ),
                                                static_cast<VkFence>( fence ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
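
  //=== Illustrative usage sketch (not part of the generated API) ===
  // A minimal, hedged example of how the fence commands defined below are typically driven
  // from application code. It assumes exceptions and enhanced mode are enabled, the default
  // dispatcher and default `vk` namespace are in use, and that `device` and the submitted
  // work are placeholders supplied by the caller.
  //
  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo{} );
  //   // ... submit work that signals `fence` ...
  //   vk::Result r = device.waitForFences( fence, VK_TRUE, UINT64_MAX );  // eSuccess or eTimeout
  //   device.resetFences( fence );
  //   device.destroyFence( fence );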
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                         VULKAN_HPP_NAMESPACE::Fence * pFence,
                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateFence( m_device,
                                                 reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkFence *>( pFence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
    Device::createFence( const FenceCreateInfo & createInfo,
                         Optional<const AllocationCallbacks> allocator,
                         Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::Fence fence;
    Result result = static_cast<Result>(
      d.vkCreateFence( m_device,
                       reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkFence *>( &fence ) ) );
    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
    Device::createFenceUnique( const FenceCreateInfo & createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::Fence fence;
    Result result = static_cast<Result>(
      d.vkCreateFence( m_device,
                       reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkFence *>( &fence ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
      result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFence(
      m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
                                               Optional<const AllocationCallbacks> allocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFence( m_device,
                      static_cast<VkFence>( fence ),
                      reinterpret_cast<const VkAllocationCallbacks *>(
                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFence(
      m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFence( m_device,
                      static_cast<VkFence>( fence ),
                      reinterpret_cast<const VkAllocationCallbacks *>(
                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount,
                                                                     const VULKAN_HPP_NAMESPACE::Fence * pFences,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
                                                                        Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount,
                                                                       const VULKAN_HPP_NAMESPACE::Fence * pFences,
                                                                       VULKAN_HPP_NAMESPACE::Bool32 waitAll,
                                                                       uint64_t timeout,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkWaitForFences(
      m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
                           VULKAN_HPP_NAMESPACE::Bool32 waitAll,
                           uint64_t timeout,
                           Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkWaitForFences( m_device,
                                                            fences.size(),
                                                            reinterpret_cast<const VkFence *>( fences.data() ),
                                                            static_cast<VkBool32>( waitAll ),
                                                            timeout ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                             VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateSemaphore( m_device,
                                                     reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
    Device::createSemaphore( const SemaphoreCreateInfo & createInfo,
                             Optional<const AllocationCallbacks> allocator,
                             Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
    Result result = static_cast<Result>(
      d.vkCreateSemaphore( m_device,
                           reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1493 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1494 return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); 1495 } 1496 1497 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1498 template <typename Dispatch> 1499 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 1500 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type createSemaphoreUnique(const SemaphoreCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1501 Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, 1502 Optional<const AllocationCallbacks> allocator, 1503 Dispatch const & d ) const 1504 { 1505 VULKAN_HPP_NAMESPACE::Semaphore semaphore; 1506 Result result = static_cast<Result>( 1507 d.vkCreateSemaphore( m_device, 1508 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), 1509 reinterpret_cast<const VkAllocationCallbacks *>( 1510 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1511 reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); 1512 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 1513 return createResultValue<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( 1514 result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter ); 1515 } 1516 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 1517 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1518 1519 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1520 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1521 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1522 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1523 { 1524 d.vkDestroySemaphore( 1525 m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1526 } 1527 1528 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1529 template <typename Dispatch> destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1530 VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1531 Optional<const AllocationCallbacks> allocator, 1532 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1533 { 1534 d.vkDestroySemaphore( m_device, 1535 static_cast<VkSemaphore>( semaphore ), 1536 reinterpret_cast<const VkAllocationCallbacks *>( 1537 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1538 } 1539 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1540 1541 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1542 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1543 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1544 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1545 { 1546 d.vkDestroySemaphore( 1547 m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1548 } 1549 1550 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1551 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Semaphore semaphore,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1552 VULKAN_HPP_INLINE void 
Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, 1553 Optional<const AllocationCallbacks> allocator, 1554 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1555 { 1556 d.vkDestroySemaphore( m_device, 1557 static_cast<VkSemaphore>( semaphore ), 1558 reinterpret_cast<const VkAllocationCallbacks *>( 1559 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1560 } 1561 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1562 1563 template <typename Dispatch> 1564 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Event * pEvent,Dispatch const & d) const1565 Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, 1566 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1567 VULKAN_HPP_NAMESPACE::Event * pEvent, 1568 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1569 { 1570 return static_cast<Result>( d.vkCreateEvent( m_device, 1571 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), 1572 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1573 reinterpret_cast<VkEvent *>( pEvent ) ) ); 1574 } 1575 1576 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1577 template <typename Dispatch> 1578 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type createEvent(const EventCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1579 Device::createEvent( const EventCreateInfo & createInfo, 1580 Optional<const AllocationCallbacks> allocator, 1581 Dispatch const & d ) const 1582 { 1583 VULKAN_HPP_NAMESPACE::Event event; 1584 Result result = static_cast<Result>( 1585 d.vkCreateEvent( m_device, 1586 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), 1587 reinterpret_cast<const VkAllocationCallbacks *>( 1588 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1589 reinterpret_cast<VkEvent *>( &event ) ) ); 1590 return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); 1591 } 1592 1593 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1594 template <typename Dispatch> 1595 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 1596 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type createEventUnique(const EventCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1597 Device::createEventUnique( const EventCreateInfo & createInfo, 1598 Optional<const AllocationCallbacks> allocator, 1599 Dispatch const & d ) const 1600 { 1601 VULKAN_HPP_NAMESPACE::Event event; 1602 Result result = static_cast<Result>( 1603 d.vkCreateEvent( m_device, 1604 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), 1605 reinterpret_cast<const VkAllocationCallbacks *>( 1606 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1607 reinterpret_cast<VkEvent *>( &event ) ) ); 1608 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 1609 return createResultValue<VULKAN_HPP_NAMESPACE::Event, Dispatch>( 1610 result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter ); 1611 } 1612 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 1613 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1614 1615 template <typename Dispatch> destroyEvent(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch 
const & d) const1616 VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, 1617 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1618 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1619 { 1620 d.vkDestroyEvent( 1621 m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1622 } 1623 1624 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1625 template <typename Dispatch> destroyEvent(VULKAN_HPP_NAMESPACE::Event event,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1626 VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, 1627 Optional<const AllocationCallbacks> allocator, 1628 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1629 { 1630 d.vkDestroyEvent( m_device, 1631 static_cast<VkEvent>( event ), 1632 reinterpret_cast<const VkAllocationCallbacks *>( 1633 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1634 } 1635 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1636 1637 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1638 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, 1639 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1640 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1641 { 1642 d.vkDestroyEvent( 1643 m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1644 } 1645 1646 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1647 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Event event,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1648 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, 1649 Optional<const AllocationCallbacks> allocator, 1650 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1651 { 1652 d.vkDestroyEvent( m_device, 1653 static_cast<VkEvent>( event ), 1654 reinterpret_cast<const VkAllocationCallbacks *>( 1655 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1656 } 1657 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1658 1659 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1660 template <typename Dispatch> getEventStatus(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1661 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, 1662 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1663 { 1664 return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); 1665 } 1666 #else 1667 template <typename Dispatch> getEventStatus(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1668 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, 1669 Dispatch const & d ) const 1670 { 1671 Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); 1672 return createResultValue( result, 1673 VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", 1674 { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); 1675 } 1676 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1677 1678 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1679 template <typename Dispatch> setEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1680 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, 1681 Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT 1682 { 1683 return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1684 } 1685 #else 1686 template <typename Dispatch> 1687 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type setEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1688 Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const 1689 { 1690 Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1691 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); 1692 } 1693 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1694 1695 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 1696 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1697 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, 1698 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1699 { 1700 return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1701 } 1702 #else 1703 template <typename Dispatch> resetEvent(VULKAN_HPP_NAMESPACE::Event event,Dispatch const & d) const1704 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, 1705 Dispatch const & d ) const 1706 { 1707 Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); 1708 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); 1709 } 1710 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1711 1712 template <typename Dispatch> 1713 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,Dispatch const & d) const1714 Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, 1715 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1716 VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, 1717 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1718 { 1719 return static_cast<Result>( d.vkCreateQueryPool( m_device, 1720 reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), 1721 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1722 reinterpret_cast<VkQueryPool *>( pQueryPool ) ) ); 1723 } 1724 1725 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1726 template <typename Dispatch> 1727 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 1728 typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool(const QueryPoolCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1729 Device::createQueryPool( const QueryPoolCreateInfo & createInfo, 1730 Optional<const AllocationCallbacks> allocator, 1731 Dispatch const & d ) const 1732 { 1733 VULKAN_HPP_NAMESPACE::QueryPool queryPool; 1734 Result result = static_cast<Result>( 1735 d.vkCreateQueryPool( m_device, 1736 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), 1737 reinterpret_cast<const VkAllocationCallbacks *>( 1738 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1739 reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); 1740 return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); 1741 } 1742 1743 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1744 template <typename Dispatch> 1745 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 1746 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type createQueryPoolUnique(const QueryPoolCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1747 Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, 1748 Optional<const AllocationCallbacks> allocator, 1749 Dispatch const & d ) const 1750 { 1751 VULKAN_HPP_NAMESPACE::QueryPool queryPool; 1752 Result result = static_cast<Result>( 1753 d.vkCreateQueryPool( m_device, 1754 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), 1755 reinterpret_cast<const VkAllocationCallbacks *>( 1756 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1757 reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); 1758 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 1759 return createResultValue<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( 1760 result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter ); 1761 } 1762 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 1763 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1764 1765 template <typename Dispatch> destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1766 VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 1767 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1768 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1769 { 1770 d.vkDestroyQueryPool( 1771 m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1772 } 1773 1774 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1775 template <typename Dispatch> destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1776 VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 1777 Optional<const AllocationCallbacks> allocator, 1778 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1779 { 1780 d.vkDestroyQueryPool( m_device, 1781 static_cast<VkQueryPool>( queryPool ), 1782 reinterpret_cast<const VkAllocationCallbacks *>( 1783 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1784 } 1785 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1786 1787 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::QueryPool queryPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1788 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 1789 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1790 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1791 { 1792 d.vkDestroyQueryPool( 1793 m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1794 } 1795 1796 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1797 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::QueryPool queryPool,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1798 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 1799 Optional<const AllocationCallbacks> allocator, 1800 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1801 { 1802 d.vkDestroyQueryPool( m_device, 1803 static_cast<VkQueryPool>( queryPool ), 1804 reinterpret_cast<const VkAllocationCallbacks *>( 1805 static_cast<const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1806 } 1807 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1808 1809 template <typename Dispatch> 1810 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,void * pData,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const1811 Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 1812 uint32_t firstQuery, 1813 uint32_t queryCount, 1814 size_t dataSize, 1815 void * pData, 1816 VULKAN_HPP_NAMESPACE::DeviceSize stride, 1817 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 1818 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1819 { 1820 return static_cast<Result>( d.vkGetQueryPoolResults( m_device, 1821 static_cast<VkQueryPool>( queryPool ), 1822 firstQuery, 1823 queryCount, 1824 dataSize, 1825 pData, 1826 static_cast<VkDeviceSize>( stride ), 1827 static_cast<VkQueryResultFlags>( flags ) ) ); 1828 } 1829 1830 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1831 template <typename T, typename Dispatch> 1832 VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) 1833 VULKAN_HPP_NODISCARD getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,ArrayProxy<T> const & data,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const1834 VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 1835 uint32_t firstQuery, 1836 uint32_t queryCount, 1837 ArrayProxy<T> const & data, 1838 VULKAN_HPP_NAMESPACE::DeviceSize stride, 1839 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 1840 Dispatch const & d ) const 1841 { 1842 Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, 1843 static_cast<VkQueryPool>( queryPool ), 1844 firstQuery, 1845 queryCount, 1846 data.size() * sizeof( T ), 1847 reinterpret_cast<void *>( data.data() ), 1848 static_cast<VkDeviceSize>( stride ), 1849 static_cast<VkQueryResultFlags>( flags ) ) ); 1850 return createResultValue( result, 1851 VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", 1852 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 1853 } 1854 1855 template <typename T, typename Allocator, typename Dispatch> 1856 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<T, Allocator>> getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const1857 Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 1858 uint32_t firstQuery, 1859 uint32_t queryCount, 1860 size_t dataSize, 1861 VULKAN_HPP_NAMESPACE::DeviceSize stride, 1862 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 1863 Dispatch const & d ) const 1864 { 1865 VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); 1866 std::vector<T, Allocator> data( dataSize / sizeof( T ) ); 1867 Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, 1868 static_cast<VkQueryPool>( queryPool ), 1869 firstQuery, 1870 queryCount, 1871 data.size() * sizeof( T ), 1872 reinterpret_cast<void *>( data.data() ), 1873 static_cast<VkDeviceSize>( stride ), 1874 static_cast<VkQueryResultFlags>( flags ) ) ); 1875 return createResultValue( result, 1876 data, 1877 
VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", 1878 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 1879 } 1880 1881 template <typename T, typename Dispatch> 1882 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<T> getQueryPoolResult(VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VULKAN_HPP_NAMESPACE::DeviceSize stride,VULKAN_HPP_NAMESPACE::QueryResultFlags flags,Dispatch const & d) const1883 Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, 1884 uint32_t firstQuery, 1885 uint32_t queryCount, 1886 VULKAN_HPP_NAMESPACE::DeviceSize stride, 1887 VULKAN_HPP_NAMESPACE::QueryResultFlags flags, 1888 Dispatch const & d ) const 1889 { 1890 T data; 1891 Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, 1892 static_cast<VkQueryPool>( queryPool ), 1893 firstQuery, 1894 queryCount, 1895 sizeof( T ), 1896 reinterpret_cast<void *>( &data ), 1897 static_cast<VkDeviceSize>( stride ), 1898 static_cast<VkQueryResultFlags>( flags ) ) ); 1899 return createResultValue( result, 1900 data, 1901 VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", 1902 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); 1903 } 1904 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1905 1906 template <typename Dispatch> 1907 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Buffer * pBuffer,Dispatch const & d) const1908 Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, 1909 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1910 VULKAN_HPP_NAMESPACE::Buffer * pBuffer, 1911 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1912 { 1913 return static_cast<Result>( d.vkCreateBuffer( m_device, 1914 reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), 1915 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 1916 reinterpret_cast<VkBuffer *>( pBuffer ) ) ); 1917 } 1918 1919 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1920 template <typename Dispatch> 1921 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type createBuffer(const BufferCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1922 Device::createBuffer( const BufferCreateInfo & createInfo, 1923 Optional<const AllocationCallbacks> allocator, 1924 Dispatch const & d ) const 1925 { 1926 VULKAN_HPP_NAMESPACE::Buffer buffer; 1927 Result result = static_cast<Result>( 1928 d.vkCreateBuffer( m_device, 1929 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), 1930 reinterpret_cast<const VkAllocationCallbacks *>( 1931 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1932 reinterpret_cast<VkBuffer *>( &buffer ) ) ); 1933 return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); 1934 } 1935 1936 # ifndef VULKAN_HPP_NO_SMART_HANDLE 1937 template <typename Dispatch> 1938 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 1939 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type createBufferUnique(const BufferCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1940 Device::createBufferUnique( const BufferCreateInfo & createInfo, 1941 Optional<const 
AllocationCallbacks> allocator, 1942 Dispatch const & d ) const 1943 { 1944 VULKAN_HPP_NAMESPACE::Buffer buffer; 1945 Result result = static_cast<Result>( 1946 d.vkCreateBuffer( m_device, 1947 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), 1948 reinterpret_cast<const VkAllocationCallbacks *>( 1949 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 1950 reinterpret_cast<VkBuffer *>( &buffer ) ) ); 1951 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 1952 return createResultValue<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( 1953 result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter ); 1954 } 1955 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 1956 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1957 1958 template <typename Dispatch> destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1959 VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 1960 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1961 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1962 { 1963 d.vkDestroyBuffer( 1964 m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1965 } 1966 1967 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1968 template <typename Dispatch> destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1969 VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, 1970 Optional<const AllocationCallbacks> allocator, 1971 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1972 { 1973 d.vkDestroyBuffer( m_device, 1974 static_cast<VkBuffer>( buffer ), 1975 reinterpret_cast<const VkAllocationCallbacks *>( 1976 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1977 } 1978 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 1979 1980 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1981 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, 1982 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 1983 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1984 { 1985 d.vkDestroyBuffer( 1986 m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 1987 } 1988 1989 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 1990 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const1991 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, 1992 Optional<const AllocationCallbacks> allocator, 1993 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 1994 { 1995 d.vkDestroyBuffer( m_device, 1996 static_cast<VkBuffer>( buffer ), 1997 reinterpret_cast<const VkAllocationCallbacks *>( 1998 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 1999 } 2000 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2001 2002 template <typename Dispatch> 2003 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::BufferView * pView,Dispatch const & d) const2004 Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, 
2005 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2006 VULKAN_HPP_NAMESPACE::BufferView * pView, 2007 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2008 { 2009 return static_cast<Result>( d.vkCreateBufferView( m_device, 2010 reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), 2011 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2012 reinterpret_cast<VkBufferView *>( pView ) ) ); 2013 } 2014 2015 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2016 template <typename Dispatch> 2017 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 2018 typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView(const BufferViewCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2019 Device::createBufferView( const BufferViewCreateInfo & createInfo, 2020 Optional<const AllocationCallbacks> allocator, 2021 Dispatch const & d ) const 2022 { 2023 VULKAN_HPP_NAMESPACE::BufferView view; 2024 Result result = static_cast<Result>( 2025 d.vkCreateBufferView( m_device, 2026 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), 2027 reinterpret_cast<const VkAllocationCallbacks *>( 2028 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2029 reinterpret_cast<VkBufferView *>( &view ) ) ); 2030 return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); 2031 } 2032 2033 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2034 template <typename Dispatch> 2035 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 2036 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type createBufferViewUnique(const BufferViewCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2037 Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, 2038 Optional<const AllocationCallbacks> allocator, 2039 Dispatch const & d ) const 2040 { 2041 VULKAN_HPP_NAMESPACE::BufferView view; 2042 Result result = static_cast<Result>( 2043 d.vkCreateBufferView( m_device, 2044 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), 2045 reinterpret_cast<const VkAllocationCallbacks *>( 2046 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2047 reinterpret_cast<VkBufferView *>( &view ) ) ); 2048 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2049 return createResultValue<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( 2050 result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter ); 2051 } 2052 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 2053 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2054 2055 template <typename Dispatch> destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2056 VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2057 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2058 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2059 { 2060 d.vkDestroyBufferView( m_device, 2061 static_cast<VkBufferView>( bufferView ), 2062 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2063 } 2064 2065 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2066 template <typename Dispatch> destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2067 VULKAN_HPP_INLINE void Device::destroyBufferView( 
VULKAN_HPP_NAMESPACE::BufferView bufferView, 2068 Optional<const AllocationCallbacks> allocator, 2069 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2070 { 2071 d.vkDestroyBufferView( m_device, 2072 static_cast<VkBufferView>( bufferView ), 2073 reinterpret_cast<const VkAllocationCallbacks *>( 2074 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2075 } 2076 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2077 2078 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2079 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2080 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2081 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2082 { 2083 d.vkDestroyBufferView( m_device, 2084 static_cast<VkBufferView>( bufferView ), 2085 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2086 } 2087 2088 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2089 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2090 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, 2091 Optional<const AllocationCallbacks> allocator, 2092 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2093 { 2094 d.vkDestroyBufferView( m_device, 2095 static_cast<VkBufferView>( bufferView ), 2096 reinterpret_cast<const VkAllocationCallbacks *>( 2097 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2098 } 2099 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2100 2101 template <typename Dispatch> 2102 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Image * pImage,Dispatch const & d) const2103 Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, 2104 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2105 VULKAN_HPP_NAMESPACE::Image * pImage, 2106 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2107 { 2108 return static_cast<Result>( d.vkCreateImage( m_device, 2109 reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), 2110 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2111 reinterpret_cast<VkImage *>( pImage ) ) ); 2112 } 2113 2114 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2115 template <typename Dispatch> 2116 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type createImage(const ImageCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2117 Device::createImage( const ImageCreateInfo & createInfo, 2118 Optional<const AllocationCallbacks> allocator, 2119 Dispatch const & d ) const 2120 { 2121 VULKAN_HPP_NAMESPACE::Image image; 2122 Result result = static_cast<Result>( 2123 d.vkCreateImage( m_device, 2124 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), 2125 reinterpret_cast<const VkAllocationCallbacks *>( 2126 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2127 reinterpret_cast<VkImage *>( &image ) ) ); 2128 return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); 2129 } 2130 2131 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2132 template <typename Dispatch> 2133 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
VULKAN_HPP_INLINE 2134 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type createImageUnique(const ImageCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2135 Device::createImageUnique( const ImageCreateInfo & createInfo, 2136 Optional<const AllocationCallbacks> allocator, 2137 Dispatch const & d ) const 2138 { 2139 VULKAN_HPP_NAMESPACE::Image image; 2140 Result result = static_cast<Result>( 2141 d.vkCreateImage( m_device, 2142 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), 2143 reinterpret_cast<const VkAllocationCallbacks *>( 2144 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2145 reinterpret_cast<VkImage *>( &image ) ) ); 2146 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2147 return createResultValue<VULKAN_HPP_NAMESPACE::Image, Dispatch>( 2148 result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter ); 2149 } 2150 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 2151 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2152 2153 template <typename Dispatch> destroyImage(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2154 VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, 2155 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2156 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2157 { 2158 d.vkDestroyImage( 2159 m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2160 } 2161 2162 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2163 template <typename Dispatch> destroyImage(VULKAN_HPP_NAMESPACE::Image image,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2164 VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, 2165 Optional<const AllocationCallbacks> allocator, 2166 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2167 { 2168 d.vkDestroyImage( m_device, 2169 static_cast<VkImage>( image ), 2170 reinterpret_cast<const VkAllocationCallbacks *>( 2171 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2172 } 2173 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2174 2175 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Image image,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2176 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, 2177 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2178 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2179 { 2180 d.vkDestroyImage( 2181 m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2182 } 2183 2184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2185 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Image image,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2186 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, 2187 Optional<const AllocationCallbacks> allocator, 2188 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2189 { 2190 d.vkDestroyImage( m_device, 2191 static_cast<VkImage>( image ), 2192 reinterpret_cast<const VkAllocationCallbacks *>( 2193 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2194 } 2195 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2196 2197 template <typename Dispatch> getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const 
VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,Dispatch const & d) const2198 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, 2199 const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, 2200 VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, 2201 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2202 { 2203 d.vkGetImageSubresourceLayout( m_device, 2204 static_cast<VkImage>( image ), 2205 reinterpret_cast<const VkImageSubresource *>( pSubresource ), 2206 reinterpret_cast<VkSubresourceLayout *>( pLayout ) ); 2207 } 2208 2209 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2210 template <typename Dispatch> 2211 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image,const ImageSubresource & subresource,Dispatch const & d) const2212 Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, 2213 const ImageSubresource & subresource, 2214 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2215 { 2216 VULKAN_HPP_NAMESPACE::SubresourceLayout layout; 2217 d.vkGetImageSubresourceLayout( m_device, 2218 static_cast<VkImage>( image ), 2219 reinterpret_cast<const VkImageSubresource *>( &subresource ), 2220 reinterpret_cast<VkSubresourceLayout *>( &layout ) ); 2221 return layout; 2222 } 2223 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2224 2225 template <typename Dispatch> 2226 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createImageView(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ImageView * pView,Dispatch const & d) const2227 Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, 2228 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2229 VULKAN_HPP_NAMESPACE::ImageView * pView, 2230 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2231 { 2232 return static_cast<Result>( d.vkCreateImageView( m_device, 2233 reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), 2234 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2235 reinterpret_cast<VkImageView *>( pView ) ) ); 2236 } 2237 2238 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2239 template <typename Dispatch> 2240 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 2241 typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView(const ImageViewCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2242 Device::createImageView( const ImageViewCreateInfo & createInfo, 2243 Optional<const AllocationCallbacks> allocator, 2244 Dispatch const & d ) const 2245 { 2246 VULKAN_HPP_NAMESPACE::ImageView view; 2247 Result result = static_cast<Result>( 2248 d.vkCreateImageView( m_device, 2249 reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), 2250 reinterpret_cast<const VkAllocationCallbacks *>( 2251 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2252 reinterpret_cast<VkImageView *>( &view ) ) ); 2253 return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); 2254 } 2255 2256 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2257 template <typename Dispatch> 2258 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 2259 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type createImageViewUnique(const ImageViewCreateInfo & createInfo,Optional<const 
AllocationCallbacks> allocator,Dispatch const & d) const2260 Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, 2261 Optional<const AllocationCallbacks> allocator, 2262 Dispatch const & d ) const 2263 { 2264 VULKAN_HPP_NAMESPACE::ImageView view; 2265 Result result = static_cast<Result>( 2266 d.vkCreateImageView( m_device, 2267 reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), 2268 reinterpret_cast<const VkAllocationCallbacks *>( 2269 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2270 reinterpret_cast<VkImageView *>( &view ) ) ); 2271 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2272 return createResultValue<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( 2273 result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter ); 2274 } 2275 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 2276 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2277 2278 template <typename Dispatch> destroyImageView(VULKAN_HPP_NAMESPACE::ImageView imageView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2279 VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, 2280 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2281 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2282 { 2283 d.vkDestroyImageView( 2284 m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2285 } 2286 2287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2288 template <typename Dispatch> destroyImageView(VULKAN_HPP_NAMESPACE::ImageView imageView,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2289 VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, 2290 Optional<const AllocationCallbacks> allocator, 2291 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2292 { 2293 d.vkDestroyImageView( m_device, 2294 static_cast<VkImageView>( imageView ), 2295 reinterpret_cast<const VkAllocationCallbacks *>( 2296 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2297 } 2298 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2299 2300 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ImageView imageView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2301 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, 2302 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2303 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2304 { 2305 d.vkDestroyImageView( 2306 m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2307 } 2308 2309 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2310 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ImageView imageView,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2311 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, 2312 Optional<const AllocationCallbacks> allocator, 2313 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2314 { 2315 d.vkDestroyImageView( m_device, 2316 static_cast<VkImageView>( imageView ), 2317 reinterpret_cast<const VkAllocationCallbacks *>( 2318 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2319 } 2320 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2321 2322 template <typename Dispatch> 2323 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createShaderModule(const 
VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,Dispatch const & d) const2324 Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, 2325 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2326 VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, 2327 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2328 { 2329 return static_cast<Result>( 2330 d.vkCreateShaderModule( m_device, 2331 reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), 2332 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2333 reinterpret_cast<VkShaderModule *>( pShaderModule ) ) ); 2334 } 2335 2336 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2337 template <typename Dispatch> 2338 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 2339 typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule(const ShaderModuleCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2340 Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, 2341 Optional<const AllocationCallbacks> allocator, 2342 Dispatch const & d ) const 2343 { 2344 VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; 2345 Result result = static_cast<Result>( 2346 d.vkCreateShaderModule( m_device, 2347 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), 2348 reinterpret_cast<const VkAllocationCallbacks *>( 2349 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2350 reinterpret_cast<VkShaderModule *>( &shaderModule ) ) ); 2351 return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); 2352 } 2353 2354 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2355 template <typename Dispatch> 2356 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 2357 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type createShaderModuleUnique(const ShaderModuleCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2358 Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, 2359 Optional<const AllocationCallbacks> allocator, 2360 Dispatch const & d ) const 2361 { 2362 VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; 2363 Result result = static_cast<Result>( 2364 d.vkCreateShaderModule( m_device, 2365 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), 2366 reinterpret_cast<const VkAllocationCallbacks *>( 2367 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2368 reinterpret_cast<VkShaderModule *>( &shaderModule ) ) ); 2369 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2370 return createResultValue<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( 2371 result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter ); 2372 } 2373 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 2374 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2375 2376 template <typename Dispatch> destroyShaderModule(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2377 VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2378 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2379 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2380 { 2381 
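    // Pointer-parameter overload: forwards straight to vkDestroyShaderModule; a null pAllocator lets
    // the implementation fall back to its default allocation callbacks.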
d.vkDestroyShaderModule( m_device, 2382 static_cast<VkShaderModule>( shaderModule ), 2383 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2384 } 2385 2386 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2387 template <typename Dispatch> destroyShaderModule(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2388 VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2389 Optional<const AllocationCallbacks> allocator, 2390 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2391 { 2392 d.vkDestroyShaderModule( m_device, 2393 static_cast<VkShaderModule>( shaderModule ), 2394 reinterpret_cast<const VkAllocationCallbacks *>( 2395 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2396 } 2397 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2398 2399 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2400 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2401 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2402 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2403 { 2404 d.vkDestroyShaderModule( m_device, 2405 static_cast<VkShaderModule>( shaderModule ), 2406 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2407 } 2408 2409 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2410 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2411 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, 2412 Optional<const AllocationCallbacks> allocator, 2413 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2414 { 2415 d.vkDestroyShaderModule( m_device, 2416 static_cast<VkShaderModule>( shaderModule ), 2417 reinterpret_cast<const VkAllocationCallbacks *>( 2418 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2419 } 2420 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2421 2422 template <typename Dispatch> 2423 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createPipelineCache(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,Dispatch const & d) const2424 Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, 2425 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2426 VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, 2427 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2428 { 2429 return static_cast<Result>( 2430 d.vkCreatePipelineCache( m_device, 2431 reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), 2432 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2433 reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) ); 2434 } 2435 2436 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2437 template <typename Dispatch> 2438 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 2439 typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache(const PipelineCacheCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2440 Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, 2441 Optional<const AllocationCallbacks> allocator, 2442 Dispatch const & d 
) const 2443 { 2444 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; 2445 Result result = static_cast<Result>( 2446 d.vkCreatePipelineCache( m_device, 2447 reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), 2448 reinterpret_cast<const VkAllocationCallbacks *>( 2449 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2450 reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) ); 2451 return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); 2452 } 2453 2454 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2455 template <typename Dispatch> 2456 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 2457 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type createPipelineCacheUnique(const PipelineCacheCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2458 Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, 2459 Optional<const AllocationCallbacks> allocator, 2460 Dispatch const & d ) const 2461 { 2462 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; 2463 Result result = static_cast<Result>( 2464 d.vkCreatePipelineCache( m_device, 2465 reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), 2466 reinterpret_cast<const VkAllocationCallbacks *>( 2467 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2468 reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) ); 2469 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2470 return createResultValue<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( 2471 result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter ); 2472 } 2473 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 2474 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2475 2476 template <typename Dispatch> destroyPipelineCache(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2477 VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2478 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2479 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2480 { 2481 d.vkDestroyPipelineCache( m_device, 2482 static_cast<VkPipelineCache>( pipelineCache ), 2483 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2484 } 2485 2486 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2487 template <typename Dispatch> destroyPipelineCache(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2488 VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2489 Optional<const AllocationCallbacks> allocator, 2490 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2491 { 2492 d.vkDestroyPipelineCache( m_device, 2493 static_cast<VkPipelineCache>( pipelineCache ), 2494 reinterpret_cast<const VkAllocationCallbacks *>( 2495 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2496 } 2497 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2498 2499 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const2500 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2501 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, 2502 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2503 { 2504 d.vkDestroyPipelineCache( m_device, 2505 static_cast<VkPipelineCache>( pipelineCache ), 2506 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 2507 } 2508 2509 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2510 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2511 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2512 Optional<const AllocationCallbacks> allocator, 2513 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2514 { 2515 d.vkDestroyPipelineCache( m_device, 2516 static_cast<VkPipelineCache>( pipelineCache ), 2517 reinterpret_cast<const VkAllocationCallbacks *>( 2518 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 2519 } 2520 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2521 2522 template <typename Dispatch> 2523 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,size_t * pDataSize,void * pData,Dispatch const & d) const2524 Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2525 size_t * pDataSize, 2526 void * pData, 2527 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2528 { 2529 return static_cast<Result>( 2530 d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) ); 2531 } 2532 2533 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2534 template <typename Uint8_tAllocator, typename Dispatch> 2535 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Dispatch const & d) const2536 Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const 2537 { 2538 std::vector<uint8_t, Uint8_tAllocator> data; 2539 size_t dataSize; 2540 Result result; 2541 do 2542 { 2543 result = static_cast<Result>( 2544 d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) ); 2545 if ( ( result == Result::eSuccess ) && dataSize ) 2546 { 2547 data.resize( dataSize ); 2548 result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, 2549 static_cast<VkPipelineCache>( pipelineCache ), 2550 &dataSize, 2551 reinterpret_cast<void *>( data.data() ) ) ); 2552 VULKAN_HPP_ASSERT( dataSize <= data.size() ); 2553 } 2554 } while ( result == Result::eIncomplete ); 2555 if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) ) 2556 { 2557 data.resize( dataSize ); 2558 } 2559 return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); 2560 } 2561 2562 template <typename Uint8_tAllocator, 2563 typename Dispatch, 2564 typename B, 2565 typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type> 2566 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,Uint8_tAllocator & uint8_tAllocator,Dispatch const & d) const2567 Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2568 Uint8_tAllocator & uint8_tAllocator, 2569 Dispatch const & d ) const 2570 { 2571 std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); 2572 size_t dataSize; 2573 Result result; 2574 do 2575 { 2576 
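      // Standard Vulkan two-call idiom: first query the required size with a null data pointer, then
      // fetch the bytes; the loop repeats while the cache changed in between, which
      // vkGetPipelineCacheData reports as VK_INCOMPLETE (Result::eIncomplete).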
      result = static_cast<Result>(
        d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
      if ( ( result == Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
                                                                static_cast<VkPipelineCache>( pipelineCache ),
                                                                &dataSize,
                                                                reinterpret_cast<void *>( data.data() ) ) );
        VULKAN_HPP_ASSERT( dataSize <= data.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
    {
      data.resize( dataSize );
    }
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache         dstCache,
                                 uint32_t                                    srcCacheCount,
                                 const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
                                 Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkMergePipelineCaches( m_device,
                                                          static_cast<VkPipelineCache>( dstCache ),
                                                          srcCacheCount,
                                                          reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache                           dstCache,
                                 ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
                                 Dispatch const &                                              d ) const
  {
    Result result =
      static_cast<Result>( d.vkMergePipelineCaches( m_device,
                                                    static_cast<VkPipelineCache>( dstCache ),
                                                    srcCaches.size(),
                                                    reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
                                     uint32_t                                                 createInfoCount,
                                     const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks *        pAllocator,
                                     VULKAN_HPP_NAMESPACE::Pipeline *                         pPipelines,
                                     Dispatch const &                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateGraphicsPipelines( m_device,
                                   static_cast<VkPipelineCache>( pipelineCache ),
                                   createInfoCount,
                                   reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                   reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }
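
  // The enhanced-mode overloads below return ResultValue (rather than ResultValueType) because
  // vkCreateGraphicsPipelines may report VK_PIPELINE_COMPILE_REQUIRED_EXT in addition to VK_SUCCESS,
  // and that success code must remain visible to the caller. A minimal usage sketch, assuming the
  // default 'vk' namespace and a valid 'device', 'cache' and filled-in 'graphicsPipelineCreateInfo'
  // (these names are illustrative only, not part of this header):
  //
  //   vk::ResultValue<std::vector<vk::Pipeline>> rv =
  //     device.createGraphicsPipelines( cache, graphicsPipelineCreateInfo );
  //   if ( rv.result == vk::Result::eSuccess )
  //   {
  //     std::vector<vk::Pipeline> pipelines = std::move( rv.value );
  //   }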
2642 2643 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2644 template <typename PipelineAllocator, typename Dispatch> 2645 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2646 Device::createGraphicsPipelines( 2647 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2648 ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 2649 Optional<const AllocationCallbacks> allocator, 2650 Dispatch const & d ) const 2651 { 2652 std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 2653 Result result = static_cast<Result>( 2654 d.vkCreateGraphicsPipelines( m_device, 2655 static_cast<VkPipelineCache>( pipelineCache ), 2656 createInfos.size(), 2657 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 2658 reinterpret_cast<const VkAllocationCallbacks *>( 2659 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2660 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2661 return createResultValue( 2662 result, 2663 pipelines, 2664 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", 2665 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2666 } 2667 2668 template <typename PipelineAllocator, 2669 typename Dispatch, 2670 typename B, 2671 typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type> 2672 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const2673 Device::createGraphicsPipelines( 2674 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2675 ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 2676 Optional<const AllocationCallbacks> allocator, 2677 PipelineAllocator & pipelineAllocator, 2678 Dispatch const & d ) const 2679 { 2680 std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 2681 Result result = static_cast<Result>( 2682 d.vkCreateGraphicsPipelines( m_device, 2683 static_cast<VkPipelineCache>( pipelineCache ), 2684 createInfos.size(), 2685 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 2686 reinterpret_cast<const VkAllocationCallbacks *>( 2687 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2688 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2689 return createResultValue( 2690 result, 2691 pipelines, 2692 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", 2693 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2694 } 2695 2696 template <typename Dispatch> 2697 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> createGraphicsPipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2698 Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache 
pipelineCache, 2699 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, 2700 Optional<const AllocationCallbacks> allocator, 2701 Dispatch const & d ) const 2702 { 2703 Pipeline pipeline; 2704 Result result = static_cast<Result>( 2705 d.vkCreateGraphicsPipelines( m_device, 2706 static_cast<VkPipelineCache>( pipelineCache ), 2707 1, 2708 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), 2709 reinterpret_cast<const VkAllocationCallbacks *>( 2710 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2711 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 2712 return createResultValue( 2713 result, 2714 pipeline, 2715 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", 2716 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2717 } 2718 2719 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2720 template <typename Dispatch, typename PipelineAllocator> 2721 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2722 Device::createGraphicsPipelinesUnique( 2723 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2724 ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 2725 Optional<const AllocationCallbacks> allocator, 2726 Dispatch const & d ) const 2727 { 2728 std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 2729 std::vector<Pipeline> pipelines( createInfos.size() ); 2730 Result result = static_cast<Result>( 2731 d.vkCreateGraphicsPipelines( m_device, 2732 static_cast<VkPipelineCache>( pipelineCache ), 2733 createInfos.size(), 2734 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 2735 reinterpret_cast<const VkAllocationCallbacks *>( 2736 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2737 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2738 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || 2739 ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) 2740 { 2741 uniquePipelines.reserve( createInfos.size() ); 2742 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2743 for ( size_t i = 0; i < createInfos.size(); i++ ) 2744 { 2745 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) ); 2746 } 2747 } 2748 return createResultValue( 2749 result, 2750 std::move( uniquePipelines ), 2751 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", 2752 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2753 } 2754 2755 template < 2756 typename Dispatch, 2757 typename PipelineAllocator, 2758 typename B, 2759 typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type> 2760 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,Optional<const AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const2761 
Device::createGraphicsPipelinesUnique( 2762 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2763 ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, 2764 Optional<const AllocationCallbacks> allocator, 2765 PipelineAllocator & pipelineAllocator, 2766 Dispatch const & d ) const 2767 { 2768 std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 2769 std::vector<Pipeline> pipelines( createInfos.size() ); 2770 Result result = static_cast<Result>( 2771 d.vkCreateGraphicsPipelines( m_device, 2772 static_cast<VkPipelineCache>( pipelineCache ), 2773 createInfos.size(), 2774 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), 2775 reinterpret_cast<const VkAllocationCallbacks *>( 2776 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2777 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2778 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || 2779 ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) 2780 { 2781 uniquePipelines.reserve( createInfos.size() ); 2782 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2783 for ( size_t i = 0; i < createInfos.size(); i++ ) 2784 { 2785 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) ); 2786 } 2787 } 2788 return createResultValue( 2789 result, 2790 std::move( uniquePipelines ), 2791 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", 2792 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2793 } 2794 2795 template <typename Dispatch> 2796 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2797 Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2798 const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, 2799 Optional<const AllocationCallbacks> allocator, 2800 Dispatch const & d ) const 2801 { 2802 Pipeline pipeline; 2803 Result result = static_cast<Result>( 2804 d.vkCreateGraphicsPipelines( m_device, 2805 static_cast<VkPipelineCache>( pipelineCache ), 2806 1, 2807 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), 2808 reinterpret_cast<const VkAllocationCallbacks *>( 2809 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2810 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 2811 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2812 return createResultValue<Pipeline, Dispatch>( 2813 result, 2814 pipeline, 2815 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", 2816 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, 2817 deleter ); 2818 } 2819 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 2820 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 2821 2822 template <typename Dispatch> 2823 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t createInfoCount,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const2824 
Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2825 uint32_t createInfoCount, 2826 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, 2827 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 2828 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 2829 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 2830 { 2831 return static_cast<Result>( 2832 d.vkCreateComputePipelines( m_device, 2833 static_cast<VkPipelineCache>( pipelineCache ), 2834 createInfoCount, 2835 reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), 2836 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 2837 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 2838 } 2839 2840 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 2841 template <typename PipelineAllocator, typename Dispatch> 2842 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2843 Device::createComputePipelines( 2844 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2845 ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 2846 Optional<const AllocationCallbacks> allocator, 2847 Dispatch const & d ) const 2848 { 2849 std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 2850 Result result = static_cast<Result>( 2851 d.vkCreateComputePipelines( m_device, 2852 static_cast<VkPipelineCache>( pipelineCache ), 2853 createInfos.size(), 2854 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 2855 reinterpret_cast<const VkAllocationCallbacks *>( 2856 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2857 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2858 return createResultValue( 2859 result, 2860 pipelines, 2861 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", 2862 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2863 } 2864 2865 template <typename PipelineAllocator, 2866 typename Dispatch, 2867 typename B, 2868 typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type> 2869 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const2870 Device::createComputePipelines( 2871 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2872 ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 2873 Optional<const AllocationCallbacks> allocator, 2874 PipelineAllocator & pipelineAllocator, 2875 Dispatch const & d ) const 2876 { 2877 std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 2878 Result result = static_cast<Result>( 2879 d.vkCreateComputePipelines( m_device, 2880 static_cast<VkPipelineCache>( pipelineCache ), 2881 createInfos.size(), 2882 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 2883 reinterpret_cast<const VkAllocationCallbacks *>( 2884 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) 
), 2885 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2886 return createResultValue( 2887 result, 2888 pipelines, 2889 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", 2890 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2891 } 2892 2893 template <typename Dispatch> 2894 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> createComputePipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2895 Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2896 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 2897 Optional<const AllocationCallbacks> allocator, 2898 Dispatch const & d ) const 2899 { 2900 Pipeline pipeline; 2901 Result result = static_cast<Result>( 2902 d.vkCreateComputePipelines( m_device, 2903 static_cast<VkPipelineCache>( pipelineCache ), 2904 1, 2905 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), 2906 reinterpret_cast<const VkAllocationCallbacks *>( 2907 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2908 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 2909 return createResultValue( 2910 result, 2911 pipeline, 2912 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", 2913 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2914 } 2915 2916 # ifndef VULKAN_HPP_NO_SMART_HANDLE 2917 template <typename Dispatch, typename PipelineAllocator> 2918 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2919 Device::createComputePipelinesUnique( 2920 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2921 ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 2922 Optional<const AllocationCallbacks> allocator, 2923 Dispatch const & d ) const 2924 { 2925 std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 2926 std::vector<Pipeline> pipelines( createInfos.size() ); 2927 Result result = static_cast<Result>( 2928 d.vkCreateComputePipelines( m_device, 2929 static_cast<VkPipelineCache>( pipelineCache ), 2930 createInfos.size(), 2931 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 2932 reinterpret_cast<const VkAllocationCallbacks *>( 2933 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2934 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2935 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || 2936 ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) 2937 { 2938 uniquePipelines.reserve( createInfos.size() ); 2939 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2940 for ( size_t i = 0; i < createInfos.size(); i++ ) 2941 { 2942 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) ); 2943 } 2944 } 2945 return createResultValue( 2946 result, 2947 std::move( uniquePipelines ), 2948 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", 2949 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2950 } 2951 2952 template < 2953 typename Dispatch, 2954 typename PipelineAllocator, 2955 typename B, 2956 typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type> 2957 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,Optional<const AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const2958 Device::createComputePipelinesUnique( 2959 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2960 ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, 2961 Optional<const AllocationCallbacks> allocator, 2962 PipelineAllocator & pipelineAllocator, 2963 Dispatch const & d ) const 2964 { 2965 std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 2966 std::vector<Pipeline> pipelines( createInfos.size() ); 2967 Result result = static_cast<Result>( 2968 d.vkCreateComputePipelines( m_device, 2969 static_cast<VkPipelineCache>( pipelineCache ), 2970 createInfos.size(), 2971 reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), 2972 reinterpret_cast<const VkAllocationCallbacks *>( 2973 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 2974 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 2975 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || 2976 ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) 2977 { 2978 uniquePipelines.reserve( createInfos.size() ); 2979 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 2980 for ( size_t i = 0; i < createInfos.size(); i++ ) 2981 { 2982 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) ); 2983 } 2984 } 2985 return createResultValue( 2986 result, 2987 std::move( uniquePipelines ), 2988 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", 2989 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 2990 } 2991 2992 template <typename Dispatch> 2993 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const2994 Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 2995 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, 2996 Optional<const AllocationCallbacks> allocator, 2997 Dispatch const & d ) const 2998 { 2999 Pipeline pipeline; 3000 Result result = static_cast<Result>( 3001 d.vkCreateComputePipelines( m_device, 3002 static_cast<VkPipelineCache>( pipelineCache ), 3003 1, 3004 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), 3005 reinterpret_cast<const VkAllocationCallbacks *>( 3006 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3007 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 3008 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3009 return createResultValue<Pipeline, Dispatch>( 3010 result, 3011 pipeline, 3012 VULKAN_HPP_NAMESPACE_STRING 
"::Device::createComputePipelineUnique", 3013 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, 3014 deleter ); 3015 } 3016 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 3017 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3018 3019 template <typename Dispatch> destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3020 VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3021 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3022 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3023 { 3024 d.vkDestroyPipeline( 3025 m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3026 } 3027 3028 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3029 template <typename Dispatch> destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3030 VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3031 Optional<const AllocationCallbacks> allocator, 3032 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3033 { 3034 d.vkDestroyPipeline( m_device, 3035 static_cast<VkPipeline>( pipeline ), 3036 reinterpret_cast<const VkAllocationCallbacks *>( 3037 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3038 } 3039 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3040 3041 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Pipeline pipeline,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3042 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3043 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3044 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3045 { 3046 d.vkDestroyPipeline( 3047 m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3048 } 3049 3050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3051 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Pipeline pipeline,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3052 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 3053 Optional<const AllocationCallbacks> allocator, 3054 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3055 { 3056 d.vkDestroyPipeline( m_device, 3057 static_cast<VkPipeline>( pipeline ), 3058 reinterpret_cast<const VkAllocationCallbacks *>( 3059 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3060 } 3061 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3062 3063 template <typename Dispatch> 3064 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createPipelineLayout(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,Dispatch const & d) const3065 Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, 3066 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3067 VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, 3068 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3069 { 3070 return static_cast<Result>( 3071 d.vkCreatePipelineLayout( m_device, 3072 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), 3073 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3074 
reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) ); 3075 } 3076 3077 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3078 template <typename Dispatch> 3079 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3080 typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout(const PipelineLayoutCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3081 Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, 3082 Optional<const AllocationCallbacks> allocator, 3083 Dispatch const & d ) const 3084 { 3085 VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; 3086 Result result = static_cast<Result>( 3087 d.vkCreatePipelineLayout( m_device, 3088 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), 3089 reinterpret_cast<const VkAllocationCallbacks *>( 3090 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3091 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); 3092 return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); 3093 } 3094 3095 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3096 template <typename Dispatch> 3097 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3098 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type createPipelineLayoutUnique(const PipelineLayoutCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3099 Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, 3100 Optional<const AllocationCallbacks> allocator, 3101 Dispatch const & d ) const 3102 { 3103 VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; 3104 Result result = static_cast<Result>( 3105 d.vkCreatePipelineLayout( m_device, 3106 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), 3107 reinterpret_cast<const VkAllocationCallbacks *>( 3108 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3109 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); 3110 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3111 return createResultValue<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( 3112 result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter ); 3113 } 3114 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 3115 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3116 3117 template <typename Dispatch> destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3118 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3119 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3120 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3121 { 3122 d.vkDestroyPipelineLayout( m_device, 3123 static_cast<VkPipelineLayout>( pipelineLayout ), 3124 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3125 } 3126 3127 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3128 template <typename Dispatch> destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3129 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3130 Optional<const AllocationCallbacks> allocator, 3131 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3132 { 3133 
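    // An empty Optional<const AllocationCallbacks> converts to a null VkAllocationCallbacks pointer,
    // so the implementation uses its default allocator; a non-empty Optional forwards the
    // user-supplied callbacks.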
d.vkDestroyPipelineLayout( m_device, 3134 static_cast<VkPipelineLayout>( pipelineLayout ), 3135 reinterpret_cast<const VkAllocationCallbacks *>( 3136 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3137 } 3138 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3139 3140 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3141 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3142 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3143 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3144 { 3145 d.vkDestroyPipelineLayout( m_device, 3146 static_cast<VkPipelineLayout>( pipelineLayout ), 3147 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3148 } 3149 3150 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3151 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3152 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, 3153 Optional<const AllocationCallbacks> allocator, 3154 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3155 { 3156 d.vkDestroyPipelineLayout( m_device, 3157 static_cast<VkPipelineLayout>( pipelineLayout ), 3158 reinterpret_cast<const VkAllocationCallbacks *>( 3159 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3160 } 3161 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3162 3163 template <typename Dispatch> 3164 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Sampler * pSampler,Dispatch const & d) const3165 Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, 3166 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3167 VULKAN_HPP_NAMESPACE::Sampler * pSampler, 3168 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3169 { 3170 return static_cast<Result>( d.vkCreateSampler( m_device, 3171 reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), 3172 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3173 reinterpret_cast<VkSampler *>( pSampler ) ) ); 3174 } 3175 3176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3177 template <typename Dispatch> 3178 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3179 typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler(const SamplerCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3180 Device::createSampler( const SamplerCreateInfo & createInfo, 3181 Optional<const AllocationCallbacks> allocator, 3182 Dispatch const & d ) const 3183 { 3184 VULKAN_HPP_NAMESPACE::Sampler sampler; 3185 Result result = static_cast<Result>( 3186 d.vkCreateSampler( m_device, 3187 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3188 reinterpret_cast<const VkAllocationCallbacks *>( 3189 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3190 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3191 return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); 3192 } 3193 3194 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3195 template <typename Dispatch> 3196 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3197 typename 
ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type createSamplerUnique(const SamplerCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3198 Device::createSamplerUnique( const SamplerCreateInfo & createInfo, 3199 Optional<const AllocationCallbacks> allocator, 3200 Dispatch const & d ) const 3201 { 3202 VULKAN_HPP_NAMESPACE::Sampler sampler; 3203 Result result = static_cast<Result>( 3204 d.vkCreateSampler( m_device, 3205 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), 3206 reinterpret_cast<const VkAllocationCallbacks *>( 3207 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3208 reinterpret_cast<VkSampler *>( &sampler ) ) ); 3209 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3210 return createResultValue<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( 3211 result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter ); 3212 } 3213 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 3214 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3215 3216 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3217 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3218 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3219 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3220 { 3221 d.vkDestroySampler( 3222 m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3223 } 3224 3225 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3226 template <typename Dispatch> destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3227 VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, 3228 Optional<const AllocationCallbacks> allocator, 3229 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3230 { 3231 d.vkDestroySampler( m_device, 3232 static_cast<VkSampler>( sampler ), 3233 reinterpret_cast<const VkAllocationCallbacks *>( 3234 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3235 } 3236 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3237 3238 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3239 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3240 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3241 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3242 { 3243 d.vkDestroySampler( 3244 m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3245 } 3246 3247 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3248 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Sampler sampler,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3249 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, 3250 Optional<const AllocationCallbacks> allocator, 3251 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3252 { 3253 d.vkDestroySampler( m_device, 3254 static_cast<VkSampler>( sampler ), 3255 reinterpret_cast<const VkAllocationCallbacks *>( 3256 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3257 } 3258 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3259 3260 template <typename Dispatch> 3261 
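  // createDescriptorSetLayout below follows the same three-overload pattern used throughout this file:
  // a raw pointer form, an enhanced form returning ResultValueType, and (when smart handles are
  // enabled) a *Unique form whose UniqueHandle carries an ObjectDestroy<Device, Dispatch> deleter.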
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks *           pAllocator,
                                       VULKAN_HPP_NAMESPACE::DescriptorSetLayout *                 pSetLayout,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDescriptorSetLayout( m_device,
                                     reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                     reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
    Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo,
                                       Optional<const AllocationCallbacks>   allocator,
                                       Dispatch const &                      d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
    Result result = static_cast<Result>(
      d.vkCreateDescriptorSetLayout( m_device,
                                     reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
    return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
    Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo,
                                             Optional<const AllocationCallbacks>   allocator,
                                             Dispatch const &                      d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
    Result result = static_cast<Result>(
      d.vkCreateDescriptorSetLayout( m_device,
                                     reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>(
      result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout         descriptorSetLayout,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorSetLayout( m_device,
                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
                                        Optional<const AllocationCallbacks>       allocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorSetLayout( m_device,
                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout         descriptorSetLayout,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorSetLayout( m_device,
                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
                                          Optional<const AllocationCallbacks>       allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorSetLayout( m_device,
                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                  VULKAN_HPP_NAMESPACE::DescriptorPool *                 pDescriptorPool,
                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDescriptorPool( m_device,
                                reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
3374 } 3375 3376 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3377 template <typename Dispatch> 3378 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3379 typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool(const DescriptorPoolCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3380 Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, 3381 Optional<const AllocationCallbacks> allocator, 3382 Dispatch const & d ) const 3383 { 3384 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3385 Result result = static_cast<Result>( 3386 d.vkCreateDescriptorPool( m_device, 3387 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3388 reinterpret_cast<const VkAllocationCallbacks *>( 3389 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3390 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3391 return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); 3392 } 3393 3394 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3395 template <typename Dispatch> 3396 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3397 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique(const DescriptorPoolCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3398 Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, 3399 Optional<const AllocationCallbacks> allocator, 3400 Dispatch const & d ) const 3401 { 3402 VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; 3403 Result result = static_cast<Result>( 3404 d.vkCreateDescriptorPool( m_device, 3405 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), 3406 reinterpret_cast<const VkAllocationCallbacks *>( 3407 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3408 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); 3409 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3410 return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( 3411 result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter ); 3412 } 3413 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 3414 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3415 3416 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3417 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3418 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3419 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3420 { 3421 d.vkDestroyDescriptorPool( m_device, 3422 static_cast<VkDescriptorPool>( descriptorPool ), 3423 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3424 } 3425 3426 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3427 template <typename Dispatch> destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3428 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3429 Optional<const AllocationCallbacks> allocator, 3430 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3431 { 3432 d.vkDestroyDescriptorPool( m_device, 3433 
static_cast<VkDescriptorPool>( descriptorPool ), 3434 reinterpret_cast<const VkAllocationCallbacks *>( 3435 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3436 } 3437 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3438 3439 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3440 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3441 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3442 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3443 { 3444 d.vkDestroyDescriptorPool( m_device, 3445 static_cast<VkDescriptorPool>( descriptorPool ), 3446 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3447 } 3448 3449 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3450 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3451 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3452 Optional<const AllocationCallbacks> allocator, 3453 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3454 { 3455 d.vkDestroyDescriptorPool( m_device, 3456 static_cast<VkDescriptorPool>( descriptorPool ), 3457 reinterpret_cast<const VkAllocationCallbacks *>( 3458 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3459 } 3460 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3461 3462 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 3463 template <typename Dispatch> resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3464 VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3465 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3466 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3467 { 3468 return static_cast<Result>( d.vkResetDescriptorPool( 3469 m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); 3470 } 3471 #else 3472 template <typename Dispatch> 3473 VULKAN_HPP_INLINE typename ResultValueType<void>::type resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,Dispatch const & d) const3474 Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3475 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, 3476 Dispatch const & d ) const 3477 { 3478 Result result = static_cast<Result>( d.vkResetDescriptorPool( 3479 m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); 3480 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" ); 3481 } 3482 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3483 3484 template <typename Dispatch> 3485 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const3486 Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, 3487 VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 3488 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3489 { 3490 return static_cast<Result>( 3491 d.vkAllocateDescriptorSets( m_device, 3492 
reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), 3493 reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) ); 3494 } 3495 3496 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3497 template <typename DescriptorSetAllocator, typename Dispatch> 3498 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3499 typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const3500 Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 3501 { 3502 std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount ); 3503 Result result = static_cast<Result>( 3504 d.vkAllocateDescriptorSets( m_device, 3505 reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), 3506 reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3507 return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 3508 } 3509 3510 template <typename DescriptorSetAllocator, 3511 typename Dispatch, 3512 typename B, 3513 typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type> 3514 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3515 typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets(const DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const3516 Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, 3517 DescriptorSetAllocator & descriptorSetAllocator, 3518 Dispatch const & d ) const 3519 { 3520 std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, 3521 descriptorSetAllocator ); 3522 Result result = static_cast<Result>( 3523 d.vkAllocateDescriptorSets( m_device, 3524 reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), 3525 reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3526 return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); 3527 } 3528 3529 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3530 template <typename Dispatch, typename DescriptorSetAllocator> 3531 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3532 typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const DescriptorSetAllocateInfo & allocateInfo,Dispatch const & d) const3533 Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const 3534 { 3535 std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets; 3536 std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 3537 Result result = static_cast<Result>( 3538 d.vkAllocateDescriptorSets( m_device, 3539 reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), 3540 reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3541 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 3542 { 3543 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 3544 PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 3545 for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ ) 3546 { 3547 uniqueDescriptorSets.push_back( 
UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) ); 3548 } 3549 } 3550 return createResultValue( 3551 result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 3552 } 3553 3554 template <typename Dispatch, 3555 typename DescriptorSetAllocator, 3556 typename B, 3557 typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, 3558 int>::type> 3559 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3560 typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique(const DescriptorSetAllocateInfo & allocateInfo,DescriptorSetAllocator & descriptorSetAllocator,Dispatch const & d) const3561 Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, 3562 DescriptorSetAllocator & descriptorSetAllocator, 3563 Dispatch const & d ) const 3564 { 3565 std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( 3566 descriptorSetAllocator ); 3567 std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); 3568 Result result = static_cast<Result>( 3569 d.vkAllocateDescriptorSets( m_device, 3570 reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), 3571 reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); 3572 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 3573 { 3574 uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); 3575 PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); 3576 for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ ) 3577 { 3578 uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) ); 3579 } 3580 } 3581 return createResultValue( 3582 result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); 3583 } 3584 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 3585 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3586 3587 template <typename Dispatch> freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,uint32_t descriptorSetCount,const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,Dispatch const & d) const3588 VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3589 uint32_t descriptorSetCount, 3590 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, 3591 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3592 { 3593 return static_cast<Result>( 3594 d.vkFreeDescriptorSets( m_device, 3595 static_cast<VkDescriptorPool>( descriptorPool ), 3596 descriptorSetCount, 3597 reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); 3598 } 3599 3600 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3601 template <typename Dispatch> 3602 VULKAN_HPP_INLINE typename ResultValueType<void>::type freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,Dispatch const & d) const3603 Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, 3604 ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, 3605 Dispatch const & d ) const 3606 { 3607 Result result = static_cast<Result>( 3608 d.vkFreeDescriptorSets( m_device, 3609 static_cast<VkDescriptorPool>( descriptorPool ), 3610 descriptorSets.size(), 3611 reinterpret_cast<const 
  VkDescriptorSet *>( descriptorSets.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool        descriptorPool,
                                         uint32_t                                    descriptorSetCount,
                                         const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkFreeDescriptorSets( m_device,
                              static_cast<VkDescriptorPool>( descriptorPool ),
                              descriptorSetCount,
                              reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool                          descriptorPool,
                  ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                  Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkFreeDescriptorSets( m_device,
                              static_cast<VkDescriptorPool>( descriptorPool ),
                              descriptorSets.size(),
                              reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::updateDescriptorSets( uint32_t                                         descriptorWriteCount,
                                  const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                                  uint32_t                                         descriptorCopyCount,
                                  const VULKAN_HPP_NAMESPACE::CopyDescriptorSet *  pDescriptorCopies,
                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUpdateDescriptorSets( m_device,
                              descriptorWriteCount,
                              reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
                              descriptorCopyCount,
                              reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const &  descriptorCopies,
                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUpdateDescriptorSets( m_device,
                              descriptorWrites.size(),
                              reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
                              descriptorCopies.size(),
                              reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
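
  //== Non-normative usage note (editor's addition, not part of the generated registry output) ==
  // Descriptor sets allocated from a pool are released either individually (freeDescriptorSets /
  // free, which require the pool to have been created with eFreeDescriptorSet) or all at once via
  // resetDescriptorPool. A minimal round trip, assuming exceptions are enabled and `device`,
  // `descriptorPool`, `setLayout` and `uniformBuffer` are valid handles (illustrative names only):
  //
  //   vk::DescriptorSetAllocateInfo allocInfo( descriptorPool, 1, &setLayout );
  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
  //   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
  //   vk::WriteDescriptorSet write( sets[0], 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
  //   device.updateDescriptorSets( write, nullptr );      // single elements convert to ArrayProxy
  //   device.freeDescriptorSets( descriptorPool, sets );
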
3674 3675 template <typename Dispatch> 3676 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,Dispatch const & d) const3677 Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, 3678 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3679 VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, 3680 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3681 { 3682 return static_cast<Result>( d.vkCreateFramebuffer( m_device, 3683 reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), 3684 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3685 reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) ); 3686 } 3687 3688 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3689 template <typename Dispatch> 3690 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3691 typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer(const FramebufferCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3692 Device::createFramebuffer( const FramebufferCreateInfo & createInfo, 3693 Optional<const AllocationCallbacks> allocator, 3694 Dispatch const & d ) const 3695 { 3696 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; 3697 Result result = static_cast<Result>( 3698 d.vkCreateFramebuffer( m_device, 3699 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), 3700 reinterpret_cast<const VkAllocationCallbacks *>( 3701 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3702 reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); 3703 return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); 3704 } 3705 3706 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3707 template <typename Dispatch> 3708 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3709 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type createFramebufferUnique(const FramebufferCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3710 Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, 3711 Optional<const AllocationCallbacks> allocator, 3712 Dispatch const & d ) const 3713 { 3714 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; 3715 Result result = static_cast<Result>( 3716 d.vkCreateFramebuffer( m_device, 3717 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), 3718 reinterpret_cast<const VkAllocationCallbacks *>( 3719 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3720 reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); 3721 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3722 return createResultValue<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( 3723 result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter ); 3724 } 3725 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 3726 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3727 3728 template <typename Dispatch> destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3729 VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 3730 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3731 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3732 { 3733 d.vkDestroyFramebuffer( m_device, 3734 static_cast<VkFramebuffer>( framebuffer ), 3735 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3736 } 3737 3738 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3739 template <typename Dispatch> destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3740 VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 3741 Optional<const AllocationCallbacks> allocator, 3742 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3743 { 3744 d.vkDestroyFramebuffer( m_device, 3745 static_cast<VkFramebuffer>( framebuffer ), 3746 reinterpret_cast<const VkAllocationCallbacks *>( 3747 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3748 } 3749 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3750 3751 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3752 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 3753 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3754 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3755 { 3756 d.vkDestroyFramebuffer( m_device, 3757 static_cast<VkFramebuffer>( framebuffer ), 3758 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3759 } 3760 3761 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3762 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3763 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, 3764 Optional<const AllocationCallbacks> allocator, 3765 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3766 { 3767 d.vkDestroyFramebuffer( m_device, 3768 static_cast<VkFramebuffer>( framebuffer ), 3769 reinterpret_cast<const VkAllocationCallbacks *>( 3770 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3771 } 3772 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3773 3774 template <typename Dispatch> 3775 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const3776 Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, 3777 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3778 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 3779 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3780 { 3781 return static_cast<Result>( d.vkCreateRenderPass( m_device, 3782 reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), 3783 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3784 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 3785 } 3786 3787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3788 template <typename Dispatch> 3789 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3790 typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass(const RenderPassCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3791 Device::createRenderPass( const RenderPassCreateInfo & createInfo, 3792 Optional<const AllocationCallbacks> allocator, 3793 Dispatch const & d ) const 3794 { 3795 
VULKAN_HPP_NAMESPACE::RenderPass renderPass; 3796 Result result = static_cast<Result>( 3797 d.vkCreateRenderPass( m_device, 3798 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), 3799 reinterpret_cast<const VkAllocationCallbacks *>( 3800 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3801 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 3802 return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); 3803 } 3804 3805 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3806 template <typename Dispatch> 3807 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3808 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPassUnique(const RenderPassCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3809 Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, 3810 Optional<const AllocationCallbacks> allocator, 3811 Dispatch const & d ) const 3812 { 3813 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 3814 Result result = static_cast<Result>( 3815 d.vkCreateRenderPass( m_device, 3816 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), 3817 reinterpret_cast<const VkAllocationCallbacks *>( 3818 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3819 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 3820 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3821 return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( 3822 result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter ); 3823 } 3824 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 3825 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3826 3827 template <typename Dispatch> destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3828 VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 3829 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3830 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3831 { 3832 d.vkDestroyRenderPass( m_device, 3833 static_cast<VkRenderPass>( renderPass ), 3834 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3835 } 3836 3837 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3838 template <typename Dispatch> destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3839 VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 3840 Optional<const AllocationCallbacks> allocator, 3841 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3842 { 3843 d.vkDestroyRenderPass( m_device, 3844 static_cast<VkRenderPass>( renderPass ), 3845 reinterpret_cast<const VkAllocationCallbacks *>( 3846 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3847 } 3848 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3849 3850 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::RenderPass renderPass,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3851 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 3852 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3853 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3854 { 3855 d.vkDestroyRenderPass( m_device, 3856 static_cast<VkRenderPass>( renderPass ), 3857 
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3858 } 3859 3860 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3861 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3862 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 3863 Optional<const AllocationCallbacks> allocator, 3864 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3865 { 3866 d.vkDestroyRenderPass( m_device, 3867 static_cast<VkRenderPass>( renderPass ), 3868 reinterpret_cast<const VkAllocationCallbacks *>( 3869 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3870 } 3871 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3872 3873 template <typename Dispatch> getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass,VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,Dispatch const & d) const3874 VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 3875 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, 3876 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3877 { 3878 d.vkGetRenderAreaGranularity( 3879 m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); 3880 } 3881 3882 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3883 template <typename Dispatch> 3884 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass,Dispatch const & d) const3885 Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, 3886 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3887 { 3888 VULKAN_HPP_NAMESPACE::Extent2D granularity; 3889 d.vkGetRenderAreaGranularity( 3890 m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) ); 3891 return granularity; 3892 } 3893 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3894 3895 template <typename Dispatch> 3896 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createCommandPool(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,Dispatch const & d) const3897 Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, 3898 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3899 VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, 3900 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3901 { 3902 return static_cast<Result>( d.vkCreateCommandPool( m_device, 3903 reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), 3904 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 3905 reinterpret_cast<VkCommandPool *>( pCommandPool ) ) ); 3906 } 3907 3908 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3909 template <typename Dispatch> 3910 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3911 typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool(const CommandPoolCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3912 Device::createCommandPool( const CommandPoolCreateInfo & createInfo, 3913 Optional<const AllocationCallbacks> allocator, 3914 Dispatch const & d ) const 3915 { 3916 VULKAN_HPP_NAMESPACE::CommandPool commandPool; 3917 Result result = static_cast<Result>( 3918 d.vkCreateCommandPool( m_device, 3919 reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), 3920 
reinterpret_cast<const VkAllocationCallbacks *>( 3921 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3922 reinterpret_cast<VkCommandPool *>( &commandPool ) ) ); 3923 return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); 3924 } 3925 3926 # ifndef VULKAN_HPP_NO_SMART_HANDLE 3927 template <typename Dispatch> 3928 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 3929 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type createCommandPoolUnique(const CommandPoolCreateInfo & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3930 Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, 3931 Optional<const AllocationCallbacks> allocator, 3932 Dispatch const & d ) const 3933 { 3934 VULKAN_HPP_NAMESPACE::CommandPool commandPool; 3935 Result result = static_cast<Result>( 3936 d.vkCreateCommandPool( m_device, 3937 reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), 3938 reinterpret_cast<const VkAllocationCallbacks *>( 3939 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 3940 reinterpret_cast<VkCommandPool *>( &commandPool ) ) ); 3941 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 3942 return createResultValue<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( 3943 result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter ); 3944 } 3945 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 3946 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3947 3948 template <typename Dispatch> destroyCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3949 VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 3950 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3951 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3952 { 3953 d.vkDestroyCommandPool( m_device, 3954 static_cast<VkCommandPool>( commandPool ), 3955 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3956 } 3957 3958 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3959 template <typename Dispatch> destroyCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3960 VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 3961 Optional<const AllocationCallbacks> allocator, 3962 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3963 { 3964 d.vkDestroyCommandPool( m_device, 3965 static_cast<VkCommandPool>( commandPool ), 3966 reinterpret_cast<const VkAllocationCallbacks *>( 3967 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3968 } 3969 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3970 3971 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::CommandPool commandPool,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const3972 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 3973 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 3974 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3975 { 3976 d.vkDestroyCommandPool( m_device, 3977 static_cast<VkCommandPool>( commandPool ), 3978 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 3979 } 3980 3981 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 3982 template <typename Dispatch> 
destroy(VULKAN_HPP_NAMESPACE::CommandPool commandPool,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const3983 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 3984 Optional<const AllocationCallbacks> allocator, 3985 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 3986 { 3987 d.vkDestroyCommandPool( m_device, 3988 static_cast<VkCommandPool>( commandPool ), 3989 reinterpret_cast<const VkAllocationCallbacks *>( 3990 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 3991 } 3992 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 3993 3994 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 3995 template <typename Dispatch> 3996 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,Dispatch const & d) const3997 Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 3998 VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, 3999 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4000 { 4001 return static_cast<Result>( d.vkResetCommandPool( 4002 m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); 4003 } 4004 #else 4005 template <typename Dispatch> 4006 VULKAN_HPP_INLINE typename ResultValueType<void>::type resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,Dispatch const & d) const4007 Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4008 VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, 4009 Dispatch const & d ) const 4010 { 4011 Result result = static_cast<Result>( d.vkResetCommandPool( 4012 m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); 4013 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); 4014 } 4015 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4016 4017 template <typename Dispatch> 4018 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const4019 Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, 4020 VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 4021 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4022 { 4023 return static_cast<Result>( 4024 d.vkAllocateCommandBuffers( m_device, 4025 reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), 4026 reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) ); 4027 } 4028 4029 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4030 template <typename CommandBufferAllocator, typename Dispatch> 4031 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 4032 typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers(const CommandBufferAllocateInfo & allocateInfo,Dispatch const & d) const4033 Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const 4034 { 4035 std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount ); 4036 Result result = static_cast<Result>( 4037 d.vkAllocateCommandBuffers( m_device, 4038 reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), 4039 reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4040 return 
createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); 4041 } 4042 4043 template <typename CommandBufferAllocator, 4044 typename Dispatch, 4045 typename B, 4046 typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type> 4047 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 4048 typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers(const CommandBufferAllocateInfo & allocateInfo,CommandBufferAllocator & commandBufferAllocator,Dispatch const & d) const4049 Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, 4050 CommandBufferAllocator & commandBufferAllocator, 4051 Dispatch const & d ) const 4052 { 4053 std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, 4054 commandBufferAllocator ); 4055 Result result = static_cast<Result>( 4056 d.vkAllocateCommandBuffers( m_device, 4057 reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), 4058 reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4059 return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); 4060 } 4061 4062 # ifndef VULKAN_HPP_NO_SMART_HANDLE 4063 template <typename Dispatch, typename CommandBufferAllocator> 4064 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 4065 typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique(const CommandBufferAllocateInfo & allocateInfo,Dispatch const & d) const4066 Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const 4067 { 4068 std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers; 4069 std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); 4070 Result result = static_cast<Result>( 4071 d.vkAllocateCommandBuffers( m_device, 4072 reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), 4073 reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4074 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 4075 { 4076 uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); 4077 PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); 4078 for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) 4079 { 4080 uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) ); 4081 } 4082 } 4083 return createResultValue( 4084 result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); 4085 } 4086 4087 template <typename Dispatch, 4088 typename CommandBufferAllocator, 4089 typename B, 4090 typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, 4091 int>::type> 4092 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 4093 typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique(const CommandBufferAllocateInfo & allocateInfo,CommandBufferAllocator & commandBufferAllocator,Dispatch const & d) const4094 Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, 4095 CommandBufferAllocator & commandBufferAllocator, 4096 Dispatch const & d ) const 4097 { 4098 
std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( 4099 commandBufferAllocator ); 4100 std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); 4101 Result result = static_cast<Result>( 4102 d.vkAllocateCommandBuffers( m_device, 4103 reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), 4104 reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); 4105 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 4106 { 4107 uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); 4108 PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); 4109 for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) 4110 { 4111 uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) ); 4112 } 4113 } 4114 return createResultValue( 4115 result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); 4116 } 4117 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 4118 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4119 4120 template <typename Dispatch> freeCommandBuffers(VULKAN_HPP_NAMESPACE::CommandPool commandPool,uint32_t commandBufferCount,const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const4121 VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4122 uint32_t commandBufferCount, 4123 const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 4124 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4125 { 4126 d.vkFreeCommandBuffers( m_device, 4127 static_cast<VkCommandPool>( commandPool ), 4128 commandBufferCount, 4129 reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); 4130 } 4131 4132 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4133 template <typename Dispatch> 4134 VULKAN_HPP_INLINE void freeCommandBuffers(VULKAN_HPP_NAMESPACE::CommandPool commandPool,ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,Dispatch const & d) const4135 Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4136 ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, 4137 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4138 { 4139 d.vkFreeCommandBuffers( m_device, 4140 static_cast<VkCommandPool>( commandPool ), 4141 commandBuffers.size(), 4142 reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); 4143 } 4144 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 4145 4146 template <typename Dispatch> free(VULKAN_HPP_NAMESPACE::CommandPool commandPool,uint32_t commandBufferCount,const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,Dispatch const & d) const4147 VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4148 uint32_t commandBufferCount, 4149 const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, 4150 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 4151 { 4152 d.vkFreeCommandBuffers( m_device, 4153 static_cast<VkCommandPool>( commandPool ), 4154 commandBufferCount, 4155 reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); 4156 } 4157 4158 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 4159 template <typename Dispatch> free(VULKAN_HPP_NAMESPACE::CommandPool commandPool,ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,Dispatch const & d) const4160 VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 4161 ArrayProxy<const 
  VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeCommandBuffers( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            commandBuffers.size(),
                            reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin(
    const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::end( Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset(
    VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
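
  //== Non-normative usage note (editor's addition, not part of the generated registry output) ==
  // CommandBuffer::begin and CommandBuffer::end bracket the vkCmd* wrappers that follow. A minimal
  // recording sketch, assuming exceptions are enabled and `cmd` / `pipeline` are valid handles
  // obtained elsewhere (illustrative names, not defined by this header):
  //
  //   cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
  //   cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
  //   // ... record state and draw commands here ...
  //   cmd.end();
  //   // a buffer allocated from a pool created with eResetCommandBuffer may later be recycled:
  //   cmd.reset( {} );
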
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                      VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindPipeline(
      m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                               firstViewport,
                                                     uint32_t                               viewportCount,
                                                     const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewport(
      m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                                                 firstViewport,
                                                     ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewport(
      m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                             firstScissor,
                                                    uint32_t                             scissorCount,
                                                    const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                                               firstScissor,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissor(
      m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor,
                                                      float depthBiasClamp,
                                                      float depthBiasSlopeFactor,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4],
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds,
                                                        float maxDepthBounds,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                               uint32_t compareMask,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                             uint32_t writeMask,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                             uint32_t reference,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                            VULKAN_HPP_NAMESPACE::PipelineLayout layout,
                                                            uint32_t firstSet,
                                                            uint32_t descriptorSetCount,
                                                            const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                                                            uint32_t dynamicOffsetCount,
                                                            const uint32_t * pDynamicOffsets,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSetCount,
                               reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
                               dynamicOffsetCount,
                               pDynamicOffsets );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets(
    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::PipelineLayout layout,
    uint32_t firstSet,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
    ArrayProxy<const uint32_t> const & dynamicOffsets,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSets.size(),
                               reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
                               dynamicOffsets.size(),
                               dynamicOffsets.data() );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                         VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                         VULKAN_HPP_NAMESPACE::IndexType indexType,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindIndexBuffer( m_commandBuffer,
                            static_cast<VkBuffer>( buffer ),
                            static_cast<VkDeviceSize>( offset ),
                            static_cast<VkIndexType>( indexType ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
                                                           uint32_t bindingCount,
                                                           const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                           const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindVertexBuffers( m_commandBuffer,
                              firstBinding,
                              bindingCount,
                              reinterpret_cast<const VkBuffer *>( pBuffers ),
                              reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
# else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers( m_commandBuffer,
                              firstBinding,
                              buffers.size(),
                              reinterpret_cast<const VkBuffer *>( buffers.data() ),
                              reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
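
  // Illustrative usage sketch (not part of the generated code): the ArrayProxy overload above
  // requires buffers.size() == offsets.size(); with exceptions enabled a mismatch throws
  // vk::LogicError, otherwise it only asserts. `commandBuffer`, `vertexBuffer` and
  // `instanceBuffer` are assumed to exist in the caller's code.
  //
  //   commandBuffer.bindVertexBuffers( 0, { vertexBuffer, instanceBuffer }, { 0, 0 } );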

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount,
                                              uint32_t instanceCount,
                                              uint32_t firstVertex,
                                              uint32_t firstInstance,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount,
                                                     uint32_t instanceCount,
                                                     uint32_t firstIndex,
                                                     int32_t vertexOffset,
                                                     uint32_t firstInstance,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                      uint32_t drawCount,
                                                      uint32_t stride,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirect(
      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                             uint32_t drawCount,
                                                             uint32_t stride,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexedIndirect(
      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX,
                                                  uint32_t groupCountY,
                                                  uint32_t groupCountZ,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                          VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
  }
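
  // Illustrative usage sketch (not part of the generated code): a typical indexed, instanced
  // draw recorded with the wrappers above. `commandBuffer`, `indexBuffer` and `indexCount` are
  // assumed to exist in the caller's code.
  //
  //   commandBuffer.bindIndexBuffer( indexBuffer, 0, vk::IndexType::eUint32 );
  //   commandBuffer.drawIndexed( indexCount, 1, 0, 0, 0 );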

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
                                                    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                    uint32_t regionCount,
                                                    const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( srcBuffer ),
                       static_cast<VkBuffer>( dstBuffer ),
                       regionCount,
                       reinterpret_cast<const VkBufferCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
                                                    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( srcBuffer ),
                       static_cast<VkBuffer>( dstBuffer ),
                       regions.size(),
                       reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                   uint32_t regionCount,
                                                   const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regionCount,
                      reinterpret_cast<const VkImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regions.size(),
                      reinterpret_cast<const VkImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                   uint32_t regionCount,
                                                   const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
                                                   VULKAN_HPP_NAMESPACE::Filter filter,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBlitImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regionCount,
                      reinterpret_cast<const VkImageBlit *>( pRegions ),
                      static_cast<VkFilter>( filter ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
                                                   VULKAN_HPP_NAMESPACE::Filter filter,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBlitImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regions.size(),
                      reinterpret_cast<const VkImageBlit *>( regions.data() ),
                      static_cast<VkFilter>( filter ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
                                                           VULKAN_HPP_NAMESPACE::Image dstImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                           uint32_t regionCount,
                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBufferToImage( m_commandBuffer,
                              static_cast<VkBuffer>( srcBuffer ),
                              static_cast<VkImage>( dstImage ),
                              static_cast<VkImageLayout>( dstImageLayout ),
                              regionCount,
                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
                                      VULKAN_HPP_NAMESPACE::Image dstImage,
                                      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBufferToImage( m_commandBuffer,
                              static_cast<VkBuffer>( srcBuffer ),
                              static_cast<VkImage>( dstImage ),
                              static_cast<VkImageLayout>( dstImageLayout ),
                              regions.size(),
                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
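
  // Illustrative usage sketch (not part of the generated code): a staging upload built on the
  // single-element ArrayProxy overload of copyBufferToImage. `commandBuffer`, `stagingBuffer`,
  // `texture` and `extent` are assumed to exist in the caller's code, and `texture` to already
  // be in eTransferDstOptimal layout.
  //
  //   vk::BufferImageCopy region( 0, 0, 0, { vk::ImageAspectFlagBits::eColor, 0, 0, 1 }, { 0, 0, 0 }, extent );
  //   commandBuffer.copyBufferToImage( stagingBuffer, texture, vk::ImageLayout::eTransferDstOptimal, region );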

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                           VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                           uint32_t regionCount,
                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImageToBuffer( m_commandBuffer,
                              static_cast<VkImage>( srcImage ),
                              static_cast<VkImageLayout>( srcImageLayout ),
                              static_cast<VkBuffer>( dstBuffer ),
                              regionCount,
                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
                                      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                      VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImageToBuffer( m_commandBuffer,
                              static_cast<VkImage>( srcImage ),
                              static_cast<VkImageLayout>( srcImageLayout ),
                              static_cast<VkBuffer>( dstBuffer ),
                              regions.size(),
                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
                                                      const void * pData,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdUpdateBuffer( m_commandBuffer,
                         static_cast<VkBuffer>( dstBuffer ),
                         static_cast<VkDeviceSize>( dstOffset ),
                         static_cast<VkDeviceSize>( dataSize ),
                         pData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                      ArrayProxy<const T> const & data,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdUpdateBuffer( m_commandBuffer,
                         static_cast<VkBuffer>( dstBuffer ),
                         static_cast<VkDeviceSize>( dstOffset ),
                         data.size() * sizeof( T ),
                         reinterpret_cast<const void *>( data.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
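
  // Illustrative usage sketch (not part of the generated code): the templated updateBuffer
  // overload derives the byte size as data.size() * sizeof( T ), so a small, 4-byte aligned
  // payload can be written directly from a trivially copyable object. `commandBuffer`,
  // `uniformBuffer` and a struct instance `cameraData` are assumed to exist in the caller's code.
  //
  //   commandBuffer.updateBuffer( uniformBuffer, 0, vk::ArrayProxy<const CameraData>( cameraData ) );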

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                    VULKAN_HPP_NAMESPACE::DeviceSize size,
                                                    uint32_t data,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdFillBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( dstBuffer ),
                       static_cast<VkDeviceSize>( dstOffset ),
                       static_cast<VkDeviceSize>( size ),
                       data );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
                                                         VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                                         const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
                                                         uint32_t rangeCount,
                                                         const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearColorImage( m_commandBuffer,
                            static_cast<VkImage>( image ),
                            static_cast<VkImageLayout>( imageLayout ),
                            reinterpret_cast<const VkClearColorValue *>( pColor ),
                            rangeCount,
                            reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
                                    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                    const ClearColorValue & color,
                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearColorImage( m_commandBuffer,
                            static_cast<VkImage>( image ),
                            static_cast<VkImageLayout>( imageLayout ),
                            reinterpret_cast<const VkClearColorValue *>( &color ),
                            ranges.size(),
                            reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
                                           VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                           const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
                                           uint32_t rangeCount,
                                           const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearDepthStencilImage( m_commandBuffer,
                                   static_cast<VkImage>( image ),
                                   static_cast<VkImageLayout>( imageLayout ),
                                   reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
                                   rangeCount,
                                   reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
                                           VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                           const ClearDepthStencilValue & depthStencil,
                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearDepthStencilImage( m_commandBuffer,
                                   static_cast<VkImage>( image ),
                                   static_cast<VkImageLayout>( imageLayout ),
                                   reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
                                   ranges.size(),
                                   reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount,
                                                          const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
                                                          uint32_t rectCount,
                                                          const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearAttachments( m_commandBuffer,
                             attachmentCount,
                             reinterpret_cast<const VkClearAttachment *>( pAttachments ),
                             rectCount,
                             reinterpret_cast<const VkClearRect *>( pRects ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
                                     ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearAttachments( m_commandBuffer,
                             attachments.size(),
                             reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
                             rects.size(),
                             reinterpret_cast<const VkClearRect *>( rects.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
                                                      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                      VULKAN_HPP_NAMESPACE::Image dstImage,
                                                      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                      uint32_t regionCount,
                                                      const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage( m_commandBuffer,
                         static_cast<VkImage>( srcImage ),
                         static_cast<VkImageLayout>( srcImageLayout ),
                         static_cast<VkImage>( dstImage ),
                         static_cast<VkImageLayout>( dstImageLayout ),
                         regionCount,
                         reinterpret_cast<const VkImageResolve *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
                                 VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                 VULKAN_HPP_NAMESPACE::Image dstImage,
                                 VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                 ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage( m_commandBuffer,
                         static_cast<VkImage>( srcImage ),
                         static_cast<VkImageLayout>( srcImageLayout ),
                         static_cast<VkImage>( dstImage ),
                         static_cast<VkImageLayout>( dstImageLayout ),
                         regions.size(),
                         reinterpret_cast<const VkImageResolve *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                  VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                    VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::waitEvents( uint32_t eventCount,
                               const VULKAN_HPP_NAMESPACE::Event * pEvents,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
                               uint32_t memoryBarrierCount,
                               const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
                               uint32_t bufferMemoryBarrierCount,
                               const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                               uint32_t imageMemoryBarrierCount,
                               const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWaitEvents( m_commandBuffer,
                       eventCount,
                       reinterpret_cast<const VkEvent *>( pEvents ),
                       static_cast<VkPipelineStageFlags>( srcStageMask ),
                       static_cast<VkPipelineStageFlags>( dstStageMask ),
                       memoryBarrierCount,
                       reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
                       bufferMemoryBarrierCount,
                       reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
                       imageMemoryBarrierCount,
                       reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWaitEvents( m_commandBuffer,
                       events.size(),
                       reinterpret_cast<const VkEvent *>( events.data() ),
                       static_cast<VkPipelineStageFlags>( srcStageMask ),
                       static_cast<VkPipelineStageFlags>( dstStageMask ),
                       memoryBarriers.size(),
                       reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
                       bufferMemoryBarriers.size(),
                       reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
                       imageMemoryBarriers.size(),
                       reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
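
  // Illustrative usage sketch (not part of the generated code): setEvent/waitEvents split a
  // dependency between two points in the same queue; empty barrier lists can be passed as
  // nullptr. `commandBuffer`, `event` and `bufferBarrier` are assumed to exist in the caller's code.
  //
  //   commandBuffer.setEvent( event, vk::PipelineStageFlagBits::eTransfer );
  //   // ... unrelated work ...
  //   commandBuffer.waitEvents( event,
  //                             vk::PipelineStageFlagBits::eTransfer,
  //                             vk::PipelineStageFlagBits::eVertexShader,
  //                             nullptr, bufferBarrier, nullptr );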

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
                                    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
                                    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
                                    uint32_t memoryBarrierCount,
                                    const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
                                    uint32_t bufferMemoryBarrierCount,
                                    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                                    uint32_t imageMemoryBarrierCount,
                                    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPipelineBarrier( m_commandBuffer,
                            static_cast<VkPipelineStageFlags>( srcStageMask ),
                            static_cast<VkPipelineStageFlags>( dstStageMask ),
                            static_cast<VkDependencyFlags>( dependencyFlags ),
                            memoryBarrierCount,
                            reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
                            bufferMemoryBarrierCount,
                            reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
                            imageMemoryBarrierCount,
                            reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier(
    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPipelineBarrier( m_commandBuffer,
                            static_cast<VkPipelineStageFlags>( srcStageMask ),
                            static_cast<VkPipelineStageFlags>( dstStageMask ),
                            static_cast<VkDependencyFlags>( dependencyFlags ),
                            memoryBarriers.size(),
                            reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
                            bufferMemoryBarriers.size(),
                            reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
                            imageMemoryBarriers.size(),
                            reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
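
  // Illustrative usage sketch (not part of the generated code): an image layout transition
  // recorded through the ArrayProxy overload above. `commandBuffer` and `image` are assumed to
  // exist in the caller's code.
  //
  //   vk::ImageMemoryBarrier barrier( {}, vk::AccessFlagBits::eTransferWrite,
  //                                   vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal,
  //                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
  //                                   { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
  //                                  {}, nullptr, nullptr, barrier );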

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                    uint32_t query,
                                                    VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginQuery(
      m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                  uint32_t query,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                        uint32_t firstQuery,
                                                        uint32_t queryCount,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                                                        VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                        uint32_t query,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteTimestamp( m_commandBuffer,
                           static_cast<VkPipelineStageFlagBits>( pipelineStage ),
                           static_cast<VkQueryPool>( queryPool ),
                           query );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                              uint32_t firstQuery,
                                                              uint32_t queryCount,
                                                              VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize stride,
                                                              VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyQueryPoolResults( m_commandBuffer,
                                 static_cast<VkQueryPool>( queryPool ),
                                 firstQuery,
                                 queryCount,
                                 static_cast<VkBuffer>( dstBuffer ),
                                 static_cast<VkDeviceSize>( dstOffset ),
                                 static_cast<VkDeviceSize>( stride ),
                                 static_cast<VkQueryResultFlags>( flags ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
                                                       VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                                                       uint32_t offset,
                                                       uint32_t size,
                                                       const void * pValues,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushConstants( m_commandBuffer,
                          static_cast<VkPipelineLayout>( layout ),
                          static_cast<VkShaderStageFlags>( stageFlags ),
                          offset,
                          size,
                          pValues );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
                                                       VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                                                       uint32_t offset,
                                                       ArrayProxy<const T> const & values,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushConstants( m_commandBuffer,
                          static_cast<VkPipelineLayout>( layout ),
                          static_cast<VkShaderStageFlags>( stageFlags ),
                          offset,
                          values.size() * sizeof( T ),
                          reinterpret_cast<const void *>( values.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                    VULKAN_HPP_NAMESPACE::SubpassContents contents,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass( m_commandBuffer,
                            reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
                            static_cast<VkSubpassContents>( contents ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin,
                                                         VULKAN_HPP_NAMESPACE::SubpassContents contents,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass( m_commandBuffer,
                            reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
                            static_cast<VkSubpassContents>( contents ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndRenderPass( m_commandBuffer );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount,
                                                         const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteCommands(
      m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteCommands(
      m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_VERSION_1_1 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion,
                                                                          Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
  {
    uint32_t apiVersion;
    Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
    return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
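
  // Illustrative usage sketch (not part of the generated code): with enhanced mode, the free
  // function returns the packed version directly (or throws on failure), so a Vulkan 1.1 check
  // can be written as:
  //
  //   uint32_t apiVersion = vk::enumerateInstanceVersion();
  //   bool hasVulkan11 = ( VK_VERSION_MAJOR( apiVersion ) > 1 ) || ( VK_VERSION_MINOR( apiVersion ) >= 1 );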

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::bindBufferMemory2( uint32_t bindInfoCount,
                               const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindBufferMemory2(
      m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
                               Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkBindBufferMemory2(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::bindImageMemory2( uint32_t bindInfoCount,
                              const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
                              Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkBindImageMemory2(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
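
  // Illustrative usage sketch (not part of the generated code): several buffers can be bound to
  // memory in one call by handing a container of BindBufferMemoryInfo to the ArrayProxy overload.
  // `device`, `buffer`, `memory` and `memoryOffset` are assumed to exist in the caller's code.
  //
  //   std::vector<vk::BindBufferMemoryInfo> bindInfos = { { buffer, memory, memoryOffset } };
  //   device.bindBufferMemory2( bindInfos );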

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
                                        uint32_t localDeviceIndex,
                                        uint32_t remoteDeviceIndex,
                                        VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
                                          heapIndex,
                                          localDeviceIndex,
                                          remoteDeviceIndex,
                                          reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
    Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
                                        uint32_t localDeviceIndex,
                                        uint32_t remoteDeviceIndex,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
    d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
                                          heapIndex,
                                          localDeviceIndex,
                                          remoteDeviceIndex,
                                          reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
    return peerMemoryFeatures;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX,
                                                      uint32_t baseGroupY,
                                                      uint32_t baseGroupZ,
                                                      uint32_t groupCountX,
                                                      uint32_t groupCountY,
                                                      uint32_t groupCountZ,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }
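
  // Illustrative usage sketch (not part of the generated code): in a device group, setDeviceMask
  // selects which physical devices execute the following commands, and dispatchBase offsets the
  // workgroup origin. `commandBuffer` is assumed to exist in the caller's code.
  //
  //   commandBuffer.setDeviceMask( 0x1 );
  //   commandBuffer.dispatchBase( 0, 0, 0, 64, 64, 1 );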

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups(
    uint32_t * pPhysicalDeviceGroupCount,
    VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
      m_instance,
      pPhysicalDeviceGroupCount,
      reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
    uint32_t physicalDeviceGroupCount;
    Result result;
    do
    {
      result =
        static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
          m_instance,
          &physicalDeviceGroupCount,
          reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
        VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValue(
      result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
  }

  template <
    typename PhysicalDeviceGroupPropertiesAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroups(
      PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
      physicalDeviceGroupPropertiesAllocator );
    uint32_t physicalDeviceGroupCount;
    Result result;
    do
    {
      result =
        static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
          m_instance,
          &physicalDeviceGroupCount,
          reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
        VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValue(
      result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                         VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageMemoryRequirements2( m_device,
                                     reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
                                     reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetImageMemoryRequirements2( m_device,
                                     reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
                                     reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetImageMemoryRequirements2( m_device,
                                     reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
                                     reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                          VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetBufferMemoryRequirements2( m_device,
                                      reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
                                      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetBufferMemoryRequirements2( m_device,
                                      reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
                                      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetBufferMemoryRequirements2( m_device,
                                      reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
                                      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2(
    const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
    uint32_t * pSparseMemoryRequirementCount,
    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageSparseMemoryRequirements2(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
      pSparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info,
                                               Dispatch const & d ) const
  {
    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements2( m_device,
                                           reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                           &sparseMemoryRequirementCount,
                                           nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }

  template <
    typename SparseImageMemoryRequirements2Allocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements2(
      const ImageSparseMemoryRequirementsInfo2 & info,
      SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
      Dispatch const & d ) const
  {
    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements2( m_device,
                                           reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                           &sparseMemoryRequirementCount,
                                           nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
    PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return features;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
                                      reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
    PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
                                      reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return properties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
                                      reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
                                          VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFormatProperties2(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
    PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
    d.vkGetPhysicalDeviceFormatProperties2(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return formatProperties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
    PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                               Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue(
      result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                               Dispatch const & d ) const
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue(
      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
                                               VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      pQueueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }

  template <typename QueueFamilyProperties2Allocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
                                               Dispatch const & d ) const
  {
    std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
      queueFamilyProperties2Allocator );
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }

  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext =
        returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return returnVector;
  }

  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
                                               Dispatch const & d ) const
  {
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount,
                                                                       structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext =
        returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return returnVector;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
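
  // Illustrative usage only, not part of the generated API: in enhanced mode the vector-returning overloads above
  // perform the count query, allocation, and second fill call internally, so the usual two-call enumeration pattern
  // collapses to a single expression. A minimal sketch, assuming the default dispatcher and a VulkanHpp physical
  // device `physicalDevice` obtained elsewhere:
  //
  //   std::vector<vk::QueueFamilyProperties2> families = physicalDevice.getQueueFamilyProperties2();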

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMemoryProperties2(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
    PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties2(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return memoryProperties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
    uint32_t * pPropertyCount,
    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
      pPropertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                     Dispatch const & d ) const
  {
    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }

  template <
    typename SparseImageFormatProperties2Allocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2(
      const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
      SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
      Dispatch const & d ) const
  {
    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
      sparseImageFormatProperties2Allocator );
    uint32_t propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                                                  VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkTrimCommandPool(
      m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
                                            VULKAN_HPP_NAMESPACE::Queue * pQueue,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceQueue2(
      m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
    Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::Queue queue;
    d.vkGetDeviceQueue2(
      m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
    return queue;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateSamplerYcbcrConversion( m_device,
                                        reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                        reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
    Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    Result result = static_cast<Result>(
      d.vkCreateSamplerYcbcrConversion( m_device,
                                        reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    return createResultValue(
      result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
    Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
                                                Optional<const AllocationCallbacks> allocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    Result result = static_cast<Result>(
      d.vkCreateSamplerYcbcrConversion( m_device,
                                        reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
      result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversion( m_device,
                                       static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                           Optional<const AllocationCallbacks> allocator,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversion(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversion( m_device,
                                       static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversion(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate(
    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
    Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    return createResultValue(
      result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
    Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                                  Optional<const AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
      result,
      descriptorUpdateTemplate,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique",
      deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplate( m_device,
                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                             Optional<const AllocationCallbacks> allocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplate(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplate( m_device,
                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplate(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
                                             VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                             const void * pData,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUpdateDescriptorSetWithTemplate( m_device,
                                         static_cast<VkDescriptorSet>( descriptorSet ),
                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                         pData );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalBufferProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
      reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
    PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
    d.vkGetPhysicalDeviceExternalBufferProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
      reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
    return externalBufferProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalFenceProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
      reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
    PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFenceProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
      reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
    return externalFenceProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalSemaphoreProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
      reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
    PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphoreProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
      reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
    return externalSemaphoreProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                           VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDescriptorSetLayoutSupport( m_device,
                                       reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
                                       reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
    Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
    d.vkGetDescriptorSetLayoutSupport( m_device,
                                       reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                       reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return support;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
      structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
    d.vkGetDescriptorSetLayoutSupport( m_device,
                                       reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                       reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_VERSION_1_2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                           VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                           VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                           VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                           uint32_t maxDrawCount,
                                                           uint32_t stride,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirectCount( m_commandBuffer,
                              static_cast<VkBuffer>( buffer ),
                              static_cast<VkDeviceSize>( offset ),
                              static_cast<VkBuffer>( countBuffer ),
                              static_cast<VkDeviceSize>( countBufferOffset ),
                              maxDrawCount,
                              stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                  VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                  uint32_t maxDrawCount,
                                                                  uint32_t stride,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
                                     static_cast<VkBuffer>( buffer ),
                                     static_cast<VkDeviceSize>( offset ),
                                     static_cast<VkBuffer>( countBuffer ),
                                     static_cast<VkDeviceSize>( countBufferOffset ),
                                     maxDrawCount,
                                     stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                               VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateRenderPass2( m_device,
                                                       reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
    Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    Result result = static_cast<Result>(
      d.vkCreateRenderPass2( m_device,
                             reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
    Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    Result result = static_cast<Result>(
      d.vkCreateRenderPass2( m_device,
                             reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
      result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                     const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass2( m_commandBuffer,
                             reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
                             reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin,
                                                          const SubpassBeginInfo & subpassBeginInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass2( m_commandBuffer,
                             reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
                             reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
                                                      const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass2( m_commandBuffer,
                         reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
                         reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo,
                                                      const SubpassEndInfo & subpassEndInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass2( m_commandBuffer,
                         reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
                         reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                 uint32_t firstQuery,
                                                 uint32_t queryCount,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue(
    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                 uint32_t                        firstQuery,
                                                 uint32_t                        queryCount,
                                                 Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue(
    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
    Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
  {
    uint64_t value;
    Result result =
      static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
                            uint64_t                                        timeout,
                            Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo,
                                                                        uint64_t                  timeout,
                                                                        Dispatch const &          d ) const
  {
    Result result = static_cast<Result>(
      d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore(
    const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
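
  // Editor's note - usage sketch, not part of the generated header: waiting on and signalling a timeline
  // semaphore via the enhanced-mode wrappers above. `device`, `timelineSemaphore`, `waitValue`,
  // `signalValue` and `timeoutNanoseconds` are assumed to exist in the calling code.
  //
  //   uint64_t counter = device.getSemaphoreCounterValue( timelineSemaphore );
  //   vk::SemaphoreWaitInfo waitInfo;
  //   waitInfo.semaphoreCount = 1;
  //   waitInfo.pSemaphores    = &timelineSemaphore;
  //   waitInfo.pValues        = &waitValue;
  //   vk::Result waitResult = device.waitSemaphores( waitInfo, timeoutNanoseconds );  // eSuccess or eTimeout
  //   device.signalSemaphore( vk::SemaphoreSignalInfo( timelineSemaphore, signalValue ) );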

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceAddress>(
      d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress(
    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t
    Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceMemoryOpaqueCaptureAddress(
      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceMemoryOpaqueCaptureAddress(
      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
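
  // Editor's note - usage sketch, not part of the generated header: querying a buffer's device address via
  // the VK_VERSION_1_2 wrappers above. `device` and `buffer` (a buffer created with the
  // eShaderDeviceAddress usage flag) are assumed to exist in the calling code.
  //
  //   vk::BufferDeviceAddressInfo addressInfo( buffer );
  //   vk::DeviceAddress deviceAddress = device.getBufferAddress( addressInfo );
  //   uint64_t captureAddress         = device.getBufferOpaqueCaptureAddress( addressInfo );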

  //=== VK_KHR_surface ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR                  surface,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySurfaceKHR(
      m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR    surface,
                                                      Optional<const AllocationCallbacks> allocator,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySurfaceKHR( m_instance,
                           static_cast<VkSurfaceKHR>( surface ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR                  surface,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySurfaceKHR(
      m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR    surface,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySurfaceKHR( m_instance,
                           static_cast<VkSurfaceKHR>( surface ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getSurfaceSupportKHR( uint32_t                         queueFamilyIndex,
                                          VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          VULKAN_HPP_NAMESPACE::Bool32 *   pSupported,
                                          Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
                                                                        queueFamilyIndex,
                                                                        static_cast<VkSurfaceKHR>( surface ),
                                                                        reinterpret_cast<VkBool32 *>( pSupported ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
    PhysicalDevice::getSurfaceSupportKHR( uint32_t                         queueFamilyIndex,
                                          VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          Dispatch const &                 d ) const
  {
    VULKAN_HPP_NAMESPACE::Bool32 supported;
    Result result =
      static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
                                                                   queueFamilyIndex,
                                                                   static_cast<VkSurfaceKHR>( surface ),
                                                                   reinterpret_cast<VkBool32 *>( &supported ) ) );
    return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR               surface,
                                               VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
      m_physicalDevice,
      static_cast<VkSurfaceKHR>( surface ),
      reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
    PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
      m_physicalDevice,
      static_cast<VkSurfaceKHR>( surface ),
      reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
    return createResultValue(
      result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR         surface,
                                          uint32_t *                               pSurfaceFormatCount,
                                          VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
                                              static_cast<VkSurfaceKHR>( surface ),
                                              pSurfaceFormatCount,
                                              reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SurfaceFormatKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
    PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
    uint32_t surfaceFormatCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
                                                  static_cast<VkSurfaceKHR>( surface ),
                                                  &surfaceFormatCount,
                                                  reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
        VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValue(
      result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  }

  template <typename SurfaceFormatKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
    PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          SurfaceFormatKHRAllocator &      surfaceFormatKHRAllocator,
                                          Dispatch const &                 d ) const
  {
    std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
    uint32_t surfaceFormatCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
                                                  static_cast<VkSurfaceKHR>( surface ),
                                                  &surfaceFormatCount,
                                                  reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
        VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValue(
      result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR       surface,
                                               uint32_t *                             pPresentModeCount,
                                               VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
                                                   static_cast<VkSurfaceKHR>( surface ),
                                                   pPresentModeCount,
                                                   reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  }
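
  // Editor's note - usage sketch, not part of the generated header: the usual VK_KHR_surface queries made
  // before creating a swapchain, using the enhanced-mode wrappers (the present-mode vector overloads follow
  // just below). `physicalDevice`, `surface` and `queueFamilyIndex` are assumed to exist in the calling code.
  //
  //   vk::Bool32 presentSupported                  = physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface );
  //   vk::SurfaceCapabilitiesKHR capabilities      = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  //   std::vector<vk::SurfaceFormatKHR> formats    = physicalDevice.getSurfaceFormatsKHR( surface );
  //   std::vector<vk::PresentModeKHR> presentModes = physicalDevice.getSurfacePresentModesKHR( surface );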

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PresentModeKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
    uint32_t presentModeCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
                                                       static_cast<VkSurfaceKHR>( surface ),
                                                       &presentModeCount,
                                                       reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValue(
      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
  }

  template <typename PresentModeKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                               PresentModeKHRAllocator &        presentModeKHRAllocator,
                                               Dispatch const &                 d ) const
  {
    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t presentModeCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
                                                       static_cast<VkSurfaceKHR>( surface ),
                                                       &presentModeCount,
                                                       reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValue(
      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_KHR_swapchain ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                VULKAN_HPP_NAMESPACE::SwapchainKHR *                 pSwapchain,
                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateSwapchainKHR( m_device,
                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                              reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
    Device::createSwapchainKHR( const SwapchainCreateInfoKHR &      createInfo,
                                Optional<const AllocationCallbacks> allocator,
                                Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    Result result = static_cast<Result>(
      d.vkCreateSwapchainKHR( m_device,
                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
    Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR &      createInfo,
                                      Optional<const AllocationCallbacks> allocator,
                                      Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    Result result = static_cast<Result>(
      d.vkCreateSwapchainKHR( m_device,
                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>(
      result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR                swapchain,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySwapchainKHR( m_device,
                             static_cast<VkSwapchainKHR>( swapchain ),
                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR  swapchain,
                                                      Optional<const AllocationCallbacks> allocator,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySwapchainKHR( m_device,
                             static_cast<VkSwapchainKHR>( swapchain ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
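
  // Editor's note - usage sketch, not part of the generated header: creating a swapchain with the
  // enhanced-mode wrapper above. Only a subset of SwapchainCreateInfoKHR fields is shown; `device`,
  // `surface`, `capabilities` and `surfaceFormat` are assumed to come from the surface queries earlier.
  //
  //   vk::SwapchainCreateInfoKHR swapchainCreateInfo;
  //   swapchainCreateInfo.surface          = surface;
  //   swapchainCreateInfo.minImageCount    = capabilities.minImageCount;
  //   swapchainCreateInfo.imageFormat      = surfaceFormat.format;
  //   swapchainCreateInfo.imageColorSpace  = surfaceFormat.colorSpace;
  //   swapchainCreateInfo.imageExtent      = capabilities.currentExtent;
  //   swapchainCreateInfo.imageArrayLayers = 1;
  //   swapchainCreateInfo.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment;
  //   swapchainCreateInfo.presentMode      = vk::PresentModeKHR::eFifo;
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainCreateInfo );
  //   // or, letting a UniqueHandle call vkDestroySwapchainKHR automatically:
  //   // vk::UniqueSwapchainKHR uniqueSwapchain = device.createSwapchainKHRUnique( swapchainCreateInfo );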

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR                swapchain,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySwapchainKHR( m_device,
                             static_cast<VkSwapchainKHR>( swapchain ),
                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR  swapchain,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySwapchainKHR( m_device,
                             static_cast<VkSwapchainKHR>( swapchain ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                   uint32_t *                         pSwapchainImageCount,
                                   VULKAN_HPP_NAMESPACE::Image *      pSwapchainImages,
                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
                                                           static_cast<VkSwapchainKHR>( swapchain ),
                                                           pSwapchainImageCount,
                                                           reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ImageAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
    Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    std::vector<Image, ImageAllocator> swapchainImages;
    uint32_t swapchainImageCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
        m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && swapchainImageCount )
      {
        swapchainImages.resize( swapchainImageCount );
        result =
          static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
                                                          static_cast<VkSwapchainKHR>( swapchain ),
                                                          &swapchainImageCount,
                                                          reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
        VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
    {
      swapchainImages.resize( swapchainImageCount );
    }
    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
  }
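
  // Editor's note - usage sketch, not part of the generated header: retrieving the images owned by a
  // swapchain. The enhanced-mode overload above hides the count/query/eIncomplete loop and returns the
  // complete vector; `device` and `swapchain` are assumed to exist in the calling code.
  //
  //   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );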

  template <typename ImageAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
    Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                   ImageAllocator &                   imageAllocator,
                                   Dispatch const &                   d ) const
  {
    std::vector<Image, ImageAllocator> swapchainImages( imageAllocator );
    uint32_t swapchainImageCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
        m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && swapchainImageCount )
      {
        swapchainImages.resize( swapchainImageCount );
        result =
          static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
                                                          static_cast<VkSwapchainKHR>( swapchain ),
                                                          &swapchainImageCount,
                                                          reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
        VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
    {
      swapchainImages.resize( swapchainImageCount );
    }
    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                 uint64_t                           timeout,
                                 VULKAN_HPP_NAMESPACE::Semaphore    semaphore,
                                 VULKAN_HPP_NAMESPACE::Fence        fence,
                                 uint32_t *                         pImageIndex,
                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
                                                         static_cast<VkSwapchainKHR>( swapchain ),
                                                         timeout,
                                                         static_cast<VkSemaphore>( semaphore ),
                                                         static_cast<VkFence>( fence ),
                                                         pImageIndex ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
    Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                 uint64_t                           timeout,
                                 VULKAN_HPP_NAMESPACE::Semaphore    semaphore,
                                 VULKAN_HPP_NAMESPACE::Fence        fence,
                                 Dispatch const &                   d ) const
  {
    uint32_t imageIndex;
    Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
                                                                  static_cast<VkSwapchainKHR>( swapchain ),
                                                                  timeout,
                                                                  static_cast<VkSemaphore>( semaphore ),
                                                                  static_cast<VkFence>( fence ),
                                                                  &imageIndex ) );
    return createResultValue( result,
                              imageIndex,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eTimeout,
                                VULKAN_HPP_NAMESPACE::Result::eNotReady,
                                VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
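
  // Editor's note - usage sketch, not part of the generated header: a minimal acquire/present step using
  // acquireNextImageKHR above and Queue::presentKHR defined just below. `device`, `queue`, `swapchain`,
  // `imageAvailableSemaphore` and `renderFinishedSemaphore` are assumed to exist; the rendering submission
  // is omitted. acquireNextImageKHR returns a ResultValue because eSuboptimalKHR is a success code that
  // still carries a valid image index.
  //
  //   vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore );
  //   uint32_t imageIndex = acquired.value;             // acquired.result: eSuccess, eSuboptimalKHR, ...
  //   // ... submit work that waits on imageAvailableSemaphore and signals renderFinishedSemaphore ...
  //   vk::PresentInfoKHR presentInfo;
  //   presentInfo.waitSemaphoreCount = 1;
  //   presentInfo.pWaitSemaphores    = &renderFinishedSemaphore;
  //   presentInfo.swapchainCount     = 1;
  //   presentInfo.pSwapchains        = &swapchain;
  //   presentInfo.pImageIndices      = &imageIndex;
  //   vk::Result presentResult = queue.presentKHR( presentInfo );   // eSuccess or eSuboptimalKHR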

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR(
    const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo,
                                                                   Dispatch const &       d ) const
  {
    Result result =
      static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
    return createResultValue(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
      m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
    Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
    Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
      m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
    return createResultValue(
      result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR                       surface,
                                            VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
                                                static_cast<VkSurfaceKHR>( surface ),
                                                reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
    Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const
& d ) const 6999 { 7000 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; 7001 Result result = static_cast<Result>( 7002 d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, 7003 static_cast<VkSurfaceKHR>( surface ), 7004 reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); 7005 return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); 7006 } 7007 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7008 7009 template <typename Dispatch> 7010 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pRectCount,VULKAN_HPP_NAMESPACE::Rect2D * pRects,Dispatch const & d) const7011 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7012 uint32_t * pRectCount, 7013 VULKAN_HPP_NAMESPACE::Rect2D * pRects, 7014 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7015 { 7016 return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 7017 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); 7018 } 7019 7020 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7021 template <typename Rect2DAllocator, typename Dispatch> 7022 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const7023 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 7024 { 7025 std::vector<Rect2D, Rect2DAllocator> rects; 7026 uint32_t rectCount; 7027 Result result; 7028 do 7029 { 7030 result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 7031 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 7032 if ( ( result == Result::eSuccess ) && rectCount ) 7033 { 7034 rects.resize( rectCount ); 7035 result = static_cast<Result>( 7036 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, 7037 static_cast<VkSurfaceKHR>( surface ), 7038 &rectCount, 7039 reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 7040 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 7041 } 7042 } while ( result == Result::eIncomplete ); 7043 if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) ) 7044 { 7045 rects.resize( rectCount ); 7046 } 7047 return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 7048 } 7049 7050 template <typename Rect2DAllocator, 7051 typename Dispatch, 7052 typename B, 7053 typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type> 7054 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Rect2DAllocator & rect2DAllocator,Dispatch const & d) const7055 PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 7056 Rect2DAllocator & rect2DAllocator, 7057 Dispatch const & d ) const 7058 { 7059 std::vector<Rect2D, Rect2DAllocator> rects( rect2DAllocator ); 7060 uint32_t rectCount; 7061 Result result; 7062 do 7063 { 7064 result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( 7065 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); 7066 if ( ( result == Result::eSuccess ) && rectCount ) 7067 { 7068 rects.resize( rectCount ); 7069 result = static_cast<Result>( 7070 
d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, 7071 static_cast<VkSurfaceKHR>( surface ), 7072 &rectCount, 7073 reinterpret_cast<VkRect2D *>( rects.data() ) ) ); 7074 VULKAN_HPP_ASSERT( rectCount <= rects.size() ); 7075 } 7076 } while ( result == Result::eIncomplete ); 7077 if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) ) 7078 { 7079 rects.resize( rectCount ); 7080 } 7081 return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 7082 } 7083 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7084 7085 template <typename Dispatch> 7086 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex,Dispatch const & d) const7087 Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, 7088 uint32_t * pImageIndex, 7089 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7090 { 7091 return static_cast<Result>( d.vkAcquireNextImage2KHR( 7092 m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) ); 7093 } 7094 7095 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7096 template <typename Dispatch> 7097 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> acquireNextImage2KHR(const AcquireNextImageInfoKHR & acquireInfo,Dispatch const & d) const7098 Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const 7099 { 7100 uint32_t imageIndex; 7101 Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( 7102 m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) ); 7103 return createResultValue( result, 7104 imageIndex, 7105 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", 7106 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 7107 VULKAN_HPP_NAMESPACE::Result::eTimeout, 7108 VULKAN_HPP_NAMESPACE::Result::eNotReady, 7109 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 7110 } 7111 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7112 7113 //=== VK_KHR_display === 7114 7115 template <typename Dispatch> 7116 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,Dispatch const & d) const7117 PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, 7118 VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, 7119 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7120 { 7121 return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( 7122 m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) ); 7123 } 7124 7125 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7126 template <typename DisplayPropertiesKHRAllocator, typename Dispatch> 7127 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7128 typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR(Dispatch const & d) const7129 PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const 7130 { 7131 std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties; 7132 uint32_t propertyCount; 7133 Result result; 7134 do 7135 { 7136 result = 7137 static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 7138 if ( ( result == Result::eSuccess ) && propertyCount ) 7139 { 7140 properties.resize( propertyCount ); 7141 result = static_cast<Result>( 
d.vkGetPhysicalDeviceDisplayPropertiesKHR( 7142 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); 7143 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 7144 } 7145 } while ( result == Result::eIncomplete ); 7146 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 7147 { 7148 properties.resize( propertyCount ); 7149 } 7150 return createResultValue( 7151 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); 7152 } 7153 7154 template <typename DisplayPropertiesKHRAllocator, 7155 typename Dispatch, 7156 typename B, 7157 typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type> 7158 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7159 typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR(DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,Dispatch const & d) const7160 PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, 7161 Dispatch const & d ) const 7162 { 7163 std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator ); 7164 uint32_t propertyCount; 7165 Result result; 7166 do 7167 { 7168 result = 7169 static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 7170 if ( ( result == Result::eSuccess ) && propertyCount ) 7171 { 7172 properties.resize( propertyCount ); 7173 result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( 7174 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); 7175 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 7176 } 7177 } while ( result == Result::eIncomplete ); 7178 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 7179 { 7180 properties.resize( propertyCount ); 7181 } 7182 return createResultValue( 7183 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); 7184 } 7185 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7186 7187 template <typename Dispatch> 7188 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPlanePropertiesKHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,Dispatch const & d) const7189 PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, 7190 VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, 7191 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7192 { 7193 return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 7194 m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) ); 7195 } 7196 7197 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7198 template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch> 7199 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7200 typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR(Dispatch const & d) const7201 PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const 7202 { 7203 std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties; 7204 uint32_t propertyCount; 7205 Result result; 7206 do 7207 { 7208 result = static_cast<Result>( 7209 d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 
7210 if ( ( result == Result::eSuccess ) && propertyCount ) 7211 { 7212 properties.resize( propertyCount ); 7213 result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 7214 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); 7215 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 7216 } 7217 } while ( result == Result::eIncomplete ); 7218 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 7219 { 7220 properties.resize( propertyCount ); 7221 } 7222 return createResultValue( 7223 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); 7224 } 7225 7226 template <typename DisplayPlanePropertiesKHRAllocator, 7227 typename Dispatch, 7228 typename B, 7229 typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type> 7230 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7231 typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR(DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator,Dispatch const & d) const7232 PhysicalDevice::getDisplayPlanePropertiesKHR( 7233 DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const 7234 { 7235 std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( 7236 displayPlanePropertiesKHRAllocator ); 7237 uint32_t propertyCount; 7238 Result result; 7239 do 7240 { 7241 result = static_cast<Result>( 7242 d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); 7243 if ( ( result == Result::eSuccess ) && propertyCount ) 7244 { 7245 properties.resize( propertyCount ); 7246 result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( 7247 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); 7248 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 7249 } 7250 } while ( result == Result::eIncomplete ); 7251 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 7252 { 7253 properties.resize( propertyCount ); 7254 } 7255 return createResultValue( 7256 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); 7257 } 7258 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7259 7260 template <typename Dispatch> 7261 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,uint32_t * pDisplayCount,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,Dispatch const & d) const7262 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, 7263 uint32_t * pDisplayCount, 7264 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, 7265 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7266 { 7267 return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( 7268 m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); 7269 } 7270 7271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7272 template <typename DisplayKHRAllocator, typename Dispatch> 7273 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,Dispatch const & d) const7274 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const 7275 { 7276 std::vector<DisplayKHR, 
DisplayKHRAllocator> displays; 7277 uint32_t displayCount; 7278 Result result; 7279 do 7280 { 7281 result = static_cast<Result>( 7282 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); 7283 if ( ( result == Result::eSuccess ) && displayCount ) 7284 { 7285 displays.resize( displayCount ); 7286 result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( 7287 m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); 7288 VULKAN_HPP_ASSERT( displayCount <= displays.size() ); 7289 } 7290 } while ( result == Result::eIncomplete ); 7291 if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) ) 7292 { 7293 displays.resize( displayCount ); 7294 } 7295 return createResultValue( 7296 result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); 7297 } 7298 7299 template <typename DisplayKHRAllocator, 7300 typename Dispatch, 7301 typename B, 7302 typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type> 7303 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex,DisplayKHRAllocator & displayKHRAllocator,Dispatch const & d) const7304 PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, 7305 DisplayKHRAllocator & displayKHRAllocator, 7306 Dispatch const & d ) const 7307 { 7308 std::vector<DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator ); 7309 uint32_t displayCount; 7310 Result result; 7311 do 7312 { 7313 result = static_cast<Result>( 7314 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); 7315 if ( ( result == Result::eSuccess ) && displayCount ) 7316 { 7317 displays.resize( displayCount ); 7318 result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( 7319 m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); 7320 VULKAN_HPP_ASSERT( displayCount <= displays.size() ); 7321 } 7322 } while ( result == Result::eIncomplete ); 7323 if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) ) 7324 { 7325 displays.resize( displayCount ); 7326 } 7327 return createResultValue( 7328 result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); 7329 } 7330 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7331 7332 template <typename Dispatch> 7333 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,Dispatch const & d) const7334 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 7335 uint32_t * pPropertyCount, 7336 VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, 7337 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7338 { 7339 return static_cast<Result>( 7340 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, 7341 static_cast<VkDisplayKHR>( display ), 7342 pPropertyCount, 7343 reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) ); 7344 } 7345 7346 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7347 template <typename DisplayModePropertiesKHRAllocator, typename Dispatch> 7348 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7349 typename ResultValueType<std::vector<DisplayModePropertiesKHR, 
DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const7350 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 7351 { 7352 std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties; 7353 uint32_t propertyCount; 7354 Result result; 7355 do 7356 { 7357 result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( 7358 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 7359 if ( ( result == Result::eSuccess ) && propertyCount ) 7360 { 7361 properties.resize( propertyCount ); 7362 result = static_cast<Result>( 7363 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, 7364 static_cast<VkDisplayKHR>( display ), 7365 &propertyCount, 7366 reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); 7367 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 7368 } 7369 } while ( result == Result::eIncomplete ); 7370 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 7371 { 7372 properties.resize( propertyCount ); 7373 } 7374 return createResultValue( 7375 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); 7376 } 7377 7378 template <typename DisplayModePropertiesKHRAllocator, 7379 typename Dispatch, 7380 typename B, 7381 typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type> 7382 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 7383 typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,Dispatch const & d) const7384 PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 7385 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, 7386 Dispatch const & d ) const 7387 { 7388 std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( 7389 displayModePropertiesKHRAllocator ); 7390 uint32_t propertyCount; 7391 Result result; 7392 do 7393 { 7394 result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( 7395 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 7396 if ( ( result == Result::eSuccess ) && propertyCount ) 7397 { 7398 properties.resize( propertyCount ); 7399 result = static_cast<Result>( 7400 d.vkGetDisplayModePropertiesKHR( m_physicalDevice, 7401 static_cast<VkDisplayKHR>( display ), 7402 &propertyCount, 7403 reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); 7404 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 7405 } 7406 } while ( result == Result::eIncomplete ); 7407 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 7408 { 7409 properties.resize( propertyCount ); 7410 } 7411 return createResultValue( 7412 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); 7413 } 7414 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7415 7416 template <typename Dispatch> 7417 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,Dispatch const & d) const7418 
PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 7419 const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, 7420 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7421 VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, 7422 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7423 { 7424 return static_cast<Result>( 7425 d.vkCreateDisplayModeKHR( m_physicalDevice, 7426 static_cast<VkDisplayKHR>( display ), 7427 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), 7428 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7429 reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) ); 7430 } 7431 7432 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7433 template <typename Dispatch> 7434 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7435 typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,const DisplayModeCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7436 PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 7437 const DisplayModeCreateInfoKHR & createInfo, 7438 Optional<const AllocationCallbacks> allocator, 7439 Dispatch const & d ) const 7440 { 7441 VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; 7442 Result result = static_cast<Result>( 7443 d.vkCreateDisplayModeKHR( m_physicalDevice, 7444 static_cast<VkDisplayKHR>( display ), 7445 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), 7446 reinterpret_cast<const VkAllocationCallbacks *>( 7447 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7448 reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); 7449 return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); 7450 } 7451 7452 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7453 template <typename Dispatch> 7454 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7455 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type createDisplayModeKHRUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const DisplayModeCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7456 PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, 7457 const DisplayModeCreateInfoKHR & createInfo, 7458 Optional<const AllocationCallbacks> allocator, 7459 Dispatch const & d ) const 7460 { 7461 VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; 7462 Result result = static_cast<Result>( 7463 d.vkCreateDisplayModeKHR( m_physicalDevice, 7464 static_cast<VkDisplayKHR>( display ), 7465 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), 7466 reinterpret_cast<const VkAllocationCallbacks *>( 7467 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7468 reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); 7469 ObjectDestroy<PhysicalDevice, Dispatch> deleter( *this, allocator, d ); 7470 return createResultValue<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( 7471 result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter ); 7472 } 7473 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 7474 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7475 7476 template <typename Dispatch> 7477 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,uint32_t 
planeIndex,VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,Dispatch const & d) const7478 PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, 7479 uint32_t planeIndex, 7480 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, 7481 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7482 { 7483 return static_cast<Result>( 7484 d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, 7485 static_cast<VkDisplayModeKHR>( mode ), 7486 planeIndex, 7487 reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) ); 7488 } 7489 7490 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7491 template <typename Dispatch> 7492 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7493 typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,uint32_t planeIndex,Dispatch const & d) const7494 PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, 7495 uint32_t planeIndex, 7496 Dispatch const & d ) const 7497 { 7498 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; 7499 Result result = static_cast<Result>( 7500 d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, 7501 static_cast<VkDisplayModeKHR>( mode ), 7502 planeIndex, 7503 reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) ); 7504 return createResultValue( 7505 result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); 7506 } 7507 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7508 7509 template <typename Dispatch> 7510 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDisplayPlaneSurfaceKHR(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const7511 Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, 7512 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7513 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 7514 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7515 { 7516 return static_cast<Result>( 7517 d.vkCreateDisplayPlaneSurfaceKHR( m_instance, 7518 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), 7519 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7520 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 7521 } 7522 7523 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7524 template <typename Dispatch> 7525 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7526 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR(const DisplaySurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7527 Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, 7528 Optional<const AllocationCallbacks> allocator, 7529 Dispatch const & d ) const 7530 { 7531 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7532 Result result = static_cast<Result>( 7533 d.vkCreateDisplayPlaneSurfaceKHR( m_instance, 7534 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), 7535 reinterpret_cast<const VkAllocationCallbacks *>( 7536 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7537 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 7538 return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createDisplayPlaneSurfaceKHR" ); 7539 } 7540 7541 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7542 template <typename Dispatch> 7543 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7544 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDisplayPlaneSurfaceKHRUnique(const DisplaySurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7545 Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, 7546 Optional<const AllocationCallbacks> allocator, 7547 Dispatch const & d ) const 7548 { 7549 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7550 Result result = static_cast<Result>( 7551 d.vkCreateDisplayPlaneSurfaceKHR( m_instance, 7552 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), 7553 reinterpret_cast<const VkAllocationCallbacks *>( 7554 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7555 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 7556 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 7557 return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 7558 result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); 7559 } 7560 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 7561 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7562 7563 //=== VK_KHR_display_swapchain === 7564 7565 template <typename Dispatch> 7566 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createSharedSwapchainsKHR(uint32_t swapchainCount,const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,Dispatch const & d) const7567 Device::createSharedSwapchainsKHR( uint32_t swapchainCount, 7568 const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, 7569 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7570 VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, 7571 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7572 { 7573 return static_cast<Result>( 7574 d.vkCreateSharedSwapchainsKHR( m_device, 7575 swapchainCount, 7576 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), 7577 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7578 reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) ); 7579 } 7580 7581 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7582 template <typename SwapchainKHRAllocator, typename Dispatch> 7583 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7584 typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR(ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7585 Device::createSharedSwapchainsKHR( 7586 ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 7587 Optional<const AllocationCallbacks> allocator, 7588 Dispatch const & d ) const 7589 { 7590 std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() ); 7591 Result result = static_cast<Result>( 7592 d.vkCreateSharedSwapchainsKHR( m_device, 7593 createInfos.size(), 7594 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 7595 reinterpret_cast<const VkAllocationCallbacks *>( 7596 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7597 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() 
) ) ); 7598 return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); 7599 } 7600 7601 template <typename SwapchainKHRAllocator, 7602 typename Dispatch, 7603 typename B, 7604 typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type> 7605 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7606 typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR(ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const AllocationCallbacks> allocator,SwapchainKHRAllocator & swapchainKHRAllocator,Dispatch const & d) const7607 Device::createSharedSwapchainsKHR( 7608 ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 7609 Optional<const AllocationCallbacks> allocator, 7610 SwapchainKHRAllocator & swapchainKHRAllocator, 7611 Dispatch const & d ) const 7612 { 7613 std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator ); 7614 Result result = static_cast<Result>( 7615 d.vkCreateSharedSwapchainsKHR( m_device, 7616 createInfos.size(), 7617 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 7618 reinterpret_cast<const VkAllocationCallbacks *>( 7619 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7620 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 7621 return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); 7622 } 7623 7624 template <typename Dispatch> 7625 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7626 Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 7627 Optional<const AllocationCallbacks> allocator, 7628 Dispatch const & d ) const 7629 { 7630 SwapchainKHR swapchain; 7631 Result result = static_cast<Result>( 7632 d.vkCreateSharedSwapchainsKHR( m_device, 7633 1, 7634 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 7635 reinterpret_cast<const VkAllocationCallbacks *>( 7636 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7637 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 7638 return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); 7639 } 7640 7641 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7642 template <typename Dispatch, typename SwapchainKHRAllocator> 7643 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7644 typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique(ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7645 Device::createSharedSwapchainsKHRUnique( 7646 ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 7647 Optional<const AllocationCallbacks> allocator, 7648 Dispatch const & d ) const 7649 { 7650 std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains; 7651 std::vector<SwapchainKHR> swapchains( createInfos.size() ); 7652 Result result = static_cast<Result>( 7653 
d.vkCreateSharedSwapchainsKHR( m_device, 7654 createInfos.size(), 7655 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 7656 reinterpret_cast<const VkAllocationCallbacks *>( 7657 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7658 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 7659 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 7660 { 7661 uniqueSwapchains.reserve( createInfos.size() ); 7662 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 7663 for ( size_t i = 0; i < createInfos.size(); i++ ) 7664 { 7665 uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) ); 7666 } 7667 } 7668 return createResultValue( 7669 result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); 7670 } 7671 7672 template <typename Dispatch, 7673 typename SwapchainKHRAllocator, 7674 typename B, 7675 typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, 7676 int>::type> 7677 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7678 typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique(ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,Optional<const AllocationCallbacks> allocator,SwapchainKHRAllocator & swapchainKHRAllocator,Dispatch const & d) const7679 Device::createSharedSwapchainsKHRUnique( 7680 ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, 7681 Optional<const AllocationCallbacks> allocator, 7682 SwapchainKHRAllocator & swapchainKHRAllocator, 7683 Dispatch const & d ) const 7684 { 7685 std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); 7686 std::vector<SwapchainKHR> swapchains( createInfos.size() ); 7687 Result result = static_cast<Result>( 7688 d.vkCreateSharedSwapchainsKHR( m_device, 7689 createInfos.size(), 7690 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), 7691 reinterpret_cast<const VkAllocationCallbacks *>( 7692 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7693 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); 7694 if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 7695 { 7696 uniqueSwapchains.reserve( createInfos.size() ); 7697 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 7698 for ( size_t i = 0; i < createInfos.size(); i++ ) 7699 { 7700 uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) ); 7701 } 7702 } 7703 return createResultValue( 7704 result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); 7705 } 7706 7707 template <typename Dispatch> 7708 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7709 typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7710 Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, 7711 Optional<const AllocationCallbacks> allocator, 7712 Dispatch const & d ) const 7713 { 7714 SwapchainKHR swapchain; 7715 Result result = static_cast<Result>( 7716 d.vkCreateSharedSwapchainsKHR( m_device, 7717 1, 7718 
reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), 7719 reinterpret_cast<const VkAllocationCallbacks *>( 7720 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7721 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); 7722 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 7723 return createResultValue<SwapchainKHR, Dispatch>( 7724 result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter ); 7725 } 7726 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 7727 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7728 7729 #if defined( VK_USE_PLATFORM_XLIB_KHR ) 7730 //=== VK_KHR_xlib_surface === 7731 7732 template <typename Dispatch> 7733 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createXlibSurfaceKHR(const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const7734 Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, 7735 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7736 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 7737 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7738 { 7739 return static_cast<Result>( 7740 d.vkCreateXlibSurfaceKHR( m_instance, 7741 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), 7742 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7743 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 7744 } 7745 7746 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7747 template <typename Dispatch> 7748 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7749 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR(const XlibSurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7750 Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, 7751 Optional<const AllocationCallbacks> allocator, 7752 Dispatch const & d ) const 7753 { 7754 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7755 Result result = static_cast<Result>( 7756 d.vkCreateXlibSurfaceKHR( m_instance, 7757 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), 7758 reinterpret_cast<const VkAllocationCallbacks *>( 7759 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7760 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 7761 return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); 7762 } 7763 7764 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7765 template <typename Dispatch> 7766 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7767 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXlibSurfaceKHRUnique(const XlibSurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7768 Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, 7769 Optional<const AllocationCallbacks> allocator, 7770 Dispatch const & d ) const 7771 { 7772 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7773 Result result = static_cast<Result>( 7774 d.vkCreateXlibSurfaceKHR( m_instance, 7775 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), 7776 reinterpret_cast<const VkAllocationCallbacks *>( 7777 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7778 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 
7779 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 7780 return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 7781 result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter ); 7782 } 7783 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 7784 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7785 7786 template <typename Dispatch> getXlibPresentationSupportKHR(uint32_t queueFamilyIndex,Display * dpy,VisualID visualID,Dispatch const & d) const7787 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, 7788 Display * dpy, 7789 VisualID visualID, 7790 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7791 { 7792 return static_cast<Bool32>( 7793 d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); 7794 } 7795 7796 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7797 template <typename Dispatch> getXlibPresentationSupportKHR(uint32_t queueFamilyIndex,Display & dpy,VisualID visualID,Dispatch const & d) const7798 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, 7799 Display & dpy, 7800 VisualID visualID, 7801 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7802 { 7803 return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); 7804 } 7805 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7806 #endif /*VK_USE_PLATFORM_XLIB_KHR*/ 7807 7808 #if defined( VK_USE_PLATFORM_XCB_KHR ) 7809 //=== VK_KHR_xcb_surface === 7810 7811 template <typename Dispatch> 7812 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createXcbSurfaceKHR(const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const7813 Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, 7814 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7815 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 7816 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7817 { 7818 return static_cast<Result>( 7819 d.vkCreateXcbSurfaceKHR( m_instance, 7820 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), 7821 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7822 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 7823 } 7824 7825 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7826 template <typename Dispatch> 7827 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7828 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR(const XcbSurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7829 Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, 7830 Optional<const AllocationCallbacks> allocator, 7831 Dispatch const & d ) const 7832 { 7833 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7834 Result result = static_cast<Result>( 7835 d.vkCreateXcbSurfaceKHR( m_instance, 7836 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), 7837 reinterpret_cast<const VkAllocationCallbacks *>( 7838 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7839 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 7840 return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); 7841 } 7842 7843 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7844 template <typename Dispatch> 7845 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7846 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXcbSurfaceKHRUnique(const XcbSurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7847 Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, 7848 Optional<const AllocationCallbacks> allocator, 7849 Dispatch const & d ) const 7850 { 7851 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7852 Result result = static_cast<Result>( 7853 d.vkCreateXcbSurfaceKHR( m_instance, 7854 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), 7855 reinterpret_cast<const VkAllocationCallbacks *>( 7856 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7857 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 7858 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 7859 return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 7860 result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter ); 7861 } 7862 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 7863 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7864 7865 template <typename Dispatch> getXcbPresentationSupportKHR(uint32_t queueFamilyIndex,xcb_connection_t * connection,xcb_visualid_t visual_id,Dispatch const & d) const7866 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, 7867 xcb_connection_t * connection, 7868 xcb_visualid_t visual_id, 7869 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7870 { 7871 return static_cast<Bool32>( 7872 d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); 7873 } 7874 7875 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7876 template <typename Dispatch> getXcbPresentationSupportKHR(uint32_t queueFamilyIndex,xcb_connection_t & connection,xcb_visualid_t visual_id,Dispatch const & d) const7877 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, 7878 xcb_connection_t & connection, 7879 xcb_visualid_t visual_id, 7880 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7881 { 7882 return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); 7883 } 7884 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7885 #endif /*VK_USE_PLATFORM_XCB_KHR*/ 7886 7887 #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) 7888 //=== VK_KHR_wayland_surface === 7889 7890 template <typename Dispatch> 7891 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createWaylandSurfaceKHR(const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const7892 Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, 7893 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7894 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 7895 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7896 { 7897 return static_cast<Result>( 7898 d.vkCreateWaylandSurfaceKHR( m_instance, 7899 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), 7900 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7901 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 7902 } 7903 7904 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7905 template <typename Dispatch> 7906 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
VULKAN_HPP_INLINE 7907 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR(const WaylandSurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7908 Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, 7909 Optional<const AllocationCallbacks> allocator, 7910 Dispatch const & d ) const 7911 { 7912 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7913 Result result = static_cast<Result>( 7914 d.vkCreateWaylandSurfaceKHR( m_instance, 7915 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), 7916 reinterpret_cast<const VkAllocationCallbacks *>( 7917 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7918 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 7919 return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); 7920 } 7921 7922 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7923 template <typename Dispatch> 7924 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7925 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWaylandSurfaceKHRUnique(const WaylandSurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7926 Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, 7927 Optional<const AllocationCallbacks> allocator, 7928 Dispatch const & d ) const 7929 { 7930 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7931 Result result = static_cast<Result>( 7932 d.vkCreateWaylandSurfaceKHR( m_instance, 7933 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), 7934 reinterpret_cast<const VkAllocationCallbacks *>( 7935 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7936 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 7937 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 7938 return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 7939 result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter ); 7940 } 7941 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 7942 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7943 7944 template <typename Dispatch> getWaylandPresentationSupportKHR(uint32_t queueFamilyIndex,struct wl_display * display,Dispatch const & d) const7945 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( 7946 uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7947 { 7948 return static_cast<Bool32>( 7949 d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); 7950 } 7951 7952 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7953 template <typename Dispatch> getWaylandPresentationSupportKHR(uint32_t queueFamilyIndex,struct wl_display & display,Dispatch const & d) const7954 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( 7955 uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7956 { 7957 return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); 7958 } 7959 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 7960 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ 7961 7962 #if defined( VK_USE_PLATFORM_ANDROID_KHR ) 7963 //=== VK_KHR_android_surface === 7964 7965 template <typename Dispatch> 7966 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
createAndroidSurfaceKHR(const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const7967 Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, 7968 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 7969 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 7970 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 7971 { 7972 return static_cast<Result>( 7973 d.vkCreateAndroidSurfaceKHR( m_instance, 7974 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), 7975 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 7976 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 7977 } 7978 7979 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 7980 template <typename Dispatch> 7981 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 7982 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR(const AndroidSurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const7983 Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, 7984 Optional<const AllocationCallbacks> allocator, 7985 Dispatch const & d ) const 7986 { 7987 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 7988 Result result = static_cast<Result>( 7989 d.vkCreateAndroidSurfaceKHR( m_instance, 7990 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), 7991 reinterpret_cast<const VkAllocationCallbacks *>( 7992 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 7993 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 7994 return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); 7995 } 7996 7997 # ifndef VULKAN_HPP_NO_SMART_HANDLE 7998 template <typename Dispatch> 7999 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 8000 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createAndroidSurfaceKHRUnique(const AndroidSurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8001 Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, 8002 Optional<const AllocationCallbacks> allocator, 8003 Dispatch const & d ) const 8004 { 8005 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8006 Result result = static_cast<Result>( 8007 d.vkCreateAndroidSurfaceKHR( m_instance, 8008 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), 8009 reinterpret_cast<const VkAllocationCallbacks *>( 8010 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8011 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8012 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 8013 return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 8014 result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter ); 8015 } 8016 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 8017 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8018 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ 8019 8020 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 8021 //=== VK_KHR_win32_surface === 8022 8023 template <typename Dispatch> 8024 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createWin32SurfaceKHR(const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const8025 Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, 8026 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8027 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 8028 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8029 { 8030 return static_cast<Result>( 8031 d.vkCreateWin32SurfaceKHR( m_instance, 8032 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), 8033 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8034 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 8035 } 8036 8037 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8038 template <typename Dispatch> 8039 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 8040 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR(const Win32SurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8041 Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, 8042 Optional<const AllocationCallbacks> allocator, 8043 Dispatch const & d ) const 8044 { 8045 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8046 Result result = static_cast<Result>( 8047 d.vkCreateWin32SurfaceKHR( m_instance, 8048 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), 8049 reinterpret_cast<const VkAllocationCallbacks *>( 8050 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8051 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8052 return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); 8053 } 8054 8055 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8056 template <typename Dispatch> 8057 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 8058 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWin32SurfaceKHRUnique(const Win32SurfaceCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8059 Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, 8060 Optional<const AllocationCallbacks> allocator, 8061 Dispatch const & d ) const 8062 { 8063 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 8064 Result result = static_cast<Result>( 8065 d.vkCreateWin32SurfaceKHR( m_instance, 8066 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), 8067 reinterpret_cast<const VkAllocationCallbacks *>( 8068 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8069 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 8070 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 8071 return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 8072 result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter ); 8073 } 8074 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 8075 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8076 8077 template <typename Dispatch> getWin32PresentationSupportKHR(uint32_t queueFamilyIndex,Dispatch const & d) const8078 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( 8079 uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8080 { 8081 return static_cast<Bool32>( 8082 d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); 8083 } 8084 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 8085 8086 //=== VK_EXT_debug_report === 8087 8088 template <typename Dispatch> 
8089 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDebugReportCallbackEXT(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,Dispatch const & d) const8090 Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, 8091 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8092 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, 8093 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8094 { 8095 return static_cast<Result>( 8096 d.vkCreateDebugReportCallbackEXT( m_instance, 8097 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), 8098 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8099 reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) ); 8100 } 8101 8102 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8103 template <typename Dispatch> 8104 VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT(const DebugReportCallbackCreateInfoEXT & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8105 Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, 8106 Optional<const AllocationCallbacks> allocator, 8107 Dispatch const & d ) const 8108 { 8109 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; 8110 Result result = static_cast<Result>( 8111 d.vkCreateDebugReportCallbackEXT( m_instance, 8112 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), 8113 reinterpret_cast<const VkAllocationCallbacks *>( 8114 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8115 reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) ); 8116 return createResultValue( 8117 result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); 8118 } 8119 8120 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8121 template <typename Dispatch> 8122 VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type createDebugReportCallbackEXTUnique(const DebugReportCallbackCreateInfoEXT & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8123 Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, 8124 Optional<const AllocationCallbacks> allocator, 8125 Dispatch const & d ) const 8126 { 8127 VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; 8128 Result result = static_cast<Result>( 8129 d.vkCreateDebugReportCallbackEXT( m_instance, 8130 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), 8131 reinterpret_cast<const VkAllocationCallbacks *>( 8132 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8133 reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) ); 8134 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 8135 return createResultValue<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( 8136 result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter ); 8137 } 8138 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 8139 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8140 8141 template <typename Dispatch> 8142 VULKAN_HPP_INLINE void destroyDebugReportCallbackEXT(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8143 Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 8144 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8145 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8146 { 8147 d.vkDestroyDebugReportCallbackEXT( m_instance, 8148 static_cast<VkDebugReportCallbackEXT>( callback ), 8149 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8150 } 8151 8152 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8153 template <typename Dispatch> destroyDebugReportCallbackEXT(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8154 VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 8155 Optional<const AllocationCallbacks> allocator, 8156 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8157 { 8158 d.vkDestroyDebugReportCallbackEXT( 8159 m_instance, 8160 static_cast<VkDebugReportCallbackEXT>( callback ), 8161 reinterpret_cast<const VkAllocationCallbacks *>( 8162 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 8163 } 8164 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8165 8166 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8167 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 8168 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8169 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8170 { 8171 d.vkDestroyDebugReportCallbackEXT( m_instance, 8172 static_cast<VkDebugReportCallbackEXT>( callback ), 8173 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8174 } 8175 8176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8177 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8178 VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, 8179 Optional<const AllocationCallbacks> allocator, 8180 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8181 { 8182 d.vkDestroyDebugReportCallbackEXT( 8183 m_instance, 8184 static_cast<VkDebugReportCallbackEXT>( callback ), 8185 reinterpret_cast<const VkAllocationCallbacks *>( 8186 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 8187 } 8188 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8189 8190 template <typename Dispatch> debugReportMessageEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,uint64_t object,size_t location,int32_t messageCode,const char * pLayerPrefix,const char * pMessage,Dispatch const & d) const8191 VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, 8192 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, 8193 uint64_t object, 8194 size_t location, 8195 int32_t messageCode, 8196 const char * pLayerPrefix, 8197 const char * pMessage, 8198 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8199 { 8200 d.vkDebugReportMessageEXT( m_instance, 8201 static_cast<VkDebugReportFlagsEXT>( flags ), 8202 static_cast<VkDebugReportObjectTypeEXT>( objectType ), 8203 object, 8204 location, 8205 messageCode, 8206 pLayerPrefix, 8207 pMessage ); 8208 } 8209 8210 #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE 8211 template <typename Dispatch> debugReportMessageEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,uint64_t object,size_t location,int32_t messageCode,const std::string & layerPrefix,const std::string & message,Dispatch const & d) const8212 VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, 8213 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, 8214 uint64_t object, 8215 size_t location, 8216 int32_t messageCode, 8217 const std::string & layerPrefix, 8218 const std::string & message, 8219 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8220 { 8221 d.vkDebugReportMessageEXT( m_instance, 8222 static_cast<VkDebugReportFlagsEXT>( flags ), 8223 static_cast<VkDebugReportObjectTypeEXT>( objectType ), 8224 object, 8225 location, 8226 messageCode, 8227 layerPrefix.c_str(), 8228 message.c_str() ); 8229 } 8230 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8231 8232 //=== VK_EXT_debug_marker === 8233 8234 template <typename Dispatch> debugMarkerSetObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,Dispatch const & d) const8235 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( 8236 const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8237 { 8238 return static_cast<Result>( 8239 d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) ); 8240 } 8241 8242 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8243 template <typename Dispatch> 8244 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type debugMarkerSetObjectTagEXT(const DebugMarkerObjectTagInfoEXT & tagInfo,Dispatch const & d) const8245 Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const 8246 { 8247 Result result = static_cast<Result>( 8248 d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) ); 8249 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); 8250 } 8251 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8252 8253 template <typename Dispatch> debugMarkerSetObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,Dispatch const & d) const8254 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( 8255 const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8256 { 8257 return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( 8258 m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) ); 8259 } 8260 8261 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8262 template <typename Dispatch> 8263 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type debugMarkerSetObjectNameEXT(const DebugMarkerObjectNameInfoEXT & nameInfo,Dispatch const & d) const8264 Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const 8265 { 8266 Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( 8267 m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) ); 8268 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); 8269 } 8270 #endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8271 8272 template <typename Dispatch> 8273 VULKAN_HPP_INLINE void debugMarkerBeginEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,Dispatch const & d) const8274 CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, 8275 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8276 { 8277 d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); 8278 } 8279 8280 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8281 template <typename Dispatch> debugMarkerBeginEXT(const DebugMarkerMarkerInfoEXT & markerInfo,Dispatch const & d) const8282 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, 8283 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8284 { 8285 d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); 8286 } 8287 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8288 8289 template <typename Dispatch> debugMarkerEndEXT(Dispatch const & d) const8290 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8291 { 8292 d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); 8293 } 8294 8295 template <typename Dispatch> 8296 VULKAN_HPP_INLINE void debugMarkerInsertEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,Dispatch const & d) const8297 CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, 8298 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8299 { 8300 d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); 8301 } 8302 8303 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8304 template <typename Dispatch> debugMarkerInsertEXT(const DebugMarkerMarkerInfoEXT & markerInfo,Dispatch const & d) const8305 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, 8306 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8307 { 8308 d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); 8309 } 8310 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8311 8312 #if defined( VK_ENABLE_BETA_EXTENSIONS ) 8313 //=== VK_KHR_video_queue === 8314 8315 template <typename Dispatch> 8316 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getVideoCapabilitiesKHR(const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,Dispatch const & d) const8317 PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile, 8318 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, 8319 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8320 { 8321 return static_cast<Result>( 8322 d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, 8323 reinterpret_cast<const VkVideoProfileKHR *>( pVideoProfile ), 8324 reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) ); 8325 } 8326 8327 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8328 template <typename Dispatch> 8329 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 8330 typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type getVideoCapabilitiesKHR(const VideoProfileKHR & videoProfile,Dispatch const & d) const8331 PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const 8332 { 8333 
VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; 8334 Result result = static_cast<Result>( 8335 d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, 8336 reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ), 8337 reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) ); 8338 return createResultValue( 8339 result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); 8340 } 8341 8342 template <typename X, typename Y, typename... Z, typename Dispatch> 8343 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type getVideoCapabilitiesKHR(const VideoProfileKHR & videoProfile,Dispatch const & d) const8344 PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const 8345 { 8346 StructureChain<X, Y, Z...> structureChain; 8347 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = 8348 structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>(); 8349 Result result = static_cast<Result>( 8350 d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, 8351 reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ), 8352 reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) ); 8353 return createResultValue( 8354 result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); 8355 } 8356 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8357 8358 template <typename Dispatch> getVideoFormatPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,uint32_t * pVideoFormatPropertyCount,VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,Dispatch const & d) const8359 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR( 8360 const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, 8361 uint32_t * pVideoFormatPropertyCount, 8362 VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, 8363 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8364 { 8365 return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 8366 m_physicalDevice, 8367 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ), 8368 pVideoFormatPropertyCount, 8369 reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) ); 8370 } 8371 8372 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8373 template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch> 8374 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 8375 typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR(const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,Dispatch const & d) const8376 PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, 8377 Dispatch const & d ) const 8378 { 8379 std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties; 8380 uint32_t videoFormatPropertyCount; 8381 Result result; 8382 do 8383 { 8384 result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 8385 m_physicalDevice, 8386 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 8387 &videoFormatPropertyCount, 8388 nullptr ) ); 8389 if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) 8390 { 8391 videoFormatProperties.resize( videoFormatPropertyCount ); 8392 result = 
static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 8393 m_physicalDevice, 8394 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 8395 &videoFormatPropertyCount, 8396 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 8397 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 8398 } 8399 } while ( result == Result::eIncomplete ); 8400 if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) ) 8401 { 8402 videoFormatProperties.resize( videoFormatPropertyCount ); 8403 } 8404 return createResultValue( 8405 result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 8406 } 8407 8408 template <typename VideoFormatPropertiesKHRAllocator, 8409 typename Dispatch, 8410 typename B, 8411 typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type> 8412 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 8413 typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type getVideoFormatPropertiesKHR(const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,Dispatch const & d) const8414 PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, 8415 VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, 8416 Dispatch const & d ) const 8417 { 8418 std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( 8419 videoFormatPropertiesKHRAllocator ); 8420 uint32_t videoFormatPropertyCount; 8421 Result result; 8422 do 8423 { 8424 result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 8425 m_physicalDevice, 8426 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 8427 &videoFormatPropertyCount, 8428 nullptr ) ); 8429 if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) 8430 { 8431 videoFormatProperties.resize( videoFormatPropertyCount ); 8432 result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( 8433 m_physicalDevice, 8434 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), 8435 &videoFormatPropertyCount, 8436 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); 8437 VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); 8438 } 8439 } while ( result == Result::eIncomplete ); 8440 if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) ) 8441 { 8442 videoFormatProperties.resize( videoFormatPropertyCount ); 8443 } 8444 return createResultValue( 8445 result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); 8446 } 8447 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8448 8449 template <typename Dispatch> 8450 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createVideoSessionKHR(const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,Dispatch const & d) const8451 Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, 8452 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8453 VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, 8454 Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT 8455 { 8456 return static_cast<Result>( 8457 d.vkCreateVideoSessionKHR( m_device, 8458 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), 8459 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 8460 reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) ); 8461 } 8462 8463 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8464 template <typename Dispatch> 8465 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 8466 typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type createVideoSessionKHR(const VideoSessionCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8467 Device::createVideoSessionKHR( const VideoSessionCreateInfoKHR & createInfo, 8468 Optional<const AllocationCallbacks> allocator, 8469 Dispatch const & d ) const 8470 { 8471 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 8472 Result result = static_cast<Result>( 8473 d.vkCreateVideoSessionKHR( m_device, 8474 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 8475 reinterpret_cast<const VkAllocationCallbacks *>( 8476 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8477 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 8478 return createResultValue( result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); 8479 } 8480 8481 # ifndef VULKAN_HPP_NO_SMART_HANDLE 8482 template <typename Dispatch> 8483 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 8484 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type createVideoSessionKHRUnique(const VideoSessionCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const8485 Device::createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR & createInfo, 8486 Optional<const AllocationCallbacks> allocator, 8487 Dispatch const & d ) const 8488 { 8489 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; 8490 Result result = static_cast<Result>( 8491 d.vkCreateVideoSessionKHR( m_device, 8492 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), 8493 reinterpret_cast<const VkAllocationCallbacks *>( 8494 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 8495 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); 8496 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 8497 return createResultValue<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( 8498 result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique", deleter ); 8499 } 8500 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 8501 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 8502 8503 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const8504 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, 8505 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 8506 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 8507 { 8508 d.vkDestroyVideoSessionKHR( m_device, 8509 static_cast<VkVideoSessionKHR>( videoSession ), 8510 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 8511 } 8512 8513 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 8514 template <typename Dispatch> destroyVideoSessionKHR(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,Optional<const 

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionKHR( m_device,
                                static_cast<VkVideoSessionKHR>( videoSession ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                                         Optional<const AllocationCallbacks> allocator,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionKHR( m_device,
                                static_cast<VkVideoSessionKHR>( videoSession ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionKHR( m_device,
                                static_cast<VkVideoSessionKHR>( videoSession ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionKHR( m_device,
                                static_cast<VkVideoSessionKHR>( videoSession ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR(
    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
    uint32_t * pVideoSessionMemoryRequirementsCount,
    VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
      m_device,
      static_cast<VkVideoSessionKHR>( videoSession ),
      pVideoSessionMemoryRequirementsCount,
      reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( pVideoSessionMemoryRequirements ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename VideoGetMemoryPropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
    Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                                  Dispatch const & d ) const
  {
    std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements;
    uint32_t videoSessionMemoryRequirementsCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
        m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
      {
        videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
        result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
          m_device,
          static_cast<VkVideoSessionKHR>( videoSession ),
          &videoSessionMemoryRequirementsCount,
          reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
        VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) &&
         ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
    {
      videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
    }
    return createResultValue( result,
                              videoSessionMemoryRequirements,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
  }

  template <
    typename VideoGetMemoryPropertiesKHRAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
    Device::getVideoSessionMemoryRequirementsKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
      VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
      Dispatch const & d ) const
  {
    std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements(
      videoGetMemoryPropertiesKHRAllocator );
    uint32_t videoSessionMemoryRequirementsCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
        m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
      {
        videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
        result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
          m_device,
          static_cast<VkVideoSessionKHR>( videoSession ),
          &videoSessionMemoryRequirementsCount,
          reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
        VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) &&
         ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
    {
      videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
    }
    return createResultValue( result,
                              videoSessionMemoryRequirements,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                       uint32_t videoSessionBindMemoryCount,
                                       const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkBindVideoSessionMemoryKHR( m_device,
                                     static_cast<VkVideoSessionKHR>( videoSession ),
                                     videoSessionBindMemoryCount,
                                     reinterpret_cast<const VkVideoBindMemoryKHR *>( pVideoSessionBindMemories ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindVideoSessionMemoryKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
      Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkBindVideoSessionMemoryKHR(
      m_device,
      static_cast<VkVideoSessionKHR>( videoSession ),
      videoSessionBindMemories.size(),
      reinterpret_cast<const VkVideoBindMemoryKHR *>( videoSessionBindMemories.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
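
  //=== Illustrative usage sketch (editorial addition, not generated from the registry) ===
  // A minimal sketch of the query-then-bind pattern served by the two wrappers above, assuming
  // hypothetical handles `device` and `videoSession` created elsewhere and application-side
  // memory allocation that is omitted here; the Dispatch argument keeps its declared default:
  //
  //   auto requirements = device.getVideoSessionMemoryRequirementsKHR( videoSession );
  //   std::vector<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> bindInfos( requirements.size() );
  //   // fill each VideoBindMemoryKHR from the matching requirements entry and a suitable
  //   // device-memory allocation (omitted), then bind them all in one call:
  //   device.bindVideoSessionMemoryKHR( videoSession, bindInfos );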

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR(
    const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
      m_device,
      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
      reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
    Device::createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo,
                                             Optional<const AllocationCallbacks> allocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
    Result result = static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
      m_device,
      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
    return createResultValue(
      result, videoSessionParameters, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
  }

#   ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
    Device::createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo,
                                                   Optional<const AllocationCallbacks> allocator,
                                                   Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
    Result result = static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
      m_device,
      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>(
      result,
      videoSessionParameters,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique",
      deleter );
  }
#   endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR(
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
    const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                             const VideoSessionParametersUpdateInfoKHR & updateInfo,
                                             Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionParametersKHR( m_device,
                                          static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                              Optional<const AllocationCallbacks> allocator,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionParametersKHR( m_device,
                                          static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdControlVideoCodingKHR( m_commandBuffer,
                                  reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdControlVideoCodingKHR( m_commandBuffer,
                                  reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_decode_queue ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pFrameInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &frameInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_EXT_transform_feedback ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
                                                    uint32_t bindingCount,
                                                    const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                    const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                    const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
                                            firstBinding,
                                            bindingCount,
                                            reinterpret_cast<const VkBuffer *>( pBuffers ),
                                            reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                            reinterpret_cast<const VkDeviceSize *>( pSizes ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
                                            firstBinding,
                                            buffers.size(),
                                            reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                            reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                            reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                              uint32_t counterBufferCount,
                                              const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
                                              const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
                                      firstCounterBuffer,
                                      counterBufferCount,
                                      reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
                                      reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT(
    uint32_t firstCounterBuffer,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
#  else
    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
                                      firstCounterBuffer,
                                      counterBuffers.size(),
                                      reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
                                      reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                            uint32_t counterBufferCount,
                                            const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
                                    firstCounterBuffer,
                                    counterBufferCount,
                                    reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
                                    reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT(
    uint32_t firstCounterBuffer,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
#  else
    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
                                    firstCounterBuffer,
                                    counterBuffers.size(),
                                    reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
                                    reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                              uint32_t query,
                                                              VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                                                              uint32_t index,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginQueryIndexedEXT(
      m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                            uint32_t query,
                                                            uint32_t index,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
                                                                  uint32_t firstInstance,
                                                                  VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
                                                                  uint32_t counterOffset,
                                                                  uint32_t vertexStride,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
                                     instanceCount,
                                     firstInstance,
                                     static_cast<VkBuffer>( counterBuffer ),
                                     static_cast<VkDeviceSize>( counterBufferOffset ),
                                     counterOffset,
                                     vertexStride );
  }
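
  //=== Illustrative usage sketch (editorial addition, not generated from the registry) ===
  // A minimal sketch of recording a transform-feedback capture with the wrappers above, assuming
  // a hypothetical command buffer `cmd` in the recording state, a buffer `xfbBuffer` created with
  // transform-feedback usage, an offset of zero, and a `vertexCount` chosen by the application;
  // the Dispatch argument keeps its declared default:
  //
  //   cmd.bindTransformFeedbackBuffersEXT( 0, xfbBuffer, VULKAN_HPP_NAMESPACE::DeviceSize( 0 ), nullptr );
  //   cmd.beginTransformFeedbackEXT( 0, nullptr, nullptr );
  //   cmd.draw( vertexCount, 1, 0, 0 );
  //   cmd.endTransformFeedbackEXT( 0, nullptr, nullptr );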

  //=== VK_NVX_binary_import ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                               VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
                                                       reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
    Device::createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
    Result result = static_cast<Result>(
      d.vkCreateCuModuleNVX( m_device,
                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
    return createResultValue( result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
    Device::createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
    Result result = static_cast<Result>(
      d.vkCreateCuModuleNVX( m_device,
                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>(
      result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                 VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateCuFunctionNVX( m_device,
                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                               reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
    Device::createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo,
                                 Optional<const AllocationCallbacks> allocator,
                                 Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
    Result result = static_cast<Result>(
      d.vkCreateCuFunctionNVX( m_device,
                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
    return createResultValue( result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
    Device::createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo,
                                       Optional<const AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
    Result result = static_cast<Result>(
      d.vkCreateCuFunctionNVX( m_device,
                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>(
      result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuModuleNVX(
      m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
                                                     Optional<const AllocationCallbacks> allocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuModuleNVX( m_device,
                            static_cast<VkCuModuleNVX>( module ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuModuleNVX(
      m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuModuleNVX( m_device,
                            static_cast<VkCuModuleNVX>( module ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                                       Optional<const AllocationCallbacks> allocator,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
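
  //=== Illustrative usage sketch (editorial addition, not generated from the registry) ===
  // A minimal sketch of VK_NVX_binary_import through the wrappers above, assuming a hypothetical
  // device `device`, an application-provided binary blob (`blobSize` / `blobData`), and a kernel
  // entry point name known to that blob; the Dispatch argument keeps its declared default:
  //
  //   auto cuModule   = device.createCuModuleNVXUnique( CuModuleCreateInfoNVX( blobSize, blobData ) );
  //   auto cuFunction = device.createCuFunctionNVXUnique( CuFunctionCreateInfoNVX( *cuModule, "myKernel" ) );
  //   // later, while recording a command buffer `cmd`, a launch description can be submitted with
  //   //   cmd.cuLaunchKernelNVX( launchInfo );
  //   // where `launchInfo` is a filled-in CuLaunchInfoNVX referencing *cuFunction.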

  //=== VK_NVX_image_view_handle ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
                                    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetImageViewAddressNVX( m_device,
                                  static_cast<VkImageView>( imageView ),
                                  reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
    Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
    Result result = static_cast<Result>(
      d.vkGetImageViewAddressNVX( m_device,
                                  static_cast<VkImageView>( imageView ),
                                  reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_AMD_draw_indirect_count ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                              VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                              uint32_t maxDrawCount,
                                                              uint32_t stride,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
                                 static_cast<VkBuffer>( buffer ),
                                 static_cast<VkDeviceSize>( offset ),
                                 static_cast<VkBuffer>( countBuffer ),
                                 static_cast<VkDeviceSize>( countBufferOffset ),
                                 maxDrawCount,
                                 stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                     VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                     uint32_t maxDrawCount,
                                                                     uint32_t stride,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
                                        static_cast<VkBuffer>( buffer ),
                                        static_cast<VkDeviceSize>( offset ),
                                        static_cast<VkBuffer>( countBuffer ),
                                        static_cast<VkDeviceSize>( countBufferOffset ),
                                        maxDrawCount,
                                        stride );
  }

  //=== VK_AMD_shader_info ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
                              size_t * pInfoSize,
                              void * pInfo,
                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                      static_cast<VkPipeline>( pipeline ),
                                                      static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                      static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                      pInfoSize,
                                                      pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
                              Dispatch const & d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> info;
    size_t infoSize;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                          static_cast<VkPipeline>( pipeline ),
                                                          static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                          static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                          &infoSize,
                                                          nullptr ) );
      if ( ( result == Result::eSuccess ) && infoSize )
      {
        info.resize( infoSize );
        result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                            static_cast<VkPipeline>( pipeline ),
                                                            static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                            static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                            &infoSize,
                                                            reinterpret_cast<void *>( info.data() ) ) );
        VULKAN_HPP_ASSERT( infoSize <= info.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
    {
      info.resize( infoSize );
    }
    return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
  }

  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
                              Uint8_tAllocator & uint8_tAllocator,
                              Dispatch const & d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
    size_t infoSize;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                          static_cast<VkPipeline>( pipeline ),
                                                          static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                          static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                          &infoSize,
                                                          nullptr ) );
      if ( ( result == Result::eSuccess ) && infoSize )
      {
        info.resize( infoSize );
        result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                            static_cast<VkPipeline>( pipeline ),
                                                            static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                            static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                            &infoSize,
                                                            reinterpret_cast<void *>( info.data() ) ) );
        VULKAN_HPP_ASSERT( infoSize <= info.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
    {
      info.resize( infoSize );
    }
    return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
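
  //=== Illustrative usage sketch (editorial addition, not generated from the registry) ===
  // A minimal sketch of the enhanced getShaderInfoAMD wrapper above, which hides the
  // size-query / data-query loop; `device` and `pipeline` are hypothetical handles created
  // elsewhere, and the Dispatch argument keeps its declared default:
  //
  //   std::vector<uint8_t> disassembly = device.getShaderInfoAMD(
  //     pipeline, ShaderStageFlagBits::eFragment, ShaderInfoTypeAMD::eDisassembly );
  //   // `disassembly` now holds the raw bytes reported by vkGetShaderInfoAMD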

#if defined( VK_USE_PLATFORM_GGP )
  //=== VK_GGP_stream_descriptor_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP(
    const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
    VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
      reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                Optional<const AllocationCallbacks> allocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
  }

#   ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                      Optional<const AllocationCallbacks> allocator,
                                                      Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
  }
#   endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_GGP*/

  //=== VK_NV_external_memory_capabilities ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV(
    VULKAN_HPP_NAMESPACE::Format format,
    VULKAN_HPP_NAMESPACE::ImageType type,
    VULKAN_HPP_NAMESPACE::ImageTiling tiling,
    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkImageTiling>( tiling ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageCreateFlags>( flags ),
      static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
      reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
    PhysicalDevice::getExternalImageFormatPropertiesNV(
      VULKAN_HPP_NAMESPACE::Format format,
      VULKAN_HPP_NAMESPACE::ImageType type,
      VULKAN_HPP_NAMESPACE::ImageTiling tiling,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
      VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
      Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkImageTiling>( tiling ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageCreateFlags>( flags ),
      static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
      reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
    return createResultValue( result,
                              externalImageFormatProperties,
                              VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_NV_external_memory_win32 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
                                    HANDLE * pHandle,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetMemoryWin32HandleNV( m_device,
                                  static_cast<VkDeviceMemory>( memory ),
                                  static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
                                  pHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
                                    Dispatch const & d ) const
  {
    HANDLE handle;
    Result result =
      static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device,
                                                       static_cast<VkDeviceMemory>( memory ),
                                                       static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
                                                       &handle ) );
    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_KHR_get_physical_device_properties2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
    PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return features;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Z, typename Dispatch> 9651 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const & d) const9652 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9653 { 9654 StructureChain<X, Y, Z...> structureChain; 9655 VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = 9656 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); 9657 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, 9658 reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); 9659 return structureChain; 9660 } 9661 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 9662 9663 template <typename Dispatch> 9664 VULKAN_HPP_INLINE void getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,Dispatch const & d) const9665 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, 9666 VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, 9667 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9668 { 9669 d.vkGetPhysicalDeviceFormatProperties2KHR( 9670 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); 9671 } 9672 9673 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9674 template <typename Dispatch> 9675 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const9676 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, 9677 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9678 { 9679 VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; 9680 d.vkGetPhysicalDeviceFormatProperties2KHR( 9681 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 9682 return formatProperties; 9683 } 9684 9685 template <typename X, typename Y, typename... 
Z, typename Dispatch> 9686 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format,Dispatch const & d) const9687 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, 9688 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9689 { 9690 StructureChain<X, Y, Z...> structureChain; 9691 VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = 9692 structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); 9693 d.vkGetPhysicalDeviceFormatProperties2KHR( 9694 m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); 9695 return structureChain; 9696 } 9697 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 9698 9699 template <typename Dispatch> getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,Dispatch const & d) const9700 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( 9701 const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, 9702 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, 9703 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9704 { 9705 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( 9706 m_physicalDevice, 9707 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), 9708 reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); 9709 } 9710 9711 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9712 template <typename Dispatch> 9713 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 9714 typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR(const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const9715 PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, 9716 Dispatch const & d ) const 9717 { 9718 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; 9719 Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( 9720 m_physicalDevice, 9721 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 9722 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 9723 return createResultValue( 9724 result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); 9725 } 9726 9727 template <typename X, typename Y, typename... 
Z, typename Dispatch> 9728 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR(const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,Dispatch const & d) const9729 PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, 9730 Dispatch const & d ) const 9731 { 9732 StructureChain<X, Y, Z...> structureChain; 9733 VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = 9734 structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); 9735 Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( 9736 m_physicalDevice, 9737 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), 9738 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); 9739 return createResultValue( 9740 result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); 9741 } 9742 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 9743 9744 template <typename Dispatch> 9745 VULKAN_HPP_INLINE void getQueueFamilyProperties2KHR(uint32_t * pQueueFamilyPropertyCount,VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,Dispatch const & d) const9746 PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, 9747 VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, 9748 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9749 { 9750 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 9751 m_physicalDevice, 9752 pQueueFamilyPropertyCount, 9753 reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); 9754 } 9755 9756 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9757 template <typename QueueFamilyProperties2Allocator, typename Dispatch> 9758 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR(Dispatch const & d) const9759 PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const 9760 { 9761 std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; 9762 uint32_t queueFamilyPropertyCount; 9763 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); 9764 queueFamilyProperties.resize( queueFamilyPropertyCount ); 9765 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( 9766 m_physicalDevice, 9767 &queueFamilyPropertyCount, 9768 reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); 9769 VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); 9770 return queueFamilyProperties; 9771 } 9772 9773 template <typename QueueFamilyProperties2Allocator, 9774 typename Dispatch, 9775 typename B, 9776 typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type> 9777 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR(QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,Dispatch const & d) const9778 PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, 9779 Dispatch const & d ) const 9780 { 9781 std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( 9782 queueFamilyProperties2Allocator ); 9783 uint32_t queueFamilyPropertyCount; 9784 
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }

  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
  {
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    std::vector<StructureChain, StructureChainAllocator>      returnVector( queueFamilyPropertyCount );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext =
        returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return returnVector;
  }

  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator,
                                                  Dispatch const &          d ) const
  {
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount,
                                                                       structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext =
        returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return returnVector;
  }
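  // Usage sketch (editorial addition, not part of the generated registry output): reading per-queue-family
  // data together with an extension structure through the StructureChain overload above. Assumes
  // `physicalDevice` is a valid PhysicalDevice, VK_KHR_get_physical_device_properties2 and
  // VK_NV_device_diagnostic_checkpoints are available, and the default dispatcher has
  // vkGetPhysicalDeviceQueueFamilyProperties2KHR loaded.
  //
  //   using Chain = VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2,
  //                                                      VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>;
  //   auto chains = physicalDevice.getQueueFamilyProperties2KHR<Chain>();
  //   for ( auto const & chain : chains )
  //   {
  //     auto const & properties = chain.get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>();
  //     // properties.queueFamilyProperties.queueFlags, .queueCount, ... plus the chained NV data.
  //   }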
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 9850 9851 template <typename Dispatch> 9852 VULKAN_HPP_INLINE void getMemoryProperties2KHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,Dispatch const & d) const9853 PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, 9854 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9855 { 9856 d.vkGetPhysicalDeviceMemoryProperties2KHR( 9857 m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); 9858 } 9859 9860 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9861 template <typename Dispatch> 9862 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const & d) const9863 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9864 { 9865 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; 9866 d.vkGetPhysicalDeviceMemoryProperties2KHR( 9867 m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 9868 return memoryProperties; 9869 } 9870 9871 template <typename X, typename Y, typename... Z, typename Dispatch> 9872 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const & d) const9873 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9874 { 9875 StructureChain<X, Y, Z...> structureChain; 9876 VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = 9877 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); 9878 d.vkGetPhysicalDeviceMemoryProperties2KHR( 9879 m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); 9880 return structureChain; 9881 } 9882 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 9883 9884 template <typename Dispatch> getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,Dispatch const & d) const9885 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( 9886 const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, 9887 uint32_t * pPropertyCount, 9888 VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, 9889 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9890 { 9891 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 9892 m_physicalDevice, 9893 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), 9894 pPropertyCount, 9895 reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); 9896 } 9897 9898 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9899 template <typename SparseImageFormatProperties2Allocator, typename Dispatch> 9900 VULKAN_HPP_NODISCARD 9901 VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR(const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,Dispatch const & d) const9902 PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 9903 Dispatch const & d ) const 9904 { 9905 std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; 9906 uint32_t propertyCount; 9907 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 9908 m_physicalDevice, 9909 reinterpret_cast<const 
VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 9910 &propertyCount, 9911 nullptr ); 9912 properties.resize( propertyCount ); 9913 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 9914 m_physicalDevice, 9915 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 9916 &propertyCount, 9917 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 9918 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9919 return properties; 9920 } 9921 9922 template < 9923 typename SparseImageFormatProperties2Allocator, 9924 typename Dispatch, 9925 typename B, 9926 typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type> 9927 VULKAN_HPP_NODISCARD 9928 VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR(const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,Dispatch const & d) const9929 PhysicalDevice::getSparseImageFormatProperties2KHR( 9930 const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, 9931 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, 9932 Dispatch const & d ) const 9933 { 9934 std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( 9935 sparseImageFormatProperties2Allocator ); 9936 uint32_t propertyCount; 9937 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 9938 m_physicalDevice, 9939 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 9940 &propertyCount, 9941 nullptr ); 9942 properties.resize( propertyCount ); 9943 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( 9944 m_physicalDevice, 9945 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), 9946 &propertyCount, 9947 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); 9948 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 9949 return properties; 9950 } 9951 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 9952 9953 //=== VK_KHR_device_group === 9954 9955 template <typename Dispatch> 9956 VULKAN_HPP_INLINE void getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,Dispatch const & d) const9957 Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, 9958 uint32_t localDeviceIndex, 9959 uint32_t remoteDeviceIndex, 9960 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, 9961 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9962 { 9963 d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, 9964 heapIndex, 9965 localDeviceIndex, 9966 remoteDeviceIndex, 9967 reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); 9968 } 9969 9970 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 9971 template <typename Dispatch> 9972 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,Dispatch const & d) const9973 Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, 9974 uint32_t localDeviceIndex, 9975 uint32_t remoteDeviceIndex, 9976 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9977 { 9978 VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; 9979 d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, 9980 heapIndex, 9981 localDeviceIndex, 9982 
remoteDeviceIndex, 9983 reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); 9984 return peerMemoryFeatures; 9985 } 9986 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 9987 9988 template <typename Dispatch> setDeviceMaskKHR(uint32_t deviceMask,Dispatch const & d) const9989 VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, 9990 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 9991 { 9992 d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); 9993 } 9994 9995 template <typename Dispatch> dispatchBaseKHR(uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ,Dispatch const & d) const9996 VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, 9997 uint32_t baseGroupY, 9998 uint32_t baseGroupZ, 9999 uint32_t groupCountX, 10000 uint32_t groupCountY, 10001 uint32_t groupCountZ, 10002 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10003 { 10004 d.vkCmdDispatchBaseKHR( 10005 m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); 10006 } 10007 10008 #if defined( VK_USE_PLATFORM_VI_NN ) 10009 //=== VK_NN_vi_surface === 10010 10011 template <typename Dispatch> 10012 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createViSurfaceNN(const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const10013 Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, 10014 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10015 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, 10016 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10017 { 10018 return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, 10019 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), 10020 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10021 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 10022 } 10023 10024 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10025 template <typename Dispatch> 10026 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 10027 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN(const ViSurfaceCreateInfoNN & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const10028 Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, 10029 Optional<const AllocationCallbacks> allocator, 10030 Dispatch const & d ) const 10031 { 10032 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10033 Result result = static_cast<Result>( 10034 d.vkCreateViSurfaceNN( m_instance, 10035 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 10036 reinterpret_cast<const VkAllocationCallbacks *>( 10037 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10038 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10039 return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); 10040 } 10041 10042 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10043 template <typename Dispatch> 10044 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 10045 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createViSurfaceNNUnique(const ViSurfaceCreateInfoNN & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const10046 Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & 
createInfo, 10047 Optional<const AllocationCallbacks> allocator, 10048 Dispatch const & d ) const 10049 { 10050 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 10051 Result result = static_cast<Result>( 10052 d.vkCreateViSurfaceNN( m_instance, 10053 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), 10054 reinterpret_cast<const VkAllocationCallbacks *>( 10055 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10056 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 10057 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 10058 return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 10059 result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter ); 10060 } 10061 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 10062 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10063 #endif /*VK_USE_PLATFORM_VI_NN*/ 10064 10065 //=== VK_KHR_maintenance1 === 10066 10067 template <typename Dispatch> trimCommandPoolKHR(VULKAN_HPP_NAMESPACE::CommandPool commandPool,VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,Dispatch const & d) const10068 VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 10069 VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, 10070 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10071 { 10072 d.vkTrimCommandPoolKHR( 10073 m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); 10074 } 10075 10076 //=== VK_KHR_device_group_creation === 10077 10078 template <typename Dispatch> enumeratePhysicalDeviceGroupsKHR(uint32_t * pPhysicalDeviceGroupCount,VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,Dispatch const & d) const10079 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( 10080 uint32_t * pPhysicalDeviceGroupCount, 10081 VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, 10082 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10083 { 10084 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 10085 m_instance, 10086 pPhysicalDeviceGroupCount, 10087 reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); 10088 } 10089 10090 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10091 template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> 10092 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10093 typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const & d) const10094 Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const 10095 { 10096 std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; 10097 uint32_t physicalDeviceGroupCount; 10098 Result result; 10099 do 10100 { 10101 result = 10102 static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 10103 if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) 10104 { 10105 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 10106 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 10107 m_instance, 10108 &physicalDeviceGroupCount, 10109 reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 10110 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 10111 } 10112 } while ( 
result == Result::eIncomplete ); 10113 if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) 10114 { 10115 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 10116 } 10117 return createResultValue( result, 10118 physicalDeviceGroupProperties, 10119 VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 10120 } 10121 10122 template < 10123 typename PhysicalDeviceGroupPropertiesAllocator, 10124 typename Dispatch, 10125 typename B, 10126 typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type> 10127 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10128 typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR(PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,Dispatch const & d) const10129 Instance::enumeratePhysicalDeviceGroupsKHR( 10130 PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const 10131 { 10132 std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( 10133 physicalDeviceGroupPropertiesAllocator ); 10134 uint32_t physicalDeviceGroupCount; 10135 Result result; 10136 do 10137 { 10138 result = 10139 static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); 10140 if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) 10141 { 10142 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 10143 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( 10144 m_instance, 10145 &physicalDeviceGroupCount, 10146 reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); 10147 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); 10148 } 10149 } while ( result == Result::eIncomplete ); 10150 if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) 10151 { 10152 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); 10153 } 10154 return createResultValue( result, 10155 physicalDeviceGroupProperties, 10156 VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); 10157 } 10158 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10159 10160 //=== VK_KHR_external_memory_capabilities === 10161 10162 template <typename Dispatch> getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,Dispatch const & d) const10163 VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( 10164 const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, 10165 VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, 10166 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10167 { 10168 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( 10169 m_physicalDevice, 10170 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), 10171 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); 10172 } 10173 10174 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10175 template <typename Dispatch> 10176 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(const 
PhysicalDeviceExternalBufferInfo & externalBufferInfo,Dispatch const & d) const10177 PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, 10178 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10179 { 10180 VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 10181 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( 10182 m_physicalDevice, 10183 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), 10184 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); 10185 return externalBufferProperties; 10186 } 10187 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10188 10189 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 10190 //=== VK_KHR_external_memory_win32 === 10191 10192 template <typename Dispatch> 10193 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getMemoryWin32HandleKHR(const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const10194 Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, 10195 HANDLE * pHandle, 10196 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10197 { 10198 return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( 10199 m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 10200 } 10201 10202 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10203 template <typename Dispatch> 10204 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR(const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const10205 Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 10206 { 10207 HANDLE handle; 10208 Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( 10209 m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 10210 return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); 10211 } 10212 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10213 10214 template <typename Dispatch> getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,Dispatch const & d) const10215 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( 10216 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 10217 HANDLE handle, 10218 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, 10219 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10220 { 10221 return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( 10222 m_device, 10223 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), 10224 handle, 10225 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) ); 10226 } 10227 10228 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10229 template <typename Dispatch> 10230 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 10231 typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,HANDLE handle,Dispatch const & d) const10232 Device::getMemoryWin32HandlePropertiesKHR( 
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               HANDLE           handle,
                                               Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
    Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      handle,
      reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
    return createResultValue(
      result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_KHR_external_memory_fd ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
                            int *                                            pFd,
                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
    Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    int    fd;
    Result result = static_cast<Result>(
      d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                      int                                                    fd,
                                      VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetMemoryFdPropertiesKHR( m_device,
                                    static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                    fd,
                                    reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
    Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                      int                                                    fd,
                                      Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
    Result result = static_cast<Result>(
      d.vkGetMemoryFdPropertiesKHR( m_device,
                                    static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                    fd,
reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) ); 10300 return createResultValue( 10301 result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); 10302 } 10303 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10304 10305 //=== VK_KHR_external_semaphore_capabilities === 10306 10307 template <typename Dispatch> getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,Dispatch const & d) const10308 VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( 10309 const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, 10310 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, 10311 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10312 { 10313 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( 10314 m_physicalDevice, 10315 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), 10316 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); 10317 } 10318 10319 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10320 template <typename Dispatch> 10321 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,Dispatch const & d) const10322 PhysicalDevice::getExternalSemaphorePropertiesKHR( 10323 const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10324 { 10325 VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; 10326 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( 10327 m_physicalDevice, 10328 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), 10329 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); 10330 return externalSemaphoreProperties; 10331 } 10332 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10333 10334 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 10335 //=== VK_KHR_external_semaphore_win32 === 10336 10337 template <typename Dispatch> importSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,Dispatch const & d) const10338 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( 10339 const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, 10340 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10341 { 10342 return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( 10343 m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) ); 10344 } 10345 10346 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10347 template <typename Dispatch> 10348 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreWin32HandleKHR(const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,Dispatch const & d) const10349 Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, 10350 Dispatch const & d ) const 10351 { 10352 Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( 10353 m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( 
&importSemaphoreWin32HandleInfo ) ) ); 10354 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); 10355 } 10356 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10357 10358 template <typename Dispatch> getSemaphoreWin32HandleKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const10359 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( 10360 const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, 10361 HANDLE * pHandle, 10362 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10363 { 10364 return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( 10365 m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 10366 } 10367 10368 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10369 template <typename Dispatch> 10370 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR(const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const10371 Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, 10372 Dispatch const & d ) const 10373 { 10374 HANDLE handle; 10375 Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( 10376 m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 10377 return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); 10378 } 10379 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10380 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 10381 10382 //=== VK_KHR_external_semaphore_fd === 10383 10384 template <typename Dispatch> 10385 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,Dispatch const & d) const10386 Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, 10387 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10388 { 10389 return static_cast<Result>( d.vkImportSemaphoreFdKHR( 10390 m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) ); 10391 } 10392 10393 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10394 template <typename Dispatch> 10395 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importSemaphoreFdKHR(const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,Dispatch const & d) const10396 Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const 10397 { 10398 Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( 10399 m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) ); 10400 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); 10401 } 10402 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10403 10404 template <typename Dispatch> 10405 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const10406 Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, 10407 int * pFd, 10408 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10409 { 10410 return static_cast<Result>( 10411 d.vkGetSemaphoreFdKHR( m_device, 
      reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
    Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    int    fd;
    Result result = static_cast<Result>(
      d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_KHR_push_descriptor ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                         VULKAN_HPP_NAMESPACE::PipelineLayout    layout,
                                         uint32_t                                set,
                                         uint32_t                                descriptorWriteCount,
                                         const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWriteCount,
                                 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
    VULKAN_HPP_NAMESPACE::PipelineBindPoint                            pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::PipelineLayout                               layout,
    uint32_t                                                           set,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWrites.size(),
                                 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
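  // Usage sketch (editorial addition, not part of the generated registry output): pushing a single uniform
  // buffer binding without allocating a descriptor set, via the ArrayProxy overload above. Assumes
  // `commandBuffer`, `pipelineLayout`, and `uniformBuffer` are valid handles created elsewhere, that the
  // descriptor set layout behind set 0 of `pipelineLayout` was created with
  // DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR, and that vkCmdPushDescriptorSetKHR is loaded.
  //
  //   VULKAN_HPP_NAMESPACE::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
  //   VULKAN_HPP_NAMESPACE::WriteDescriptorSet   write(
  //     {}, 0, 0, 1, VULKAN_HPP_NAMESPACE::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
  //   commandBuffer.pushDescriptorSetKHR(
  //     VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, pipelineLayout, 0, write );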
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR(
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
    VULKAN_HPP_NAMESPACE::PipelineLayout           layout,
    uint32_t                                       set,
    const void *                                   pData,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
                                             static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                             static_cast<VkPipelineLayout>( layout ),
                                             set,
                                             pData );
  }

  //=== VK_EXT_conditional_rendering ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
    const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginConditionalRenderingEXT(
      m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginConditionalRenderingEXT(
      m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
  }

  //=== VK_KHR_descriptor_update_template ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR(
    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *                 pDescriptorUpdateTemplate,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
    Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                               Optional<const AllocationCallbacks>        allocator,
                                               Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    return createResultValue(
      result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
    Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                                     Optional<const AllocationCallbacks>        allocator,
                                                     Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
      result,
      descriptorUpdateTemplate,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique",
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplateKHR( m_device,
                                            static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                Optional<const AllocationCallbacks>            allocator,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplateKHR(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
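  // Usage sketch (editorial addition, not part of the generated registry output): building a descriptor
  // update template for one uniform-buffer binding and applying it with
  // Device::updateDescriptorSetWithTemplateKHR (defined just below). `device`, `descriptorSetLayout`,
  // `descriptorSet`, and `uniformBuffer` are assumed to be valid handles, and the dispatcher is assumed to
  // have the VK_KHR_descriptor_update_template entry points loaded.
  //
  //   struct UpdateData { VULKAN_HPP_NAMESPACE::DescriptorBufferInfo buffer; };
  //   VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry entry(
  //     0, 0, 1, VULKAN_HPP_NAMESPACE::DescriptorType::eUniformBuffer,
  //     offsetof( UpdateData, buffer ), sizeof( UpdateData ) );
  //   VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo createInfo(
  //     {}, 1, &entry, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, descriptorSetLayout );
  //   auto updateTemplate = device.createDescriptorUpdateTemplateKHRUnique( createInfo );
  //   UpdateData data{ VULKAN_HPP_NAMESPACE::DescriptorBufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE ) };
  //   device.updateDescriptorSetWithTemplateKHR( descriptorSet, *updateTemplate, &data );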
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10591 10592 template <typename Dispatch> 10593 VULKAN_HPP_INLINE void updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData,Dispatch const & d) const10594 Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, 10595 VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 10596 const void * pData, 10597 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10598 { 10599 d.vkUpdateDescriptorSetWithTemplateKHR( m_device, 10600 static_cast<VkDescriptorSet>( descriptorSet ), 10601 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), 10602 pData ); 10603 } 10604 10605 //=== VK_NV_clip_space_w_scaling === 10606 10607 template <typename Dispatch> 10608 VULKAN_HPP_INLINE void setViewportWScalingNV(uint32_t firstViewport,uint32_t viewportCount,const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,Dispatch const & d) const10609 CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, 10610 uint32_t viewportCount, 10611 const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, 10612 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10613 { 10614 d.vkCmdSetViewportWScalingNV( m_commandBuffer, 10615 firstViewport, 10616 viewportCount, 10617 reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) ); 10618 } 10619 10620 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10621 template <typename Dispatch> setViewportWScalingNV(uint32_t firstViewport,ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,Dispatch const & d) const10622 VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( 10623 uint32_t firstViewport, 10624 ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, 10625 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10626 { 10627 d.vkCmdSetViewportWScalingNV( m_commandBuffer, 10628 firstViewport, 10629 viewportWScalings.size(), 10630 reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) ); 10631 } 10632 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10633 10634 //=== VK_EXT_direct_mode_display === 10635 10636 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 10637 template <typename Dispatch> releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const10638 VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 10639 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10640 { 10641 return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 10642 } 10643 #else 10644 template <typename Dispatch> 10645 VULKAN_HPP_INLINE typename ResultValueType<void>::type releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const10646 PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 10647 { 10648 Result result = 10649 static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 10650 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" ); 10651 } 10652 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10653 10654 #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) 10655 //=== VK_EXT_acquire_xlib_display === 10656 10657 template <typename Dispatch> acquireXlibDisplayEXT(Display * dpy,VULKAN_HPP_NAMESPACE::DisplayKHR 
display,Dispatch const & d) const10658 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( 10659 Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10660 { 10661 return static_cast<Result>( 10662 d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) ); 10663 } 10664 10665 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10666 template <typename Dispatch> 10667 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireXlibDisplayEXT(Display & dpy,VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const10668 PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, 10669 VULKAN_HPP_NAMESPACE::DisplayKHR display, 10670 Dispatch const & d ) const 10671 { 10672 Result result = 10673 static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) ); 10674 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); 10675 } 10676 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10677 10678 template <typename Dispatch> 10679 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getRandROutputDisplayEXT(Display * dpy,RROutput rrOutput,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const10680 PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, 10681 RROutput rrOutput, 10682 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 10683 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10684 { 10685 return static_cast<Result>( 10686 d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 10687 } 10688 10689 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10690 template <typename Dispatch> 10691 VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(Display & dpy,RROutput rrOutput,Dispatch const & d) const10692 PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 10693 { 10694 VULKAN_HPP_NAMESPACE::DisplayKHR display; 10695 Result result = static_cast<Result>( 10696 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 10697 return createResultValue( 10698 result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); 10699 } 10700 10701 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10702 template <typename Dispatch> 10703 VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getRandROutputDisplayEXTUnique(Display & dpy,RROutput rrOutput,Dispatch const & d) const10704 PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const 10705 { 10706 VULKAN_HPP_NAMESPACE::DisplayKHR display; 10707 Result result = static_cast<Result>( 10708 d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 10709 ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d ); 10710 return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( 10711 result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter ); 10712 } 10713 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 10714 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10715 #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ 10716 10717 //=== VK_EXT_display_surface_counter === 10718 10719 template 
<typename Dispatch> 10720 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,Dispatch const & d) const10721 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, 10722 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, 10723 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10724 { 10725 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 10726 m_physicalDevice, 10727 static_cast<VkSurfaceKHR>( surface ), 10728 reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) ); 10729 } 10730 10731 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10732 template <typename Dispatch> 10733 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 10734 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const10735 PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const 10736 { 10737 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; 10738 Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( 10739 m_physicalDevice, 10740 static_cast<VkSurfaceKHR>( surface ), 10741 reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) ); 10742 return createResultValue( 10743 result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); 10744 } 10745 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10746 10747 //=== VK_EXT_display_control === 10748 10749 template <typename Dispatch> 10750 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,Dispatch const & d) const10751 Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 10752 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, 10753 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10754 { 10755 return static_cast<Result>( 10756 d.vkDisplayPowerControlEXT( m_device, 10757 static_cast<VkDisplayKHR>( display ), 10758 reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) ); 10759 } 10760 10761 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10762 template <typename Dispatch> displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const DisplayPowerInfoEXT & displayPowerInfo,Dispatch const & d) const10763 VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( 10764 VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const 10765 { 10766 Result result = static_cast<Result>( 10767 d.vkDisplayPowerControlEXT( m_device, 10768 static_cast<VkDisplayKHR>( display ), 10769 reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) ); 10770 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); 10771 } 10772 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10773 10774 template <typename Dispatch> 10775 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const10776 Device::registerEventEXT( const 
VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, 10777 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10778 VULKAN_HPP_NAMESPACE::Fence * pFence, 10779 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10780 { 10781 return static_cast<Result>( 10782 d.vkRegisterDeviceEventEXT( m_device, 10783 reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), 10784 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10785 reinterpret_cast<VkFence *>( pFence ) ) ); 10786 } 10787 10788 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10789 template <typename Dispatch> 10790 VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT(const DeviceEventInfoEXT & deviceEventInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const10791 Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, 10792 Optional<const AllocationCallbacks> allocator, 10793 Dispatch const & d ) const 10794 { 10795 VULKAN_HPP_NAMESPACE::Fence fence; 10796 Result result = static_cast<Result>( 10797 d.vkRegisterDeviceEventEXT( m_device, 10798 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 10799 reinterpret_cast<const VkAllocationCallbacks *>( 10800 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10801 reinterpret_cast<VkFence *>( &fence ) ) ); 10802 return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); 10803 } 10804 10805 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10806 template <typename Dispatch> 10807 VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique(const DeviceEventInfoEXT & deviceEventInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const10808 Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, 10809 Optional<const AllocationCallbacks> allocator, 10810 Dispatch const & d ) const 10811 { 10812 VULKAN_HPP_NAMESPACE::Fence fence; 10813 Result result = static_cast<Result>( 10814 d.vkRegisterDeviceEventEXT( m_device, 10815 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), 10816 reinterpret_cast<const VkAllocationCallbacks *>( 10817 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10818 reinterpret_cast<VkFence *>( &fence ) ) ); 10819 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 10820 return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( 10821 result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter ); 10822 } 10823 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 10824 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10825 10826 template <typename Dispatch> 10827 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const10828 Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 10829 const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, 10830 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 10831 VULKAN_HPP_NAMESPACE::Fence * pFence, 10832 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10833 { 10834 return static_cast<Result>( 10835 d.vkRegisterDisplayEventEXT( m_device, 10836 static_cast<VkDisplayKHR>( display ), 10837 
reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), 10838 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 10839 reinterpret_cast<VkFence *>( pFence ) ) ); 10840 } 10841 10842 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10843 template <typename Dispatch> 10844 VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const DisplayEventInfoEXT & displayEventInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const10845 Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, 10846 const DisplayEventInfoEXT & displayEventInfo, 10847 Optional<const AllocationCallbacks> allocator, 10848 Dispatch const & d ) const 10849 { 10850 VULKAN_HPP_NAMESPACE::Fence fence; 10851 Result result = static_cast<Result>( 10852 d.vkRegisterDisplayEventEXT( m_device, 10853 static_cast<VkDisplayKHR>( display ), 10854 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 10855 reinterpret_cast<const VkAllocationCallbacks *>( 10856 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10857 reinterpret_cast<VkFence *>( &fence ) ) ); 10858 return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); 10859 } 10860 10861 # ifndef VULKAN_HPP_NO_SMART_HANDLE 10862 template <typename Dispatch> 10863 VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const DisplayEventInfoEXT & displayEventInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const10864 Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, 10865 const DisplayEventInfoEXT & displayEventInfo, 10866 Optional<const AllocationCallbacks> allocator, 10867 Dispatch const & d ) const 10868 { 10869 VULKAN_HPP_NAMESPACE::Fence fence; 10870 Result result = static_cast<Result>( 10871 d.vkRegisterDisplayEventEXT( m_device, 10872 static_cast<VkDisplayKHR>( display ), 10873 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), 10874 reinterpret_cast<const VkAllocationCallbacks *>( 10875 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 10876 reinterpret_cast<VkFence *>( &fence ) ) ); 10877 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 10878 return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( 10879 result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter ); 10880 } 10881 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 10882 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10883 10884 template <typename Dispatch> 10885 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,uint64_t * pCounterValue,Dispatch const & d) const10886 Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 10887 VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, 10888 uint64_t * pCounterValue, 10889 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10890 { 10891 return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, 10892 static_cast<VkSwapchainKHR>( swapchain ), 10893 static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), 10894 pCounterValue ) ); 10895 } 10896 10897 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10898 template <typename Dispatch> 10899 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,Dispatch const & d) const10900 Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 10901 VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, 10902 Dispatch const & d ) const 10903 { 10904 uint64_t counterValue; 10905 Result result = 10906 static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, 10907 static_cast<VkSwapchainKHR>( swapchain ), 10908 static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), 10909 &counterValue ) ); 10910 return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); 10911 } 10912 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10913 10914 //=== VK_GOOGLE_display_timing === 10915 10916 template <typename Dispatch> 10917 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,Dispatch const & d) const10918 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 10919 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, 10920 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10921 { 10922 return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( 10923 m_device, 10924 static_cast<VkSwapchainKHR>( swapchain ), 10925 reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) ); 10926 } 10927 10928 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10929 template <typename Dispatch> 10930 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 10931 typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const10932 Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 10933 { 10934 VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; 10935 Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( 10936 m_device, 10937 static_cast<VkSwapchainKHR>( swapchain ), 10938 reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) ); 10939 return createResultValue( 10940 result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); 10941 } 10942 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 10943 10944 template <typename Dispatch> 10945 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,uint32_t * pPresentationTimingCount,VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,Dispatch const & d) const10946 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 10947 uint32_t * pPresentationTimingCount, 10948 VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, 10949 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 10950 { 10951 return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( 10952 m_device, 10953 static_cast<VkSwapchainKHR>( swapchain ), 10954 pPresentationTimingCount, 10955 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) ); 10956 } 10957 10958 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 10959 template <typename 
PastPresentationTimingGOOGLEAllocator, typename Dispatch> 10960 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10961 typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const10962 Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 10963 { 10964 std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings; 10965 uint32_t presentationTimingCount; 10966 Result result; 10967 do 10968 { 10969 result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( 10970 m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 10971 if ( ( result == Result::eSuccess ) && presentationTimingCount ) 10972 { 10973 presentationTimings.resize( presentationTimingCount ); 10974 result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( 10975 m_device, 10976 static_cast<VkSwapchainKHR>( swapchain ), 10977 &presentationTimingCount, 10978 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 10979 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 10980 } 10981 } while ( result == Result::eIncomplete ); 10982 if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) ) 10983 { 10984 presentationTimings.resize( presentationTimingCount ); 10985 } 10986 return createResultValue( 10987 result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 10988 } 10989 10990 template < 10991 typename PastPresentationTimingGOOGLEAllocator, 10992 typename Dispatch, 10993 typename B, 10994 typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type> 10995 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 10996 typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,Dispatch const & d) const10997 Device::getPastPresentationTimingGOOGLE( 10998 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, 10999 PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, 11000 Dispatch const & d ) const 11001 { 11002 std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( 11003 pastPresentationTimingGOOGLEAllocator ); 11004 uint32_t presentationTimingCount; 11005 Result result; 11006 do 11007 { 11008 result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( 11009 m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); 11010 if ( ( result == Result::eSuccess ) && presentationTimingCount ) 11011 { 11012 presentationTimings.resize( presentationTimingCount ); 11013 result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( 11014 m_device, 11015 static_cast<VkSwapchainKHR>( swapchain ), 11016 &presentationTimingCount, 11017 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); 11018 VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); 11019 } 11020 } while ( result == Result::eIncomplete ); 11021 if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) ) 11022 { 11023 
presentationTimings.resize( presentationTimingCount ); 11024 } 11025 return createResultValue( 11026 result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 11027 } 11028 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11029 11030 //=== VK_EXT_discard_rectangles === 11031 11032 template <typename Dispatch> setDiscardRectangleEXT(uint32_t firstDiscardRectangle,uint32_t discardRectangleCount,const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,Dispatch const & d) const11033 VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 11034 uint32_t discardRectangleCount, 11035 const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, 11036 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11037 { 11038 d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, 11039 firstDiscardRectangle, 11040 discardRectangleCount, 11041 reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) ); 11042 } 11043 11044 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11045 template <typename Dispatch> 11046 VULKAN_HPP_INLINE void setDiscardRectangleEXT(uint32_t firstDiscardRectangle,ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,Dispatch const & d) const11047 CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, 11048 ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, 11049 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11050 { 11051 d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, 11052 firstDiscardRectangle, 11053 discardRectangles.size(), 11054 reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) ); 11055 } 11056 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11057 11058 //=== VK_EXT_hdr_metadata === 11059 11060 template <typename Dispatch> setHdrMetadataEXT(uint32_t swapchainCount,const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,Dispatch const & d) const11061 VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, 11062 const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, 11063 const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, 11064 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11065 { 11066 d.vkSetHdrMetadataEXT( m_device, 11067 swapchainCount, 11068 reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), 11069 reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) ); 11070 } 11071 11072 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11073 template <typename Dispatch> 11074 VULKAN_HPP_INLINE void setHdrMetadataEXT(ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,Dispatch const & d) const11075 Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, 11076 ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, 11077 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 11078 { 11079 # ifdef VULKAN_HPP_NO_EXCEPTIONS 11080 VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); 11081 # else 11082 if ( swapchains.size() != metadata.size() ) 11083 { 11084 throw LogicError( VULKAN_HPP_NAMESPACE_STRING 11085 "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); 11086 } 11087 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 11088 11089 d.vkSetHdrMetadataEXT( m_device, 11090 swapchains.size(), 11091 reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), 11092 reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) ); 
11093 } 11094 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11095 11096 //=== VK_KHR_create_renderpass2 === 11097 11098 template <typename Dispatch> 11099 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,Dispatch const & d) const11100 Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, 11101 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 11102 VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, 11103 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11104 { 11105 return static_cast<Result>( 11106 d.vkCreateRenderPass2KHR( m_device, 11107 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), 11108 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 11109 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); 11110 } 11111 11112 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11113 template <typename Dispatch> 11114 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 11115 typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR(const RenderPassCreateInfo2 & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const11116 Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, 11117 Optional<const AllocationCallbacks> allocator, 11118 Dispatch const & d ) const 11119 { 11120 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 11121 Result result = static_cast<Result>( 11122 d.vkCreateRenderPass2KHR( m_device, 11123 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 11124 reinterpret_cast<const VkAllocationCallbacks *>( 11125 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11126 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 11127 return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); 11128 } 11129 11130 # ifndef VULKAN_HPP_NO_SMART_HANDLE 11131 template <typename Dispatch> 11132 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 11133 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique(const RenderPassCreateInfo2 & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const11134 Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, 11135 Optional<const AllocationCallbacks> allocator, 11136 Dispatch const & d ) const 11137 { 11138 VULKAN_HPP_NAMESPACE::RenderPass renderPass; 11139 Result result = static_cast<Result>( 11140 d.vkCreateRenderPass2KHR( m_device, 11141 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), 11142 reinterpret_cast<const VkAllocationCallbacks *>( 11143 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 11144 reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); 11145 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 11146 return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( 11147 result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter ); 11148 } 11149 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 11150 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11151 11152 template <typename Dispatch> 11153 VULKAN_HPP_INLINE void beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,const 
VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,Dispatch const & d) const11154 CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, 11155 const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 11156 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11157 { 11158 d.vkCmdBeginRenderPass2KHR( m_commandBuffer, 11159 reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), 11160 reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); 11161 } 11162 11163 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11164 template <typename Dispatch> beginRenderPass2KHR(const RenderPassBeginInfo & renderPassBegin,const SubpassBeginInfo & subpassBeginInfo,Dispatch const & d) const11165 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, 11166 const SubpassBeginInfo & subpassBeginInfo, 11167 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11168 { 11169 d.vkCmdBeginRenderPass2KHR( m_commandBuffer, 11170 reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), 11171 reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); 11172 } 11173 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11174 11175 template <typename Dispatch> 11176 VULKAN_HPP_INLINE void nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const11177 CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, 11178 const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 11179 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11180 { 11181 d.vkCmdNextSubpass2KHR( m_commandBuffer, 11182 reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), 11183 reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 11184 } 11185 11186 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11187 template <typename Dispatch> nextSubpass2KHR(const SubpassBeginInfo & subpassBeginInfo,const SubpassEndInfo & subpassEndInfo,Dispatch const & d) const11188 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, 11189 const SubpassEndInfo & subpassEndInfo, 11190 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11191 { 11192 d.vkCmdNextSubpass2KHR( m_commandBuffer, 11193 reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), 11194 reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 11195 } 11196 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11197 11198 template <typename Dispatch> endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,Dispatch const & d) const11199 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, 11200 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11201 { 11202 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); 11203 } 11204 11205 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11206 template <typename Dispatch> endRenderPass2KHR(const SubpassEndInfo & subpassEndInfo,Dispatch const & d) const11207 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, 11208 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11209 { 11210 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); 11211 } 11212 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11213 11214 //=== 
VK_KHR_shared_presentable_image === 11215 11216 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 11217 template <typename Dispatch> getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const11218 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( 11219 VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11220 { 11221 return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 11222 } 11223 #else 11224 template <typename Dispatch> 11225 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,Dispatch const & d) const11226 Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const 11227 { 11228 Result result = 11229 static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); 11230 return createResultValue( 11231 result, 11232 VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", 11233 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); 11234 } 11235 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11236 11237 //=== VK_KHR_external_fence_capabilities === 11238 11239 template <typename Dispatch> getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,Dispatch const & d) const11240 VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( 11241 const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, 11242 VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, 11243 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11244 { 11245 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( 11246 m_physicalDevice, 11247 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), 11248 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); 11249 } 11250 11251 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11252 template <typename Dispatch> 11253 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(const PhysicalDeviceExternalFenceInfo & externalFenceInfo,Dispatch const & d) const11254 PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, 11255 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11256 { 11257 VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; 11258 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( 11259 m_physicalDevice, 11260 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), 11261 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); 11262 return externalFenceProperties; 11263 } 11264 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11265 11266 #if defined( VK_USE_PLATFORM_WIN32_KHR ) 11267 //=== VK_KHR_external_fence_win32 === 11268 11269 template <typename Dispatch> importFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,Dispatch const & d) const11270 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( 11271 const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, 11272 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11273 { 11274 return 
static_cast<Result>( d.vkImportFenceWin32HandleKHR( 11275 m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) ); 11276 } 11277 11278 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11279 template <typename Dispatch> 11280 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceWin32HandleKHR(const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,Dispatch const & d) const11281 Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, 11282 Dispatch const & d ) const 11283 { 11284 Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( 11285 m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) ); 11286 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); 11287 } 11288 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11289 11290 template <typename Dispatch> 11291 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getFenceWin32HandleKHR(const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,HANDLE * pHandle,Dispatch const & d) const11292 Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, 11293 HANDLE * pHandle, 11294 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11295 { 11296 return static_cast<Result>( d.vkGetFenceWin32HandleKHR( 11297 m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); 11298 } 11299 11300 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11301 template <typename Dispatch> 11302 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR(const FenceGetWin32HandleInfoKHR & getWin32HandleInfo,Dispatch const & d) const11303 Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const 11304 { 11305 HANDLE handle; 11306 Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( 11307 m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); 11308 return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); 11309 } 11310 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11311 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 11312 11313 //=== VK_KHR_external_fence_fd === 11314 11315 template <typename Dispatch> 11316 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,Dispatch const & d) const11317 Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, 11318 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11319 { 11320 return static_cast<Result>( 11321 d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) ); 11322 } 11323 11324 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11325 template <typename Dispatch> 11326 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type importFenceFdKHR(const ImportFenceFdInfoKHR & importFenceFdInfo,Dispatch const & d) const11327 Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const 11328 { 11329 Result result = static_cast<Result>( 11330 d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) ); 
11331 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); 11332 } 11333 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11334 11335 template <typename Dispatch> 11336 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,int * pFd,Dispatch const & d) const11337 Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, 11338 int * pFd, 11339 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11340 { 11341 return static_cast<Result>( 11342 d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); 11343 } 11344 11345 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11346 template <typename Dispatch> 11347 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type getFenceFdKHR(const FenceGetFdInfoKHR & getFdInfo,Dispatch const & d) const11348 Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const 11349 { 11350 int fd; 11351 Result result = static_cast<Result>( 11352 d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) ); 11353 return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); 11354 } 11355 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11356 11357 //=== VK_KHR_performance_query === 11358 11359 template <typename Dispatch> enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,uint32_t * pCounterCount,VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,Dispatch const & d) const11360 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( 11361 uint32_t queueFamilyIndex, 11362 uint32_t * pCounterCount, 11363 VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, 11364 VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, 11365 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11366 { 11367 return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11368 m_physicalDevice, 11369 queueFamilyIndex, 11370 pCounterCount, 11371 reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ), 11372 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) ); 11373 } 11374 11375 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11376 template <typename Allocator, typename Dispatch> 11377 VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) 11378 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11379 typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,Dispatch const & d) const11380 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( 11381 uint32_t queueFamilyIndex, 11382 ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters, 11383 Dispatch const & d ) const 11384 { 11385 std::vector<PerformanceCounterDescriptionKHR, Allocator> counterDescriptions; 11386 uint32_t counterCount; 11387 Result result; 11388 do 11389 { 11390 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11391 m_physicalDevice, 11392 queueFamilyIndex, 11393 counters.size(), 11394 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 11395 nullptr ) ); 11396 if ( ( result == Result::eSuccess ) && counterCount ) 11397 { 11398 counterDescriptions.resize( counterCount ); 11399 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11400 m_physicalDevice, 11401 queueFamilyIndex, 11402 counters.size(), 11403 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 11404 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 11405 } 11406 } while ( result == Result::eIncomplete ); 11407 if ( result == Result::eSuccess ) 11408 { 11409 VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() ); 11410 counterDescriptions.resize( counterCount ); 11411 } 11412 return createResultValue( result, 11413 counterDescriptions, 11414 VULKAN_HPP_NAMESPACE_STRING 11415 "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 11416 } 11417 11418 template < 11419 typename Allocator, 11420 typename Dispatch, 11421 typename B, 11422 typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type> 11423 VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) 11424 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11425 typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,Allocator const & vectorAllocator,Dispatch const & d) const11426 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( 11427 uint32_t queueFamilyIndex, 11428 ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters, 11429 Allocator const & vectorAllocator, 11430 Dispatch const & d ) const 11431 { 11432 std::vector<PerformanceCounterDescriptionKHR, Allocator> counterDescriptions( vectorAllocator ); 11433 uint32_t counterCount; 11434 Result result; 11435 do 11436 { 11437 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11438 m_physicalDevice, 11439 queueFamilyIndex, 11440 counters.size(), 11441 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 11442 nullptr ) ); 11443 if ( ( result == Result::eSuccess ) && counterCount ) 11444 { 11445 counterDescriptions.resize( counterCount ); 11446 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11447 m_physicalDevice, 11448 queueFamilyIndex, 11449 counters.size(), 11450 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 11451 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 11452 } 11453 } while ( result == Result::eIncomplete ); 11454 if ( result == Result::eSuccess ) 11455 { 11456 VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() ); 11457 counterDescriptions.resize( counterCount ); 11458 } 11459 return createResultValue( result, 11460 counterDescriptions, 11461 VULKAN_HPP_NAMESPACE_STRING 11462 "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 11463 } 11464 11465 template <typename PerformanceCounterKHRAllocator, 11466 typename PerformanceCounterDescriptionKHRAllocator, 11467 typename Dispatch> 11468 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 11469 std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 11470 std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,Dispatch const & d) const11471 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, 11472 Dispatch const & d ) const 11473 { 11474 std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 11475 std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> 11476 data; 11477 std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first; 11478 std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = 11479 data.second; 11480 uint32_t counterCount; 11481 Result result; 11482 do 11483 { 11484 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11485 m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); 11486 if ( ( result == Result::eSuccess ) && counterCount ) 11487 { 11488 counters.resize( counterCount ); 11489 counterDescriptions.resize( counterCount ); 11490 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11491 m_physicalDevice, 11492 queueFamilyIndex, 
11493 &counterCount, 11494 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 11495 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 11496 VULKAN_HPP_ASSERT( counterCount <= counters.size() ); 11497 } 11498 } while ( result == Result::eIncomplete ); 11499 if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) ) 11500 { 11501 counters.resize( counterCount ); 11502 counterDescriptions.resize( counterCount ); 11503 } 11504 return createResultValue( 11505 result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 11506 } 11507 11508 template <typename PerformanceCounterKHRAllocator, 11509 typename PerformanceCounterDescriptionKHRAllocator, 11510 typename Dispatch, 11511 typename B1, 11512 typename B2, 11513 typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && 11514 std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, 11515 int>::type> 11516 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< 11517 std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 11518 std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex,PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,Dispatch const & d) const11519 PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( 11520 uint32_t queueFamilyIndex, 11521 PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, 11522 PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, 11523 Dispatch const & d ) const 11524 { 11525 std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, 11526 std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> 11527 data( std::piecewise_construct, 11528 std::forward_as_tuple( performanceCounterKHRAllocator ), 11529 std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); 11530 std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first; 11531 std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = 11532 data.second; 11533 uint32_t counterCount; 11534 Result result; 11535 do 11536 { 11537 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11538 m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); 11539 if ( ( result == Result::eSuccess ) && counterCount ) 11540 { 11541 counters.resize( counterCount ); 11542 counterDescriptions.resize( counterCount ); 11543 result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( 11544 m_physicalDevice, 11545 queueFamilyIndex, 11546 &counterCount, 11547 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), 11548 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); 11549 VULKAN_HPP_ASSERT( counterCount <= counters.size() ); 11550 } 11551 } while ( result == Result::eIncomplete ); 11552 if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) ) 11553 { 11554 counters.resize( counterCount ); 11555 counterDescriptions.resize( counterCount ); 11556 } 11557 return createResultValue( 11558 result, data, 
VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); 11559 } 11560 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11561 11562 template <typename Dispatch> getQueueFamilyPerformanceQueryPassesKHR(const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,uint32_t * pNumPasses,Dispatch const & d) const11563 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( 11564 const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, 11565 uint32_t * pNumPasses, 11566 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11567 { 11568 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( 11569 m_physicalDevice, 11570 reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), 11571 pNumPasses ); 11572 } 11573 11574 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11575 template <typename Dispatch> getQueueFamilyPerformanceQueryPassesKHR(const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,Dispatch const & d) const11576 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( 11577 const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11578 { 11579 uint32_t numPasses; 11580 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( 11581 m_physicalDevice, 11582 reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), 11583 &numPasses ); 11584 return numPasses; 11585 } 11586 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11587 11588 template <typename Dispatch> acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,Dispatch const & d) const11589 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( 11590 const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11591 { 11592 return static_cast<Result>( 11593 d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) ); 11594 } 11595 11596 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11597 template <typename Dispatch> 11598 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireProfilingLockKHR(const AcquireProfilingLockInfoKHR & info,Dispatch const & d) const11599 Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const 11600 { 11601 Result result = static_cast<Result>( 11602 d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) ); 11603 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); 11604 } 11605 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11606 11607 template <typename Dispatch> releaseProfilingLockKHR(Dispatch const & d) const11608 VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11609 { 11610 d.vkReleaseProfilingLockKHR( m_device ); 11611 } 11612 11613 //=== VK_KHR_get_surface_capabilities2 === 11614 11615 template <typename Dispatch> getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,Dispatch const & d) const11616 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( 11617 const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 11618 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, 11619 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11620 { 11621 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( 11622 m_physicalDevice, 11623 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 11624 reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) ); 11625 } 11626 11627 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11628 template <typename Dispatch> 11629 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 11630 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR(const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const11631 PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 11632 Dispatch const & d ) const 11633 { 11634 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; 11635 Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( 11636 m_physicalDevice, 11637 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 11638 reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) ); 11639 return createResultValue( 11640 result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); 11641 } 11642 11643 template <typename X, typename Y, typename... Z, typename Dispatch> 11644 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR(const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const11645 PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 11646 Dispatch const & d ) const 11647 { 11648 StructureChain<X, Y, Z...> structureChain; 11649 VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = 11650 structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>(); 11651 Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( 11652 m_physicalDevice, 11653 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 11654 reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) ); 11655 return createResultValue( 11656 result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); 11657 } 11658 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11659 11660 template <typename Dispatch> 11661 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pSurfaceFormatCount,VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,Dispatch const & d) const11662 PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, 11663 uint32_t * pSurfaceFormatCount, 11664 VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, 11665 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11666 { 11667 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 11668 m_physicalDevice, 11669 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), 11670 pSurfaceFormatCount, 11671 reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) ); 11672 } 11673 11674 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11675 template <typename 
SurfaceFormat2KHRAllocator, typename Dispatch> 11676 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11677 typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR(const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,Dispatch const & d) const11678 PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const 11679 { 11680 std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats; 11681 uint32_t surfaceFormatCount; 11682 Result result; 11683 do 11684 { 11685 result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 11686 m_physicalDevice, 11687 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 11688 &surfaceFormatCount, 11689 nullptr ) ); 11690 if ( ( result == Result::eSuccess ) && surfaceFormatCount ) 11691 { 11692 surfaceFormats.resize( surfaceFormatCount ); 11693 result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 11694 m_physicalDevice, 11695 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 11696 &surfaceFormatCount, 11697 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 11698 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 11699 } 11700 } while ( result == Result::eIncomplete ); 11701 if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) 11702 { 11703 surfaceFormats.resize( surfaceFormatCount ); 11704 } 11705 return createResultValue( 11706 result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); 11707 } 11708 11709 template <typename SurfaceFormat2KHRAllocator, 11710 typename Dispatch, 11711 typename B, 11712 typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type> 11713 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11714 typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR(const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,Dispatch const & d) const11715 PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 11716 SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, 11717 Dispatch const & d ) const 11718 { 11719 std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator ); 11720 uint32_t surfaceFormatCount; 11721 Result result; 11722 do 11723 { 11724 result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 11725 m_physicalDevice, 11726 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 11727 &surfaceFormatCount, 11728 nullptr ) ); 11729 if ( ( result == Result::eSuccess ) && surfaceFormatCount ) 11730 { 11731 surfaceFormats.resize( surfaceFormatCount ); 11732 result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( 11733 m_physicalDevice, 11734 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), 11735 &surfaceFormatCount, 11736 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); 11737 VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); 11738 } 11739 } while ( result == Result::eIncomplete ); 11740 if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) 11741 { 11742 surfaceFormats.resize( surfaceFormatCount ); 11743 } 11744 return createResultValue( 11745 result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getSurfaceFormats2KHR" ); 11746 } 11747 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11748 11749 //=== VK_KHR_get_display_properties2 === 11750 11751 template <typename Dispatch> 11752 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayProperties2KHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,Dispatch const & d) const11753 PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, 11754 VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, 11755 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11756 { 11757 return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( 11758 m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) ); 11759 } 11760 11761 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11762 template <typename DisplayProperties2KHRAllocator, typename Dispatch> 11763 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11764 typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR(Dispatch const & d) const11765 PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const 11766 { 11767 std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties; 11768 uint32_t propertyCount; 11769 Result result; 11770 do 11771 { 11772 result = 11773 static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 11774 if ( ( result == Result::eSuccess ) && propertyCount ) 11775 { 11776 properties.resize( propertyCount ); 11777 result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( 11778 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) ); 11779 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 11780 } 11781 } while ( result == Result::eIncomplete ); 11782 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 11783 { 11784 properties.resize( propertyCount ); 11785 } 11786 return createResultValue( 11787 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); 11788 } 11789 11790 template <typename DisplayProperties2KHRAllocator, 11791 typename Dispatch, 11792 typename B, 11793 typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type> 11794 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11795 typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR(DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,Dispatch const & d) const11796 PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, 11797 Dispatch const & d ) const 11798 { 11799 std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator ); 11800 uint32_t propertyCount; 11801 Result result; 11802 do 11803 { 11804 result = 11805 static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 11806 if ( ( result == Result::eSuccess ) && propertyCount ) 11807 { 11808 properties.resize( propertyCount ); 11809 result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( 11810 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) ); 11811 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 11812 } 11813 } while ( result == Result::eIncomplete ); 
11814 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 11815 { 11816 properties.resize( propertyCount ); 11817 } 11818 return createResultValue( 11819 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); 11820 } 11821 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11822 11823 template <typename Dispatch> 11824 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayPlaneProperties2KHR(uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,Dispatch const & d) const11825 PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, 11826 VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, 11827 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11828 { 11829 return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 11830 m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) ); 11831 } 11832 11833 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11834 template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch> 11835 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11836 typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR(Dispatch const & d) const11837 PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const 11838 { 11839 std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties; 11840 uint32_t propertyCount; 11841 Result result; 11842 do 11843 { 11844 result = static_cast<Result>( 11845 d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 11846 if ( ( result == Result::eSuccess ) && propertyCount ) 11847 { 11848 properties.resize( propertyCount ); 11849 result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 11850 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); 11851 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 11852 } 11853 } while ( result == Result::eIncomplete ); 11854 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 11855 { 11856 properties.resize( propertyCount ); 11857 } 11858 return createResultValue( 11859 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); 11860 } 11861 11862 template <typename DisplayPlaneProperties2KHRAllocator, 11863 typename Dispatch, 11864 typename B, 11865 typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type> 11866 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11867 typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR(DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator,Dispatch const & d) const11868 PhysicalDevice::getDisplayPlaneProperties2KHR( 11869 DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const 11870 { 11871 std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( 11872 displayPlaneProperties2KHRAllocator ); 11873 uint32_t propertyCount; 11874 Result result; 11875 do 11876 { 11877 result = static_cast<Result>( 11878 d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); 11879 if ( ( result == Result::eSuccess ) && propertyCount ) 11880 { 11881 
properties.resize( propertyCount ); 11882 result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( 11883 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); 11884 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 11885 } 11886 } while ( result == Result::eIncomplete ); 11887 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 11888 { 11889 properties.resize( propertyCount ); 11890 } 11891 return createResultValue( 11892 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); 11893 } 11894 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11895 11896 template <typename Dispatch> 11897 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,uint32_t * pPropertyCount,VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,Dispatch const & d) const11898 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, 11899 uint32_t * pPropertyCount, 11900 VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, 11901 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11902 { 11903 return static_cast<Result>( 11904 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, 11905 static_cast<VkDisplayKHR>( display ), 11906 pPropertyCount, 11907 reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) ); 11908 } 11909 11910 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11911 template <typename DisplayModeProperties2KHRAllocator, typename Dispatch> 11912 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11913 typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const11914 PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 11915 { 11916 std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties; 11917 uint32_t propertyCount; 11918 Result result; 11919 do 11920 { 11921 result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( 11922 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 11923 if ( ( result == Result::eSuccess ) && propertyCount ) 11924 { 11925 properties.resize( propertyCount ); 11926 result = static_cast<Result>( 11927 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, 11928 static_cast<VkDisplayKHR>( display ), 11929 &propertyCount, 11930 reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 11931 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 11932 } 11933 } while ( result == Result::eIncomplete ); 11934 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 11935 { 11936 properties.resize( propertyCount ); 11937 } 11938 return createResultValue( 11939 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 11940 } 11941 11942 template <typename DisplayModeProperties2KHRAllocator, 11943 typename Dispatch, 11944 typename B, 11945 typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type> 11946 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 11947 typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display,DisplayModeProperties2KHRAllocator & 
displayModeProperties2KHRAllocator,Dispatch const & d) const11948 PhysicalDevice::getDisplayModeProperties2KHR( 11949 VULKAN_HPP_NAMESPACE::DisplayKHR display, 11950 DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, 11951 Dispatch const & d ) const 11952 { 11953 std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( 11954 displayModeProperties2KHRAllocator ); 11955 uint32_t propertyCount; 11956 Result result; 11957 do 11958 { 11959 result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( 11960 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); 11961 if ( ( result == Result::eSuccess ) && propertyCount ) 11962 { 11963 properties.resize( propertyCount ); 11964 result = static_cast<Result>( 11965 d.vkGetDisplayModeProperties2KHR( m_physicalDevice, 11966 static_cast<VkDisplayKHR>( display ), 11967 &propertyCount, 11968 reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); 11969 VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); 11970 } 11971 } while ( result == Result::eIncomplete ); 11972 if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) 11973 { 11974 properties.resize( propertyCount ); 11975 } 11976 return createResultValue( 11977 result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); 11978 } 11979 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 11980 11981 template <typename Dispatch> getDisplayPlaneCapabilities2KHR(const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,Dispatch const & d) const11982 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( 11983 const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, 11984 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, 11985 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 11986 { 11987 return static_cast<Result>( 11988 d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, 11989 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ), 11990 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) ); 11991 } 11992 11993 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 11994 template <typename Dispatch> 11995 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 11996 typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR(const DisplayPlaneInfo2KHR & displayPlaneInfo,Dispatch const & d) const11997 PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, 11998 Dispatch const & d ) const 11999 { 12000 VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; 12001 Result result = static_cast<Result>( 12002 d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, 12003 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), 12004 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) ); 12005 return createResultValue( 12006 result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); 12007 } 12008 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 12009 12010 #if defined( VK_USE_PLATFORM_IOS_MVK ) 12011 //=== VK_MVK_ios_surface === 12012 12013 template <typename Dispatch> 12014 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createIOSSurfaceMVK(const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,const 
    Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                   VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                               reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo,
                                   Optional<const AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo,
                                         Optional<const AllocationCallbacks> allocator,
                                         Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_IOS_MVK*/
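
  // Illustrative usage sketch (not part of the generated header): creating an iOS surface through
  // the enhanced-mode wrapper above. `instance` and the UIView-backed pointer `view` are assumed,
  // caller-provided objects; the default allocator and dispatcher are used.
  //
  //   #if defined( VK_USE_PLATFORM_IOS_MVK )
  //     VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK surfaceCreateInfo( {}, view );
  //     VULKAN_HPP_NAMESPACE::SurfaceKHR surface = instance.createIOSSurfaceMVK( surfaceCreateInfo );
  //   #endif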

#if defined( VK_USE_PLATFORM_MACOS_MVK )
  //=== VK_MVK_macos_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                     VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo,
                                           Optional<const AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
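
  // Illustrative usage sketch (not part of the generated header): the Unique variant returns a
  // UniqueHandle so the surface is destroyed automatically. `instance` and the NSView/CAMetalLayer
  // pointer `view` are assumed to exist; default allocator and dispatcher apply.
  //
  //   #if defined( VK_USE_PLATFORM_MACOS_MVK )
  //     VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK createInfo( {}, view );
  //     VULKAN_HPP_NAMESPACE::UniqueSurfaceKHR surface = instance.createMacOSSurfaceMVKUnique( createInfo );
  //   #endif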

  //=== VK_EXT_debug_utils ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT(
    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
      m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
      m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT(
    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
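
  // Illustrative usage sketch (not part of the generated header): attaching a human-readable name
  // to a handle so debugging tools can display it. `device` and `buffer` are assumed valid handles.
  //
  //   VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT nameInfo(
  //     VULKAN_HPP_NAMESPACE::ObjectType::eBuffer,
  //     uint64_t( static_cast<VkBuffer>( buffer ) ),
  //     "vertex buffer" );
  //   device.setDebugUtilsObjectNameEXT( nameInfo );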

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
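
  // Illustrative usage sketch (not part of the generated header): bracketing a region of work in a
  // command buffer with a debug label; `commandBuffer` is assumed to be in the recording state.
  //
  //   commandBuffer.beginDebugUtilsLabelEXT( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT( "shadow pass" ) );
  //   // ... record draw commands ...
  //   commandBuffer.endDebugUtilsLabelEXT();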

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                        reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                        reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
    Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    Result result = static_cast<Result>(
      d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                        reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
    return createResultValue(
      result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
    Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo,
                                                  Optional<const AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    Result result = static_cast<Result>(
      d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                        reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>(
      result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
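
  // Illustrative usage sketch (not part of the generated header): installing a debug messenger.
  // `debugCallback` is a hypothetical function with the PFN_vkDebugUtilsMessengerCallbackEXT
  // signature, and `instance` is assumed valid.
  //
  //   using Severity = VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT;
  //   using Type     = VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT;
  //   VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT createInfo(
  //     {}, Severity::eWarning | Severity::eError,
  //     Type::eGeneral | Type::eValidation | Type::ePerformance, debugCallback );
  //   auto messenger = instance.createDebugUtilsMessengerEXTUnique( createInfo );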

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugUtilsMessengerEXT( m_instance,
                                       static_cast<VkDebugUtilsMessengerEXT>( messenger ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                             Optional<const AllocationCallbacks> allocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugUtilsMessengerEXT( m_instance,
                                       static_cast<VkDebugUtilsMessengerEXT>( messenger ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT(
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
    const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                          VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
                                          const DebugUtilsMessengerCallbackDataEXT & callbackData,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
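
  // Illustrative usage sketch (not part of the generated header): injecting a custom message into
  // the debug stream, e.g. to make an application milestone visible to the messenger callback.
  //
  //   VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT callbackData( {}, "app", 0, "frame capture started" );
  //   instance.submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eInfo,
  //                                        VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::eGeneral,
  //                                        callbackData );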

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //=== VK_ANDROID_external_memory_android_hardware_buffer ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID(
    const struct AHardwareBuffer * buffer,
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
      m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
      m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
      m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
    return createResultValue(
      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID(
    const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
    struct AHardwareBuffer ** pBuffer,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
      m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
    Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info,
                                                   Dispatch const & d ) const
  {
    struct AHardwareBuffer * buffer;
    Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
      m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
    return createResultValue(
      result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
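
  // Illustrative usage sketch (not part of the generated header): querying the properties of an
  // AHardwareBuffer and chaining AndroidHardwareBufferFormatPropertiesANDROID via a StructureChain.
  // `device` and the AHardwareBuffer pointer `hardwareBuffer` are assumed, caller-provided objects.
  //
  //   #if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //     auto chain = device.getAndroidHardwareBufferPropertiesANDROID<
  //       VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID,
  //       VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>( *hardwareBuffer );
  //     VULKAN_HPP_NAMESPACE::DeviceSize size =
  //       chain.get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>().allocationSize;
  //   #endif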

  //=== VK_EXT_sample_locations ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
                                  reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
                                  reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT(
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
      m_physicalDevice,
      static_cast<VkSampleCountFlagBits>( samples ),
      reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
    PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
      m_physicalDevice,
      static_cast<VkSampleCountFlagBits>( samples ),
      reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
    return multisampleProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
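
  // Illustrative usage sketch (not part of the generated header): querying the sample-location
  // limits for a sample count and overriding the positions for subsequent draws. The concrete
  // positions shown here are arbitrary; `physicalDevice` and `commandBuffer` are assumed valid.
  //
  //   VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT props =
  //     physicalDevice.getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4 );
  //   std::array<VULKAN_HPP_NAMESPACE::SampleLocationEXT, 4> locations =
  //     { VULKAN_HPP_NAMESPACE::SampleLocationEXT( 0.25f, 0.25f ) /* , ... remaining positions */ };
  //   VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT info(
  //     VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4, VULKAN_HPP_NAMESPACE::Extent2D( 1, 1 ), locations );
  //   commandBuffer.setSampleLocationsEXT( info );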

  //=== VK_KHR_get_memory_requirements2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                            VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageMemoryRequirements2KHR( m_device,
                                        reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
                                        reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetImageMemoryRequirements2KHR( m_device,
                                        reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
                                        reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename...
Z, typename Dispatch> 12515 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR(const ImageMemoryRequirementsInfo2 & info,Dispatch const & d) const12516 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, 12517 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12518 { 12519 StructureChain<X, Y, Z...> structureChain; 12520 VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = 12521 structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); 12522 d.vkGetImageMemoryRequirements2KHR( m_device, 12523 reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), 12524 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 12525 return structureChain; 12526 } 12527 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 12528 12529 template <typename Dispatch> 12530 VULKAN_HPP_INLINE void getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,Dispatch const & d) const12531 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, 12532 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, 12533 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12534 { 12535 d.vkGetBufferMemoryRequirements2KHR( m_device, 12536 reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), 12537 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); 12538 } 12539 12540 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12541 template <typename Dispatch> 12542 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(const BufferMemoryRequirementsInfo2 & info,Dispatch const & d) const12543 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, 12544 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12545 { 12546 VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; 12547 d.vkGetBufferMemoryRequirements2KHR( m_device, 12548 reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), 12549 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); 12550 return memoryRequirements; 12551 } 12552 12553 template <typename X, typename Y, typename... 
    Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetBufferMemoryRequirements2KHR( m_device,
                                         reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
                                         reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
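
  // Illustrative usage sketch (not part of the generated header): using the StructureChain
  // overload to query MemoryDedicatedRequirements together with the basic requirements of an
  // image; `device` and `image` are assumed, caller-provided handles.
  //
  //   auto chain = device.getImageMemoryRequirements2KHR<VULKAN_HPP_NAMESPACE::MemoryRequirements2,
  //                                                      VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>(
  //     VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2( image ) );
  //   bool wantsDedicated =
  //     chain.get<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>().prefersDedicatedAllocation;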
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements2KHR(
      const ImageSparseMemoryRequirementsInfo2 & info,
      SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
      Dispatch const & d ) const
  {
    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements2KHR( m_device,
                                              reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                              &sparseMemoryRequirementCount,
                                              nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2KHR(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
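
  // Usage sketch (comment only, not part of the generated API): the StructureChain overload above can
  // be used to query dedicated-allocation information together with the base requirements. Assumes
  // `device` is a vk::Device, `buffer` is a vk::Buffer, and the dispatcher has the
  // VK_KHR_get_memory_requirements2 entry points loaded, e.g.
  //
  //   auto chain = device.getBufferMemoryRequirements2KHR<vk::MemoryRequirements2,
  //                                                        vk::MemoryDedicatedRequirements>(
  //     vk::BufferMemoryRequirementsInfo2( buffer ) );
  //   vk::MemoryRequirements2         reqs      = chain.get<vk::MemoryRequirements2>();
  //   vk::MemoryDedicatedRequirements dedicated = chain.get<vk::MemoryDedicatedRequirements>();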
VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); 12668 } 12669 12670 # ifndef VULKAN_HPP_NO_SMART_HANDLE 12671 template <typename Dispatch> 12672 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 12673 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique(const AccelerationStructureCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const12674 Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, 12675 Optional<const AllocationCallbacks> allocator, 12676 Dispatch const & d ) const 12677 { 12678 VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; 12679 Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( 12680 m_device, 12681 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), 12682 reinterpret_cast<const VkAllocationCallbacks *>( 12683 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 12684 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); 12685 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 12686 return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( 12687 result, 12688 accelerationStructure, 12689 VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique", 12690 deleter ); 12691 } 12692 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 12693 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 12694 12695 template <typename Dispatch> 12696 VULKAN_HPP_INLINE void destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const12697 Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 12698 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12699 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12700 { 12701 d.vkDestroyAccelerationStructureKHR( m_device, 12702 static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 12703 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 12704 } 12705 12706 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 12707 template <typename Dispatch> 12708 VULKAN_HPP_INLINE void destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const12709 Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 12710 Optional<const AllocationCallbacks> allocator, 12711 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 12712 { 12713 d.vkDestroyAccelerationStructureKHR( 12714 m_device, 12715 static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 12716 reinterpret_cast<const VkAllocationCallbacks *>( 12717 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 12718 } 12719 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 12720 12721 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const12722 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, 12723 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 12724 Dispatch const 

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureKHR( m_device,
                                         static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureKHR(
      m_device,
      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
    uint32_t infoCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBuildAccelerationStructuresKHR(
      m_commandBuffer,
      infoCount,
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
      reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
#  else
    if ( infos.size() != pBuildRangeInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBuildAccelerationStructuresKHR(
      m_commandBuffer,
      infos.size(),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
      reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
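
  // Usage sketch (comment only): recording a single device-side build through the ArrayProxy overload
  // above. `cmd`, `buildInfo` and `rangeInfo` are assumed to be set up by the application; the range
  // pointer array must have one entry per build info.
  //
  //   const vk::AccelerationStructureBuildRangeInfoKHR * pRangeInfo = &rangeInfo;
  //   cmd.buildAccelerationStructuresKHR( buildInfo, pRangeInfo );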

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
    uint32_t infoCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
    const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
    const uint32_t * pIndirectStrides,
    const uint32_t * const * ppMaxPrimitiveCounts,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBuildAccelerationStructuresIndirectKHR(
      m_commandBuffer,
      infoCount,
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
      reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
      pIndirectStrides,
      ppMaxPrimitiveCounts );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
    ArrayProxy<const uint32_t> const & indirectStrides,
    ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
    VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
    VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
#  else
    if ( infos.size() != indirectDeviceAddresses.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
    }
    if ( infos.size() != indirectStrides.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
    }
    if ( infos.size() != pMaxPrimitiveCounts.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBuildAccelerationStructuresIndirectKHR(
      m_commandBuffer,
      infos.size(),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
      reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
      indirectStrides.data(),
      pMaxPrimitiveCounts.data() );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
    uint32_t infoCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      infoCount,
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
      reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
#  else
    if ( infos.size() != pBuildRangeInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    Result result = static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      infos.size(),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
      reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
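
  // Note (comment only): unlike most enhanced-mode wrappers, the host build above returns the raw
  // vk::Result, because eOperationDeferredKHR and eOperationNotDeferredKHR are success codes that the
  // caller still has to distinguish, e.g.
  //
  //   vk::Result r = device.buildAccelerationStructuresKHR( deferredOp, buildInfo, pRangeInfo );
  //   if ( r == vk::Result::eOperationDeferredKHR ) { /* drive the deferred operation to completion */ }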

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                          const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCopyAccelerationStructureKHR( m_device,
                                        static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                        reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                          const CopyAccelerationStructureInfoKHR & info,
                                          Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkCopyAccelerationStructureKHR( m_device,
                                        static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                        reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
    const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                  const CopyAccelerationStructureToMemoryInfoKHR & info,
                                                  Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
    const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                  const CopyMemoryToAccelerationStructureInfoKHR & info,
                                                  Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR(
    uint32_t accelerationStructureCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    size_t dataSize,
    void * pData,
    size_t stride,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
      m_device,
      accelerationStructureCount,
      reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
      static_cast<VkQueryType>( queryType ),
      dataSize,
      pData,
      stride ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<void>::type Device::writeAccelerationStructuresPropertiesKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType queryType,
      ArrayProxy<T> const & data,
      size_t stride,
      Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
      m_device,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      data.size() * sizeof( T ),
      reinterpret_cast<void *>( data.data() ),
      stride ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
    Device::writeAccelerationStructuresPropertiesKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType queryType,
      size_t dataSize,
      size_t stride,
      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
      m_device,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      data.size() * sizeof( T ),
      reinterpret_cast<void *>( data.data() ),
      stride ) );
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
    Device::writeAccelerationStructuresPropertyKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType queryType,
      size_t stride,
      Dispatch const & d ) const
  {
    T data;
    Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
      m_device,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      sizeof( T ),
      reinterpret_cast<void *>( &data ),
      stride ) );
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
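
  // Usage sketch (comment only): reading back the compacted size of an already built acceleration
  // structure on the host path. Assumes `blas` is a vk::AccelerationStructureKHR and that the device
  // supports host commands (accelerationStructureHostCommands); otherwise the query-pool based
  // CommandBuffer::writeAccelerationStructuresPropertiesKHR below has to be used instead.
  //
  //   vk::DeviceSize compactedSize = device.writeAccelerationStructuresPropertyKHR<vk::DeviceSize>(
  //     blas, vk::QueryType::eAccelerationStructureCompactedSizeKHR, sizeof( vk::DeviceSize ) );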

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
                                         reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
                                         reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
    const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureToMemoryKHR(
      m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureToMemoryKHR(
      m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
    const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyMemoryToAccelerationStructureKHR(
      m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyMemoryToAccelerationStructureKHR(
      m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
    const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR(
      m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
    const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetAccelerationStructureDeviceAddressKHR(
      m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
    uint32_t accelerationStructureCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesKHR(
      m_commandBuffer,
      accelerationStructureCount,
      reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
      static_cast<VkQueryType>( queryType ),
      static_cast<VkQueryPool>( queryPool ),
      firstQuery );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesKHR(
      m_commandBuffer,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      static_cast<VkQueryPool>( queryPool ),
      firstQuery );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
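
  // Usage sketch (comment only): fetching the device address of a bottom-level acceleration structure,
  // e.g. to store it in vk::AccelerationStructureInstanceKHR::accelerationStructureReference. Assumes
  // `blas` is a vk::AccelerationStructureKHR created on `device`.
  //
  //   vk::DeviceAddress blasAddress = device.getAccelerationStructureAddressKHR(
  //     vk::AccelerationStructureDeviceAddressInfoKHR( blas ) );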

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR(
    const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceAccelerationStructureCompatibilityKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
      reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
    Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
    d.vkGetDeviceAccelerationStructureCompatibilityKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
      reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
    return compatibility;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR(
    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
    const uint32_t * pMaxPrimitiveCounts,
    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetAccelerationStructureBuildSizesKHR(
      m_device,
      static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
      pMaxPrimitiveCounts,
      reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
    Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
                                                   const AccelerationStructureBuildGeometryInfoKHR & buildInfo,
                                                   ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
#  else
    if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
    d.vkGetAccelerationStructureBuildSizesKHR(
      m_device,
      static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
      maxPrimitiveCounts.data(),
      reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
    return sizeInfo;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
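
  // Usage sketch (comment only): querying the scratch and storage sizes for a build. The ArrayProxy of
  // maximum primitive counts must have one entry per geometry in `buildInfo`; here a single geometry
  // with `primitiveCount` primitives is assumed.
  //
  //   vk::AccelerationStructureBuildSizesInfoKHR buildSizes = device.getAccelerationStructureBuildSizesKHR(
  //     vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, primitiveCount );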

  //=== VK_KHR_sampler_ycbcr_conversion ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateSamplerYcbcrConversionKHR( m_device,
                                           reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                           reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
    Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo,
                                             Optional<const AllocationCallbacks> allocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    return createResultValue(
      result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
    Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
                                                   Optional<const AllocationCallbacks> allocator,
                                                   Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
      result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversionKHR( m_device,
                                          static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                              Optional<const AllocationCallbacks> allocator,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversionKHR(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
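
  // Usage sketch (comment only): creating a Y'CbCr conversion and chaining it into a sampler create
  // info. The format and conversion parameters are application choices and must be supported by the
  // implementation.
  //
  //   vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversionKHR(
  //     vk::SamplerYcbcrConversionCreateInfo( vk::Format::eG8B8R83Plane420Unorm,
  //                                           vk::SamplerYcbcrModelConversion::eYcbcr709,
  //                                           vk::SamplerYcbcrRange::eItuNarrow ) );
  //   vk::StructureChain<vk::SamplerCreateInfo, vk::SamplerYcbcrConversionInfo> samplerCreateChain(
  //     vk::SamplerCreateInfo{}, vk::SamplerYcbcrConversionInfo( conversion ) );
  //   vk::Sampler sampler = device.createSampler( samplerCreateChain.get<vk::SamplerCreateInfo>() );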

  //=== VK_KHR_bind_memory2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::bindBufferMemory2KHR( uint32_t bindInfoCount,
                                  const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindBufferMemory2KHR(
      m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
                                  Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkBindBufferMemory2KHR(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::bindImageMemory2KHR( uint32_t bindInfoCount,
                                 const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindImageMemory2KHR(
      m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
                                 Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkBindImageMemory2KHR(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
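
  // Usage sketch (comment only): binding several images in one call through the ArrayProxy overload
  // above; `bindInfos` is assumed to be a std::vector<vk::BindImageMemoryInfo> prepared by the
  // application.
  //
  //   device.bindImageMemory2KHR( bindInfos );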

  //=== VK_EXT_image_drm_format_modifier ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
    VULKAN_HPP_NAMESPACE::Image image,
    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
      m_device,
      static_cast<VkImage>( image ),
      reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
    Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
    Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
      m_device,
      static_cast<VkImage>( image ),
      reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_EXT_validation_cache ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                      VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateValidationCacheEXT( m_device,
                                    reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                    reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
    Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo,
                                      Optional<const AllocationCallbacks> allocator,
                                      Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
    Result result = static_cast<Result>(
      d.vkCreateValidationCacheEXT( m_device,
                                    reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
    return createResultValue(
      result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
    Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
    Result result = static_cast<Result>(
      d.vkCreateValidationCacheEXT( m_device,
                                    reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>(
      result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyValidationCacheEXT( m_device,
                                   static_cast<VkValidationCacheEXT>( validationCache ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                                            Optional<const AllocationCallbacks> allocator,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyValidationCacheEXT( m_device,
                                   static_cast<VkValidationCacheEXT>( validationCache ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyValidationCacheEXT( m_device,
                                   static_cast<VkValidationCacheEXT>( validationCache ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyValidationCacheEXT( m_device,
                                   static_cast<VkValidationCacheEXT>( validationCache ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
                                      uint32_t srcCacheCount,
                                      const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkMergeValidationCachesEXT( m_device,
                                    static_cast<VkValidationCacheEXT>( dstCache ),
                                    srcCacheCount,
                                    reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
                                      Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkMergeValidationCachesEXT( m_device,
                                    static_cast<VkValidationCacheEXT>( dstCache ),
                                    srcCaches.size(),
                                    reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                       size_t * pDataSize,
                                       void * pData,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetValidationCacheDataEXT(
      m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                       Dispatch const & d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t dataSize;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetValidationCacheDataEXT(
        m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
      if ( ( result == Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result =
          static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device,
                                                              static_cast<VkValidationCacheEXT>( validationCache ),
                                                              &dataSize,
                                                              reinterpret_cast<void *>( data.data() ) ) );
        VULKAN_HPP_ASSERT( dataSize <= data.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
    {
      data.resize( dataSize );
    }
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
  }

  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                       Uint8_tAllocator & uint8_tAllocator,
                                       Dispatch const & d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
    size_t dataSize;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetValidationCacheDataEXT(
        m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
      if ( ( result == Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<Result>(
          d.vkGetValidationCacheDataEXT( m_device,
                                         static_cast<VkValidationCacheEXT>( validationCache ),
                                         &dataSize,
                                         reinterpret_cast<void *>( data.data() ) ) );
        VULKAN_HPP_ASSERT( dataSize <= data.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
    {
      data.resize( dataSize );
    }
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
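
  // Illustrative usage sketch (not part of the generated header): merging validation caches and
  // reading the cache blob back through the enhanced-mode wrappers above. Assumes exceptions and
  // the default dispatcher are enabled; `device`, `dstCache` and `srcCache` are hypothetical
  // handles created elsewhere (e.g. via Device::createValidationCacheEXT).
  //
  //   device.mergeValidationCachesEXT( dstCache, srcCache );
  //   std::vector<uint8_t> blob = device.getValidationCacheDataEXT( dstCache );
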
  //=== VK_NV_shading_rate_image ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
                                                                VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindShadingRateImageNV(
      m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
    uint32_t firstViewport,
    uint32_t viewportCount,
    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer,
                                            firstViewport,
                                            viewportCount,
                                            reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
    uint32_t firstViewport,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportShadingRatePaletteNV(
      m_commandBuffer,
      firstViewport,
      shadingRatePalettes.size(),
      reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
                                           uint32_t customSampleOrderCount,
                                           const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
                                   static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
                                   customSampleOrderCount,
                                   reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV(
    VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
                                   static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
                                   customSampleOrders.size(),
                                   reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
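
  // Illustrative usage sketch (not part of the generated header): recording VK_NV_shading_rate_image
  // state through the ArrayProxy overloads above. Assumes the extension is enabled and that
  // `commandBuffer`, `shadingRateView` and `palette` are hypothetical objects set up elsewhere.
  //
  //   commandBuffer.bindShadingRateImageNV( shadingRateView, vk::ImageLayout::eShadingRateOptimalNV );
  //   commandBuffer.setViewportShadingRatePaletteNV( 0, palette );  // single element covers viewport 0
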
  //=== VK_NV_ray_tracing ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                           VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateAccelerationStructureNV( m_device,
                                         reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                         reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
    Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo,
                                           Optional<const AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
    return createResultValue(
      result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
    Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo,
                                                 Optional<const AllocationCallbacks> allocator,
                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>(
      result,
      accelerationStructure,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique",
      deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureNV( m_device,
                                        static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureNV(
      m_device,
      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureNV( m_device,
                                        static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureNV(
      m_device,
      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV(
    const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetAccelerationStructureMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
      reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
    Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
    d.vkGetAccelerationStructureMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
    return memoryRequirements;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
    d.vkGetAccelerationStructureMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
    uint32_t bindInfoCount,
    const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
      m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindAccelerationStructureMemoryNV(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
      Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
      m_device,
      bindInfos.size(),
      reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
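
  // Illustrative usage sketch (not part of the generated header): creating an NV acceleration
  // structure, querying its memory requirements and binding memory through the enhanced-mode
  // wrappers above. Assumes exceptions are enabled; `device`, `createInfo` and `memory` are
  // hypothetical objects created elsewhere.
  //
  //   vk::AccelerationStructureNV as = device.createAccelerationStructureNV( createInfo );
  //   vk::MemoryRequirements2KHR reqs = device.getAccelerationStructureMemoryRequirementsNV(
  //     { vk::AccelerationStructureMemoryRequirementsTypeNV::eObject, as } );
  //   device.bindAccelerationStructureMemoryNV( vk::BindAccelerationStructureMemoryInfoNV( as, memory ) );
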
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
                                                 VULKAN_HPP_NAMESPACE::Buffer instanceData,
                                                 VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
                                                 VULKAN_HPP_NAMESPACE::Bool32 update,
                                                 VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
                                                 VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
                                                 VULKAN_HPP_NAMESPACE::Buffer scratch,
                                                 VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info,
                                                                      VULKAN_HPP_NAMESPACE::Buffer instanceData,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
                                                                      VULKAN_HPP_NAMESPACE::Bool32 update,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
                                                                      VULKAN_HPP_NAMESPACE::Buffer scratch,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
                                                VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
                                                VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
                                        static_cast<VkAccelerationStructureNV>( dst ),
                                        static_cast<VkAccelerationStructureNV>( src ),
                                        static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
  }
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
                                                     uint32_t width,
                                                     uint32_t height,
                                                     uint32_t depth,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysNV( m_commandBuffer,
                        static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
                        static_cast<VkBuffer>( missShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( missShaderBindingOffset ),
                        static_cast<VkDeviceSize>( missShaderBindingStride ),
                        static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( hitShaderBindingOffset ),
                        static_cast<VkDeviceSize>( hitShaderBindingStride ),
                        static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( callableShaderBindingOffset ),
                        static_cast<VkDeviceSize>( callableShaderBindingStride ),
                        width,
                        height,
                        depth );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                         uint32_t createInfoCount,
                                         const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                         VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfoCount,
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                       reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesNV(
      VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks> allocator,
      Dispatch const & d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    Result result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfos.size(),
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue(
      result,
      pipelines,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename PipelineAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesNV(
      VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks> allocator,
      PipelineAllocator & pipelineAllocator,
      Dispatch const & d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    Result result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfos.size(),
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue(
      result,
      pipelines,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
    Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                        const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                        Optional<const AllocationCallbacks> allocator,
                                        Dispatch const & d ) const
  {
    Pipeline pipeline;
    Result result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       1,
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    return createResultValue(
      result,
      pipeline,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesNVUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks> allocator,
      Dispatch const & d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    std::vector<Pipeline> pipelines( createInfos.size() );
    Result result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfos.size(),
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue(
      result,
      std::move( uniquePipelines ),
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }
  template <
    typename Dispatch,
    typename PipelineAllocator,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesNVUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks> allocator,
      PipelineAllocator & pipelineAllocator,
      Dispatch const & d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    std::vector<Pipeline> pipelines( createInfos.size() );
    Result result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfos.size(),
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue(
      result,
      std::move( uniquePipelines ),
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
    Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                              const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                              Optional<const AllocationCallbacks> allocator,
                                              Dispatch const & d ) const
  {
    Pipeline pipeline;
    Result result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       1,
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<Pipeline, Dispatch>(
      result,
      pipeline,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
      deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
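
  // Illustrative usage sketch (not part of the generated header): the createRayTracingPipelinesNV
  // wrappers above return a ResultValue (not ResultValueType) because ePipelineCompileRequiredEXT
  // is treated as an additional success code, so the result still has to be inspected even when
  // exceptions are enabled. `device`, `pipelineCache` and `createInfos` are hypothetical objects
  // set up elsewhere.
  //
  //   auto rv = device.createRayTracingPipelinesNV( pipelineCache, createInfos );
  //   if ( rv.result == vk::Result::eSuccess )
  //   {
  //     std::vector<vk::Pipeline> pipelines = std::move( rv.value );
  //   }
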
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                               uint32_t firstGroup,
                                               uint32_t groupCount,
                                               size_t dataSize,
                                               void * pData,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                               uint32_t firstGroup,
                                               uint32_t groupCount,
                                               ArrayProxy<T> const & data,
                                               Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkGetRayTracingShaderGroupHandlesNV( m_device,
                                             static_cast<VkPipeline>( pipeline ),
                                             firstGroup,
                                             groupCount,
                                             data.size() * sizeof( T ),
                                             reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
    Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                               uint32_t firstGroup,
                                               uint32_t groupCount,
                                               size_t dataSize,
                                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result result = static_cast<Result>(
      d.vkGetRayTracingShaderGroupHandlesNV( m_device,
                                             static_cast<VkPipeline>( pipeline ),
                                             firstGroup,
                                             groupCount,
                                             data.size() * sizeof( T ),
                                             reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
    Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                              uint32_t firstGroup,
                                              uint32_t groupCount,
                                              Dispatch const & d ) const
  {
    T data;
    Result result = static_cast<Result>(
      d.vkGetRayTracingShaderGroupHandlesNV( m_device,
                                             static_cast<VkPipeline>( pipeline ),
                                             firstGroup,
                                             groupCount,
                                             sizeof( T ),
                                             reinterpret_cast<void *>( &data ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
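
  // Illustrative usage sketch (not part of the generated header): fetching shader group handles
  // for a ray tracing pipeline into a byte vector sized from the physical-device properties.
  // Assumes exceptions are enabled; `device`, `pipeline`, `groupCount` and `handleSize` (the
  // shaderGroupHandleSize reported by the NV ray tracing properties) are hypothetical values
  // obtained elsewhere.
  //
  //   std::vector<uint8_t> handles =
  //     device.getRayTracingShaderGroupHandlesNV<uint8_t>( pipeline, 0, groupCount, groupCount * handleSize );
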
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                              size_t dataSize,
                                              void * pData,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                              ArrayProxy<T> const & data,
                                              Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device,
                                            static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                            data.size() * sizeof( T ),
                                            reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                              size_t dataSize,
                                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result result = static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device,
                                            static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                            data.size() * sizeof( T ),
                                            reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                              Dispatch const & d ) const
  {
    T data;
    Result result = static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device,
                                            static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                            sizeof( T ),
                                            reinterpret_cast<void *>( &data ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
    uint32_t accelerationStructureCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesNV(
      m_commandBuffer,
      accelerationStructureCount,
      reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
      static_cast<VkQueryType>( queryType ),
      static_cast<VkQueryPool>( queryPool ),
      firstQuery );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesNV(
      m_commandBuffer,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      static_cast<VkQueryPool>( queryPool ),
      firstQuery );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_KHR_maintenance3 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                              VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
    Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return support;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
      structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
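
  // Illustrative usage sketch (not part of the generated header): querying descriptor set layout
  // support together with the variable-descriptor-count support structure via the StructureChain
  // overload above. Assumes exceptions are enabled; `device` and `layoutCreateInfo` are
  // hypothetical objects set up elsewhere.
  //
  //   auto chain = device.getDescriptorSetLayoutSupportKHR<vk::DescriptorSetLayoutSupport,
  //                                                        vk::DescriptorSetVariableDescriptorCountLayoutSupport>(
  //     layoutCreateInfo );
  //   uint32_t maxCount =
  //     chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;
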
  //=== VK_KHR_draw_indirect_count ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                              VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                              uint32_t maxDrawCount,
                                                              uint32_t stride,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
                                 static_cast<VkBuffer>( buffer ),
                                 static_cast<VkDeviceSize>( offset ),
                                 static_cast<VkBuffer>( countBuffer ),
                                 static_cast<VkDeviceSize>( countBufferOffset ),
                                 maxDrawCount,
                                 stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                     VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                     uint32_t maxDrawCount,
                                                                     uint32_t stride,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
                                        static_cast<VkBuffer>( buffer ),
                                        static_cast<VkDeviceSize>( offset ),
                                        static_cast<VkBuffer>( countBuffer ),
                                        static_cast<VkDeviceSize>( countBufferOffset ),
                                        maxDrawCount,
                                        stride );
  }

  //=== VK_EXT_external_memory_host ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT(
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
    const void * pHostPointer,
    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      pHostPointer,
      reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
    Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               const void * pHostPointer,
                                               Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
    Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      pHostPointer,
      reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
    return createResultValue(
      result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_AMD_buffer_marker ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                                                              VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                              uint32_t marker,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
                                 static_cast<VkPipelineStageFlagBits>( pipelineStage ),
                                 static_cast<VkBuffer>( dstBuffer ),
                                 static_cast<VkDeviceSize>( dstOffset ),
                                 marker );
  }
  //=== VK_EXT_calibrated_timestamps ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount,
                                                    VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
      m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename TimeDomainEXTAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
    PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
  {
    std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
    uint32_t timeDomainCount;
    Result result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && timeDomainCount )
      {
        timeDomains.resize( timeDomainCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
          m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
        VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
    {
      timeDomains.resize( timeDomainCount );
    }
    return createResultValue(
      result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
  }

  template <typename TimeDomainEXTAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
    PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
                                                    Dispatch const & d ) const
  {
    std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
    uint32_t timeDomainCount;
    Result result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && timeDomainCount )
      {
        timeDomains.resize( timeDomainCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
          m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
        VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
    {
      timeDomains.resize( timeDomainCount );
    }
    return createResultValue(
      result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getCalibratedTimestampsEXT( uint32_t timestampCount,
                                        const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
                                        uint64_t * pTimestamps,
                                        uint64_t * pMaxDeviation,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetCalibratedTimestampsEXT( m_device,
                                      timestampCount,
                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ),
                                      pTimestamps,
                                      pMaxDeviation ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
    Device::getCalibratedTimestampsEXT(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
      ArrayProxy<uint64_t> const & timestamps,
      Dispatch const & d ) const
  {
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
# else
    if ( timestampInfos.size() != timestamps.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    uint64_t maxDeviation;
    Result result = static_cast<Result>(
      d.vkGetCalibratedTimestampsEXT( m_device,
                                      timestampInfos.size(),
                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
                                      timestamps.data(),
                                      &maxDeviation ) );
    return createResultValue(
      result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
  }

  template <typename Uint64_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
    Device::getCalibratedTimestampsEXT(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
      Dispatch const & d ) const
  {
    std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
      std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
    std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
    uint64_t & maxDeviation = data.second;
    Result result = static_cast<Result>(
      d.vkGetCalibratedTimestampsEXT( m_device,
                                      timestampInfos.size(),
                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
                                      timestamps.data(),
                                      &maxDeviation ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
  }

  //=== VK_NV_mesh_shader ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                 uint32_t drawCount, uint32_t stride,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMeshTasksIndirectNV(
      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                 VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                 uint32_t maxDrawCount, uint32_t stride,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
                                         static_cast<VkBuffer>( buffer ),
                                         static_cast<VkDeviceSize>( offset ),
                                         static_cast<VkBuffer>( countBuffer ),
                                         static_cast<VkDeviceSize>( countBufferOffset ),
                                         maxDrawCount, stride );
  }
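
  // Illustrative only (assumes "commandBuffer" is in the recording state and a mesh-shading
  // graphics pipeline is already bound):
  //
  //   commandBuffer.drawMeshTasksNV( /*taskCount=*/32, /*firstTask=*/0 );
  //   // or read the draw parameters from a buffer written on the GPU:
  //   commandBuffer.drawMeshTasksIndirectNV( indirectBuffer, /*offset=*/0, /*drawCount=*/1,
  //                                          /*stride=*/sizeof( VkDrawMeshTasksIndirectCommandNV ) );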

  //=== VK_NV_scissor_exclusive ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount,
                                          const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount,
                                  reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(),
                                  reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_NV_device_diagnostic_checkpoints ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetQueueCheckpointDataNV(
      m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename CheckpointDataNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
    Queue::getCheckpointDataNV( Dispatch const & d ) const
  {
    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
    uint32_t checkpointDataCount;
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointDataNV(
      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    return checkpointData;
  }

  template <typename CheckpointDataNVAllocator, typename Dispatch, typename B,
            typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
    Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
  {
    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
    uint32_t checkpointDataCount;
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointDataNV(
      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    return checkpointData;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
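
  // Illustrative only: tagging submissions with checkpoint markers and reading them back after a
  // device-lost error; "commandBuffer" and "queue" are assumed to be valid handles.
  //
  //   static const char marker[] = "after-gbuffer-pass";
  //   commandBuffer.setCheckpointNV( marker );
  //   ...
  //   std::vector<vk::CheckpointDataNV> checkpoints = queue.getCheckpointDataNV();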

  //=== VK_KHR_timeline_semaphore ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR(
    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
    Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
  {
    uint64_t value;
    Result result =
      static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout,
                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo,
                                                                           uint64_t timeout, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR(
    const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
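
  // Illustrative only: waiting on and signalling a timeline semaphore through the KHR entry points,
  // assuming exceptions are enabled and "timelineSemaphore" was created with a SemaphoreTypeCreateInfo.
  //
  //   vk::SemaphoreWaitInfo waitInfo( {}, 1, &timelineSemaphore, &waitValue );
  //   vk::Result waitResult = device.waitSemaphoresKHR( waitInfo, /*timeout=*/UINT64_MAX );  // eSuccess or eTimeout
  //   device.signalSemaphoreKHR( vk::SemaphoreSignalInfo( timelineSemaphore, signalValue ) );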

  //=== VK_INTEL_performance_query ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
    const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkInitializePerformanceApiINTEL(
      m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo,
                                           Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL(
      m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUninitializePerformanceApiINTEL( m_device );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo,
                                                    Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo,
                                                Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
      m_device,
      reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
      reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
    Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
    Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
      m_device,
      reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
      reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
    return createResultValue(
      result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
    Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
                                                        Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
    Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
      m_device,
      reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
      reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
    ObjectRelease<Device, Dispatch> deleter( *this, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>(
      result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique", deleter );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL(
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
      m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                                  Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
      m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release(
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
      m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
      m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL(
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
      m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                             Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
      m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
                                          VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPerformanceParameterINTEL( m_device,
                                        static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
                                        reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
    Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
    Result result =
      static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device,
                                                             static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
                                                             reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
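
  // Illustrative only, and deliberately abbreviated: the VK_INTEL_performance_query flow is
  // initialize -> acquire a configuration -> bind it to a queue -> collect data -> release.
  //
  //   device.initializePerformanceApiINTEL( vk::InitializePerformanceApiInfoINTEL() );
  //   auto config = device.acquirePerformanceConfigurationINTEL( vk::PerformanceConfigurationAcquireInfoINTEL(
  //     vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) );
  //   queue.setPerformanceConfigurationINTEL( config );
  //   ...
  //   device.releasePerformanceConfigurationINTEL( config );
  //   device.uninitializePerformanceApiINTEL();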

  //=== VK_AMD_display_native_hdr ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
                                                     VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkSetLocalDimmingAMD(
      m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
  }

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_imagepipe_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA(
    const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
    VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
                                         reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                         reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
                                             Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
      m_instance,
      reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
                                                   Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
      m_instance,
      reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  //=== VK_EXT_metal_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                     VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateMetalSurfaceEXT( m_instance,
                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo,
                                     Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateMetalSurfaceEXT( m_instance,
                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo,
                                           Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateMetalSurfaceEXT( m_instance,
                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_METAL_EXT*/

  //=== VK_KHR_fragment_shading_rate ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR(
    uint32_t * pFragmentShadingRateCount,
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
      m_physicalDevice,
      pFragmentShadingRateCount,
      reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
    PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
    uint32_t fragmentShadingRateCount;
    Result result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
      {
        fragmentShadingRates.resize( fragmentShadingRateCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
          m_physicalDevice,
          &fragmentShadingRateCount,
          reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
        VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
    {
      fragmentShadingRates.resize( fragmentShadingRateCount );
    }
    return createResultValue(
      result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
  }

  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch, typename B,
            typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
    PhysicalDevice::getFragmentShadingRatesKHR(
      PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
      Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
      fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
    uint32_t fragmentShadingRateCount;
    Result result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
      {
        fragmentShadingRates.resize( fragmentShadingRateCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
          m_physicalDevice,
          &fragmentShadingRateCount,
          reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
        VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
    {
      fragmentShadingRates.resize( fragmentShadingRateCount );
    }
    return createResultValue(
      result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
    const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
                                      reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
    const Extent2D & fragmentSize,
    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
                                      reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
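
  // Illustrative only: querying the supported rates and then setting a 2x2 per-draw shading rate,
  // assuming "physicalDevice" is valid and "commandBuffer" is in the recording state.
  //
  //   auto rates = physicalDevice.getFragmentShadingRatesKHR();
  //   std::array<vk::FragmentShadingRateCombinerOpKHR, 2> ops = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
  //                                                               vk::FragmentShadingRateCombinerOpKHR::eKeep };
  //   commandBuffer.setFragmentShadingRateKHR( vk::Extent2D( 2, 2 ), ops.data() );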

  //=== VK_EXT_buffer_device_address ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceAddress>(
      d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
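
  // Illustrative only: retrieving the GPU virtual address of a buffer that was created with the
  // eShaderDeviceAddressEXT usage flag ("buffer" is assumed to be such a buffer).
  //
  //   vk::DeviceAddress address = device.getBufferAddressEXT( vk::BufferDeviceAddressInfo( buffer ) );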

  //=== VK_EXT_tooling_info ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount,
                                          VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
      m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( pToolProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceToolPropertiesEXTAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
    PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties;
    uint32_t toolCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && toolCount )
      {
        toolProperties.resize( toolCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
          m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
        VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
    {
      toolProperties.resize( toolCount );
    }
    return createResultValue(
      result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
  }

  template <typename PhysicalDeviceToolPropertiesEXTAllocator, typename Dispatch, typename B,
            typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
    PhysicalDevice::getToolPropertiesEXT(
      PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties(
      physicalDeviceToolPropertiesEXTAllocator );
    uint32_t toolCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && toolCount )
      {
        toolProperties.resize( toolCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
          m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
        VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
    {
      toolProperties.resize( toolCount );
    }
    return createResultValue(
      result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
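
  // Illustrative only: listing the tools (validation layers, profilers, capture tools, ...) that are
  // currently attached to the physical device, assuming exceptions are enabled.
  //
  //   for ( vk::PhysicalDeviceToolPropertiesEXT const & tool : physicalDevice.getToolPropertiesEXT() )
  //   {
  //     std::cout << std::string( tool.name ) << ": " << std::string( tool.description ) << "\n";
  //   }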

  //=== VK_NV_cooperative_matrix ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount,
                                                      VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
    PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
  {
    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
  }

  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch, typename B,
            typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
    PhysicalDevice::getCooperativeMatrixPropertiesNV(
      CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const
  {
    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
      cooperativeMatrixPropertiesNVAllocator );
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
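
  // Illustrative only: enumerating the (MxNxK, component type) combinations the implementation
  // accelerates, e.g. to pick a matrix-multiply tile size; assumes exceptions are enabled.
  //
  //   for ( vk::CooperativeMatrixPropertiesNV const & p : physicalDevice.getCooperativeMatrixPropertiesNV() )
  //   {
  //     // p.MSize, p.NSize, p.KSize, p.AType, p.CType and p.scope describe one supported combination.
  //   }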

  //=== VK_NV_coverage_reduction_mode ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
    uint32_t * pCombinationCount,
    VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
      m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
  {
    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
    uint32_t combinationCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
        m_physicalDevice, &combinationCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && combinationCount )
      {
        combinations.resize( combinationCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
        VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
    {
      combinations.resize( combinationCount );
    }
    return createResultValue( result,
                              combinations,
                              VULKAN_HPP_NAMESPACE_STRING
                              "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
  }

  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch, typename B,
            typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
      FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
      Dispatch const & d ) const
  {
    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
      framebufferMixedSamplesCombinationNVAllocator );
    uint32_t combinationCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
        m_physicalDevice, &combinationCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && combinationCount )
      {
        combinations.resize( combinationCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
        VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
    {
      combinations.resize( combinationCount );
    }
    return createResultValue( result,
                              combinations,
                              VULKAN_HPP_NAMESPACE_STRING
                              "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_EXT_full_screen_exclusive ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
    uint32_t * pPresentModeCount,
    VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
      pPresentModeCount,
      reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PresentModeKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
    uint32_t presentModeCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice,
        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
        &presentModeCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
          m_physicalDevice,
          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
          &presentModeCount,
          reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValue(
      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
  }

  template <typename PresentModeKHRAllocator, typename Dispatch, typename B,
            typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                                PresentModeKHRAllocator & presentModeKHRAllocator,
                                                Dispatch const & d ) const
  {
    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t presentModeCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice,
        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
        &presentModeCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
          m_physicalDevice,
          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
          &presentModeCount,
          reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValue(
      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT(
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
# else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT(
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
# else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                             VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
      reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
    Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
      reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
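
  // Illustrative only (Win32): requesting and releasing exclusive full-screen access for a swapchain
  // that was created with a SurfaceFullScreenExclusiveInfoEXT chained into its create info.
  //
  //   device.acquireFullScreenExclusiveModeEXT( swapchain );
  //   ...
  //   device.releaseFullScreenExclusiveModeEXT( swapchain );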
                                             static_cast<VkSwapchainKHR>( swapchain ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                             VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
      reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
    Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
      reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/
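
  // Usage sketch (illustration only, not part of the generated API; `vk` is the default VULKAN_HPP_NAMESPACE):
  // on Windows, a swapchain created with VK_EXT_full_screen_exclusive in application-controlled mode is
  // typically toggled like this. The handles `device` and `swapchain` below are assumed to exist in the caller.
  //
  //   device.acquireFullScreenExclusiveModeEXT( swapchain );   // throws vk::SystemError on failure
  //   // ... present frames while holding full-screen exclusive access ...
  //   device.releaseFullScreenExclusiveModeEXT( swapchain );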

  //=== VK_EXT_headless_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateHeadlessSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                    reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo,
                                        Optional<const AllocationCallbacks> allocator,
                                        Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateHeadlessSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo,
                                              Optional<const AllocationCallbacks> allocator,
                                              Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateHeadlessSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
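
  // Usage sketch (illustration only): VK_EXT_headless_surface creates a surface without any windowing system,
  // which is handy for automated testing. `instance` below is assumed to be a vk::Instance created by the caller.
  //
  //   vk::HeadlessSurfaceCreateInfoEXT createInfo{};
  //   vk::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( createInfo );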

  //=== VK_KHR_buffer_device_address ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR(
    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceAddress>(
      d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR(
    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
                                                 reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
                                                 reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t
    Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR(
    const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
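
  // Usage sketch (illustration only): querying the device address of a buffer created with the
  // eShaderDeviceAddress usage flag. `device` and `buffer` below are assumed to exist in the calling code.
  //
  //   vk::BufferDeviceAddressInfo addressInfo( buffer );
  //   vk::DeviceAddress address = device.getBufferAddressKHR( addressInfo );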

  //=== VK_EXT_line_rasterization ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor,
                                                           uint16_t lineStipplePattern,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
  }

  //=== VK_EXT_host_query_reset ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                    uint32_t firstQuery,
                                                    uint32_t queryCount,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }

  //=== VK_EXT_extended_dynamic_state ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount,
                                                                 const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportWithCountEXT(
      m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportWithCountEXT(
      m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount,
                                                                const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissorWithCountEXT(
      m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
                                                               uint32_t bindingCount,
                                                               const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  bindingCount,
                                  reinterpret_cast<const VkBuffer *>( pBuffers ),
                                  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                  reinterpret_cast<const VkDeviceSize *>( pSizes ),
                                  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
    }
    if ( !strides.empty() && buffers.size() != strides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  buffers.size(),
                                  reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
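
  // Usage sketch (illustration only): the ArrayProxy overload validates that the proxies are consistently
  // sized, so the optional per-binding sizes and strides can simply be left empty. `cmd`, `vertexBuffer` and
  // `offset` below are assumed to exist in the calling code.
  //
  //   cmd.bindVertexBuffers2EXT( 0, vertexBuffer, offset, {}, {} );   // no explicit sizes / strides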

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                         VULKAN_HPP_NAMESPACE::StencilOp failOp,
                                                         VULKAN_HPP_NAMESPACE::StencilOp passOp,
                                                         VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
                                                         VULKAN_HPP_NAMESPACE::CompareOp compareOp,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilOpEXT( m_commandBuffer,
                            static_cast<VkStencilFaceFlags>( faceMask ),
                            static_cast<VkStencilOp>( failOp ),
                            static_cast<VkStencilOp>( passOp ),
                            static_cast<VkStencilOp>( depthFailOp ),
                            static_cast<VkCompareOp>( compareOp ) );
  }

  //=== VK_KHR_deferred_host_operations ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDeferredOperationKHR( m_device,
                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                      reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
    Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    Result result = static_cast<Result>(
      d.vkCreateDeferredOperationKHR( m_device,
                                      reinterpret_cast<const VkAllocationCallbacks *>(
                                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
    return createResultValue(
      result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
    Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    Result result = static_cast<Result>(
      d.vkCreateDeferredOperationKHR( m_device,
                                      reinterpret_cast<const VkAllocationCallbacks *>(
                                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>(
      result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDeferredOperationKHR( m_device,
                                     static_cast<VkDeferredOperationKHR>( operation ),
                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                              Optional<const AllocationCallbacks> allocator,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDeferredOperationKHR( m_device,
                                     static_cast<VkDeferredOperationKHR>( operation ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDeferredOperationKHR( m_device,
                                     static_cast<VkDeferredOperationKHR>( operation ),
                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDeferredOperationKHR( m_device,
                                     static_cast<VkDeferredOperationKHR>( operation ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  }

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR,
                                VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
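
  // Usage sketch (illustration only): a worker thread typically joins a deferred operation until the driver
  // reports that its share of the work is done, then reads the final result. `device` and `deferredOperation`
  // below are assumed to exist in the calling code.
  //
  //   vk::Result joinResult = vk::Result::eThreadIdleKHR;
  //   while ( joinResult == vk::Result::eThreadIdleKHR )
  //   {
  //     joinResult = device.deferredOperationJoinKHR( deferredOperation );
  //   }
  //   if ( device.getDeferredOperationResultKHR( deferredOperation ) == vk::Result::eSuccess )
  //   {
  //     // the deferred operation completed successfully
  //   }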

  //=== VK_KHR_pipeline_executable_properties ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
                                                uint32_t * pExecutableCount,
                                                VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                              reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
                                              pExecutableCount,
                                              reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
  {
    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
    uint32_t executableCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
        m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && executableCount )
      {
        properties.resize( executableCount );
        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
          m_device,
          reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
          &executableCount,
          reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( executableCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
    {
      properties.resize( executableCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
  }

  template <
    typename PipelineExecutablePropertiesKHRAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR(
      const PipelineInfoKHR & pipelineInfo,
      PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
      Dispatch const & d ) const
  {
    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
      pipelineExecutablePropertiesKHRAllocator );
    uint32_t executableCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
        m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && executableCount )
      {
        properties.resize( executableCount );
        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
          m_device,
          reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
          &executableCount,
          reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( executableCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
    {
      properties.resize( executableCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
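
  // Usage sketch (illustration only): the enhanced overloads run the usual count/fill enumeration loop
  // internally and return a std::vector directly. `device` and `pipeline` below are assumed to exist in the
  // calling code.
  //
  //   vk::PipelineInfoKHR pipelineInfo( pipeline );
  //   std::vector<vk::PipelineExecutablePropertiesKHR> executables =
  //     device.getPipelineExecutablePropertiesKHR( pipelineInfo );
  //   for ( auto const & e : executables )
  //   {
  //     // e.name, e.stages, e.subgroupSize, ...
  //   }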

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
                                                uint32_t * pStatisticCount,
                                                VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                              reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
                                              pStatisticCount,
                                              reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo,
                                                Dispatch const & d ) const
  {
    std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
    uint32_t statisticCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &statisticCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &statisticCount,
          reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
        VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
    {
      statistics.resize( statisticCount );
    }
    return createResultValue(
      result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
  }

  template <
    typename PipelineExecutableStatisticKHRAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR(
      const PipelineExecutableInfoKHR & executableInfo,
      PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
      Dispatch const & d ) const
  {
    std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
      pipelineExecutableStatisticKHRAllocator );
    uint32_t statisticCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &statisticCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &statisticCount,
          reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
        VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
    {
      statistics.resize( statisticCount );
    }
    return createResultValue(
      result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR(
    const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
    uint32_t * pInternalRepresentationCount,
    VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
      m_device,
      reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
      pInternalRepresentationCount,
      reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
                                         PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo,
                                                             Dispatch const & d ) const
  {
    std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
      internalRepresentations;
    uint32_t internalRepresentationCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &internalRepresentationCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
        VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
    {
      internalRepresentations.resize( internalRepresentationCount );
    }
    return createResultValue( result,
                              internalRepresentations,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
  }

  template <
    typename PipelineExecutableInternalRepresentationKHRAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
                                         PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR(
      const PipelineExecutableInfoKHR & executableInfo,
      PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
      Dispatch const & d ) const
  {
    std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
      internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
    uint32_t internalRepresentationCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &internalRepresentationCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
        VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
    {
      internalRepresentations.resize( internalRepresentationCount );
    }
    return createResultValue( result,
                              internalRepresentations,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_NV_device_generated_commands ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV(
    const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetGeneratedCommandsMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetGeneratedCommandsMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetGeneratedCommandsMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV(
    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPreprocessGeneratedCommandsNV(
      m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPreprocessGeneratedCommandsNV(
      m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV(
    VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
                                       static_cast<VkBool32>( isPreprocessed ),
                                       reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
                                               const GeneratedCommandsInfoNV & generatedCommandsInfo,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
                                       static_cast<VkBool32>( isPreprocessed ),
                                       reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
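
  // Usage sketch (illustration only): a typical VK_NV_device_generated_commands flow queries the preprocess
  // buffer requirements, preprocesses the generated commands, then replays them. `device`, `cmd`, `pipeline`,
  // `indirectCommandsLayout`, `maxSequences` and `generatedCommandsInfo` below are assumed to exist in the caller.
  //
  //   vk::GeneratedCommandsMemoryRequirementsInfoNV reqInfo(
  //     vk::PipelineBindPoint::eGraphics, pipeline, indirectCommandsLayout, maxSequences );
  //   vk::MemoryRequirements2 reqs = device.getGeneratedCommandsMemoryRequirementsNV( reqInfo );
  //   // ... allocate the preprocess buffer from reqs.memoryRequirements and fill generatedCommandsInfo ...
  //   cmd.preprocessGeneratedCommandsNV( generatedCommandsInfo );
  //   cmd.executeGeneratedCommandsNV( VK_TRUE, generatedCommandsInfo );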

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                              VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                              uint32_t groupIndex,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer,
                                      static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                      static_cast<VkPipeline>( pipeline ),
                                      groupIndex );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV(
    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateIndirectCommandsLayoutNV( m_device,
                                          reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                          reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
    Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
      m_device,
      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
    return createResultValue(
      result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
    Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo,
                                                  Optional<const AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
      m_device,
      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
      result,
      indirectCommandsLayout,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique",
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyIndirectCommandsLayoutNV( m_device,
                                         static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                                             Optional<const AllocationCallbacks> allocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device,
      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyIndirectCommandsLayoutNV( m_device,
                                         static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device,
      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_EXT_acquire_drm_display ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT(
    int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireDrmDisplayEXT(
    int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getDrmDisplayEXT( int32_t drmFd,
                                      uint32_t connectorId,
                                      VULKAN_HPP_NAMESPACE::DisplayKHR * display,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
    PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    Result result = static_cast<Result>(
      d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
    return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
    PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    Result result = static_cast<Result>(
      d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
    ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
      result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
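
  // Usage sketch (illustration only): on a headless Linux box, VK_EXT_acquire_drm_display lets an application
  // take ownership of a display through an already opened DRM file descriptor. `physicalDevice`, `drmFd` and
  // `connectorId` below are assumed to come from the caller (e.g. via libdrm).
  //
  //   vk::DisplayKHR display = physicalDevice.getDrmDisplayEXT( drmFd, connectorId );
  //   physicalDevice.acquireDrmDisplayEXT( drmFd, display );   // throws vk::SystemError on failure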
createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( 16711 result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique", deleter ); 16712 } 16713 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 16714 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16715 16716 //=== VK_EXT_private_data === 16717 16718 template <typename Dispatch> 16719 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot,Dispatch const & d) const16720 Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo, 16721 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16722 VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot, 16723 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16724 { 16725 return static_cast<Result>( 16726 d.vkCreatePrivateDataSlotEXT( m_device, 16727 reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( pCreateInfo ), 16728 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 16729 reinterpret_cast<VkPrivateDataSlotEXT *>( pPrivateDataSlot ) ) ); 16730 } 16731 16732 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16733 template <typename Dispatch> 16734 VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type createPrivateDataSlotEXT(const PrivateDataSlotCreateInfoEXT & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const16735 Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, 16736 Optional<const AllocationCallbacks> allocator, 16737 Dispatch const & d ) const 16738 { 16739 VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot; 16740 Result result = static_cast<Result>( 16741 d.vkCreatePrivateDataSlotEXT( m_device, 16742 reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ), 16743 reinterpret_cast<const VkAllocationCallbacks *>( 16744 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16745 reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) ); 16746 return createResultValue( 16747 result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); 16748 } 16749 16750 # ifndef VULKAN_HPP_NO_SMART_HANDLE 16751 template <typename Dispatch> 16752 VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type createPrivateDataSlotEXTUnique(const PrivateDataSlotCreateInfoEXT & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const16753 Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, 16754 Optional<const AllocationCallbacks> allocator, 16755 Dispatch const & d ) const 16756 { 16757 VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot; 16758 Result result = static_cast<Result>( 16759 d.vkCreatePrivateDataSlotEXT( m_device, 16760 reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ), 16761 reinterpret_cast<const VkAllocationCallbacks *>( 16762 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 16763 reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) ); 16764 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 16765 return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>( 16766 result, privateDataSlot, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter ); 16767 } 16768 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 16769 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16770 16771 template <typename Dispatch> 16772 VULKAN_HPP_INLINE void destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const16773 Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, 16774 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16775 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16776 { 16777 d.vkDestroyPrivateDataSlotEXT( m_device, 16778 static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), 16779 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 16780 } 16781 16782 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16783 template <typename Dispatch> destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const16784 VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, 16785 Optional<const AllocationCallbacks> allocator, 16786 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16787 { 16788 d.vkDestroyPrivateDataSlotEXT( m_device, 16789 static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), 16790 reinterpret_cast<const VkAllocationCallbacks *>( 16791 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 16792 } 16793 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16794 16795 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const16796 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, 16797 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 16798 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16799 { 16800 d.vkDestroyPrivateDataSlotEXT( m_device, 16801 static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), 16802 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); 16803 } 16804 16805 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16806 template <typename Dispatch> destroy(VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const16807 VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, 16808 Optional<const AllocationCallbacks> allocator, 16809 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16810 { 16811 d.vkDestroyPrivateDataSlotEXT( m_device, 16812 static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), 16813 reinterpret_cast<const VkAllocationCallbacks *>( 16814 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); 16815 } 16816 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16817 16818 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE 16819 template <typename Dispatch> 16820 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,uint64_t data,Dispatch const & d) const16821 Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, 16822 uint64_t objectHandle, 16823 VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, 16824 uint64_t data, 16825 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16826 
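  // Stores 64 bits of application-defined data for the object identified by ( objectType, objectHandle ) in the
  // given private data slot; Device::getPrivateDataEXT below reads it back (slots that were never written return 0).
  // Illustrative usage sketch only, not generated code; assumes the default `vk` namespace, the default dispatcher,
  // and an existing vk::Device `device`:
  //
  //   vk::UniquePrivateDataSlotEXT slot   = device.createPrivateDataSlotEXTUnique( vk::PrivateDataSlotCreateInfoEXT{} );
  //   uint64_t                     handle = reinterpret_cast<uint64_t>( static_cast<VkDevice>( device ) );
  //   device.setPrivateDataEXT( vk::ObjectType::eDevice, handle, *slot, 42 );
  //   uint64_t value = device.getPrivateDataEXT( vk::ObjectType::eDevice, handle, *slot );  // value == 42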
{ 16827 return static_cast<Result>( d.vkSetPrivateDataEXT( m_device, 16828 static_cast<VkObjectType>( objectType ), 16829 objectHandle, 16830 static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), 16831 data ) ); 16832 } 16833 #else 16834 template <typename Dispatch> 16835 VULKAN_HPP_INLINE typename ResultValueType<void>::type setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,uint64_t data,Dispatch const & d) const16836 Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, 16837 uint64_t objectHandle, 16838 VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, 16839 uint64_t data, 16840 Dispatch const & d ) const 16841 { 16842 Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device, 16843 static_cast<VkObjectType>( objectType ), 16844 objectHandle, 16845 static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), 16846 data ) ); 16847 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); 16848 } 16849 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16850 16851 template <typename Dispatch> getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,uint64_t * pData,Dispatch const & d) const16852 VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, 16853 uint64_t objectHandle, 16854 VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, 16855 uint64_t * pData, 16856 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16857 { 16858 d.vkGetPrivateDataEXT( m_device, 16859 static_cast<VkObjectType>( objectType ), 16860 objectHandle, 16861 static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), 16862 pData ); 16863 } 16864 16865 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16866 template <typename Dispatch> 16867 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType,uint64_t objectHandle,VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,Dispatch const & d) const16868 Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, 16869 uint64_t objectHandle, 16870 VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, 16871 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16872 { 16873 uint64_t data; 16874 d.vkGetPrivateDataEXT( m_device, 16875 static_cast<VkObjectType>( objectType ), 16876 objectHandle, 16877 static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), 16878 &data ); 16879 return data; 16880 } 16881 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16882 16883 #if defined( VK_ENABLE_BETA_EXTENSIONS ) 16884 //=== VK_KHR_video_encode_queue === 16885 16886 template <typename Dispatch> encodeVideoKHR(const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,Dispatch const & d) const16887 VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, 16888 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16889 { 16890 d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) ); 16891 } 16892 16893 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16894 template <typename Dispatch> encodeVideoKHR(const VideoEncodeInfoKHR & encodeInfo,Dispatch const & d) const16895 VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo, 16896 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16897 { 16898 d.vkCmdEncodeVideoKHR( m_commandBuffer, 
reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) ); 16899 } 16900 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16901 #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 16902 16903 //=== VK_KHR_synchronization2 === 16904 16905 template <typename Dispatch> setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,Dispatch const & d) const16906 VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 16907 const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, 16908 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16909 { 16910 d.vkCmdSetEvent2KHR( m_commandBuffer, 16911 static_cast<VkEvent>( event ), 16912 reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) ); 16913 } 16914 16915 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16916 template <typename Dispatch> setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,const DependencyInfoKHR & dependencyInfo,Dispatch const & d) const16917 VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 16918 const DependencyInfoKHR & dependencyInfo, 16919 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16920 { 16921 d.vkCmdSetEvent2KHR( m_commandBuffer, 16922 static_cast<VkEvent>( event ), 16923 reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) ); 16924 } 16925 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16926 16927 template <typename Dispatch> resetEvent2KHR(VULKAN_HPP_NAMESPACE::Event event,VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask,Dispatch const & d) const16928 VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, 16929 VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, 16930 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16931 { 16932 d.vkCmdResetEvent2KHR( 16933 m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2KHR>( stageMask ) ); 16934 } 16935 16936 template <typename Dispatch> 16937 VULKAN_HPP_INLINE void waitEvents2KHR(uint32_t eventCount,const VULKAN_HPP_NAMESPACE::Event * pEvents,const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos,Dispatch const & d) const16938 CommandBuffer::waitEvents2KHR( uint32_t eventCount, 16939 const VULKAN_HPP_NAMESPACE::Event * pEvents, 16940 const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos, 16941 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16942 { 16943 d.vkCmdWaitEvents2KHR( m_commandBuffer, 16944 eventCount, 16945 reinterpret_cast<const VkEvent *>( pEvents ), 16946 reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfos ) ); 16947 } 16948 16949 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16950 template <typename Dispatch> 16951 VULKAN_HPP_INLINE void waitEvents2KHR(ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos,Dispatch const & d) const16952 CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, 16953 ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos, 16954 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS 16955 { 16956 # ifdef VULKAN_HPP_NO_EXCEPTIONS 16957 VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); 16958 # else 16959 if ( events.size() != dependencyInfos.size() ) 16960 { 16961 throw LogicError( VULKAN_HPP_NAMESPACE_STRING 16962 "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); 16963 } 16964 # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 16965 16966 
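    // Note: the i-th element of dependencyInfos describes the dependency that is waited on for the i-th element of
    // events, which is why the two ranges are required to have the same size (checked or asserted above).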
d.vkCmdWaitEvents2KHR( m_commandBuffer, 16967 events.size(), 16968 reinterpret_cast<const VkEvent *>( events.data() ), 16969 reinterpret_cast<const VkDependencyInfoKHR *>( dependencyInfos.data() ) ); 16970 } 16971 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16972 16973 template <typename Dispatch> 16974 VULKAN_HPP_INLINE void pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,Dispatch const & d) const16975 CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, 16976 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16977 { 16978 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) ); 16979 } 16980 16981 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 16982 template <typename Dispatch> pipelineBarrier2KHR(const DependencyInfoKHR & dependencyInfo,Dispatch const & d) const16983 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, 16984 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16985 { 16986 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) ); 16987 } 16988 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 16989 16990 template <typename Dispatch> writeTimestamp2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,VULKAN_HPP_NAMESPACE::QueryPool queryPool,uint32_t query,Dispatch const & d) const16991 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, 16992 VULKAN_HPP_NAMESPACE::QueryPool queryPool, 16993 uint32_t query, 16994 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 16995 { 16996 d.vkCmdWriteTimestamp2KHR( 16997 m_commandBuffer, static_cast<VkPipelineStageFlags2KHR>( stage ), static_cast<VkQueryPool>( queryPool ), query ); 16998 } 16999 17000 template <typename Dispatch> 17001 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result submit2KHR(uint32_t submitCount,const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const17002 Queue::submit2KHR( uint32_t submitCount, 17003 const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits, 17004 VULKAN_HPP_NAMESPACE::Fence fence, 17005 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17006 { 17007 return static_cast<Result>( d.vkQueueSubmit2KHR( 17008 m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2KHR *>( pSubmits ), static_cast<VkFence>( fence ) ) ); 17009 } 17010 17011 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17012 template <typename Dispatch> 17013 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type submit2KHR(ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits,VULKAN_HPP_NAMESPACE::Fence fence,Dispatch const & d) const17014 Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits, 17015 VULKAN_HPP_NAMESPACE::Fence fence, 17016 Dispatch const & d ) const 17017 { 17018 Result result = 17019 static_cast<Result>( d.vkQueueSubmit2KHR( m_queue, 17020 submits.size(), 17021 reinterpret_cast<const VkSubmitInfo2KHR *>( submits.data() ), 17022 static_cast<VkFence>( fence ) ) ); 17023 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); 17024 } 17025 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17026 17027 template <typename Dispatch> writeBufferMarker2AMD(VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,VULKAN_HPP_NAMESPACE::Buffer dstBuffer,VULKAN_HPP_NAMESPACE::DeviceSize 
dstOffset,uint32_t marker,Dispatch const & d) const17028 VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, 17029 VULKAN_HPP_NAMESPACE::Buffer dstBuffer, 17030 VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 17031 uint32_t marker, 17032 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17033 { 17034 d.vkCmdWriteBufferMarker2AMD( m_commandBuffer, 17035 static_cast<VkPipelineStageFlags2KHR>( stage ), 17036 static_cast<VkBuffer>( dstBuffer ), 17037 static_cast<VkDeviceSize>( dstOffset ), 17038 marker ); 17039 } 17040 17041 template <typename Dispatch> getCheckpointData2NV(uint32_t * pCheckpointDataCount,VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,Dispatch const & d) const17042 VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, 17043 VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, 17044 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17045 { 17046 d.vkGetQueueCheckpointData2NV( 17047 m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) ); 17048 } 17049 17050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17051 template <typename CheckpointData2NVAllocator, typename Dispatch> 17052 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV(Dispatch const & d) const17053 Queue::getCheckpointData2NV( Dispatch const & d ) const 17054 { 17055 std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData; 17056 uint32_t checkpointDataCount; 17057 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); 17058 checkpointData.resize( checkpointDataCount ); 17059 d.vkGetQueueCheckpointData2NV( 17060 m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); 17061 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 17062 return checkpointData; 17063 } 17064 17065 template <typename CheckpointData2NVAllocator, 17066 typename Dispatch, 17067 typename B, 17068 typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type> 17069 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator> getCheckpointData2NV(CheckpointData2NVAllocator & checkpointData2NVAllocator,Dispatch const & d) const17070 Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const 17071 { 17072 std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator ); 17073 uint32_t checkpointDataCount; 17074 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); 17075 checkpointData.resize( checkpointDataCount ); 17076 d.vkGetQueueCheckpointData2NV( 17077 m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); 17078 VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); 17079 return checkpointData; 17080 } 17081 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17082 17083 //=== VK_NV_fragment_shading_rate_enums === 17084 17085 template <typename Dispatch> setFragmentShadingRateEnumNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],Dispatch const & d) const17086 VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( 17087 VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, 17088 const 
VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer,
                                         static_cast<VkFragmentShadingRateNV>( shadingRate ),
                                         reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }

  //=== VK_KHR_copy_commands2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo,
                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( pCopyBufferInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( &copyBufferInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( pCopyImageInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( &copyImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR(
    const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,
    Dispatch const &                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
                                  reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( pCopyBufferToImageInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
                                  reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( &copyBufferToImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR(
    const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo,
    Dispatch const &                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
                                  reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( pCopyImageToBufferInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
                                  reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( &copyImageToBufferInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( pBlitImageInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( &blitImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( pResolveImageInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( &resolveImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_NV_acquire_winrt_display ===

# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV(
    VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  }
# else
  template <typename Dispatch>
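  // Illustrative usage sketch for the VK_KHR_copy_commands2 wrappers defined above, not generated code; assumes the
  // default `vk` namespace and existing handles vk::CommandBuffer `cmd`, vk::Buffer `src` and `dst`:
  //
  //   vk::BufferCopy2KHR     region( /*srcOffset*/ 0, /*dstOffset*/ 0, /*size*/ 256 );
  //   vk::CopyBufferInfo2KHR copyInfo( src, dst, region );
  //   cmd.copyBuffer2KHR( copyInfo );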
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type acquireWinrtDisplayNV(VULKAN_HPP_NAMESPACE::DisplayKHR display,Dispatch const & d) const17215 PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const 17216 { 17217 Result result = 17218 static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); 17219 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); 17220 } 17221 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17222 17223 template <typename Dispatch> 17224 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getWinrtDisplayNV(uint32_t deviceRelativeId,VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,Dispatch const & d) const17225 PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, 17226 VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, 17227 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17228 { 17229 return static_cast<Result>( 17230 d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); 17231 } 17232 17233 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17234 template <typename Dispatch> 17235 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 17236 typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getWinrtDisplayNV(uint32_t deviceRelativeId,Dispatch const & d) const17237 PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const 17238 { 17239 VULKAN_HPP_NAMESPACE::DisplayKHR display; 17240 Result result = static_cast<Result>( 17241 d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 17242 return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); 17243 } 17244 17245 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17246 template <typename Dispatch> 17247 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 17248 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getWinrtDisplayNVUnique(uint32_t deviceRelativeId,Dispatch const & d) const17249 PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const 17250 { 17251 VULKAN_HPP_NAMESPACE::DisplayKHR display; 17252 Result result = static_cast<Result>( 17253 d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); 17254 ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d ); 17255 return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( 17256 result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter ); 17257 } 17258 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 17259 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17260 #endif /*VK_USE_PLATFORM_WIN32_KHR*/ 17261 17262 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) 17263 //=== VK_EXT_directfb_surface === 17264 17265 template <typename Dispatch> 17266 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createDirectFBSurfaceEXT(const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,Dispatch const & d) const17267 Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, 17268 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17269 VULKAN_HPP_NAMESPACE::SurfaceKHR * 
pSurface, 17270 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17271 { 17272 return static_cast<Result>( 17273 d.vkCreateDirectFBSurfaceEXT( m_instance, 17274 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), 17275 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17276 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); 17277 } 17278 17279 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17280 template <typename Dispatch> 17281 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 17282 typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDirectFBSurfaceEXT(const DirectFBSurfaceCreateInfoEXT & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const17283 Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, 17284 Optional<const AllocationCallbacks> allocator, 17285 Dispatch const & d ) const 17286 { 17287 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 17288 Result result = static_cast<Result>( 17289 d.vkCreateDirectFBSurfaceEXT( m_instance, 17290 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), 17291 reinterpret_cast<const VkAllocationCallbacks *>( 17292 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17293 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 17294 return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); 17295 } 17296 17297 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17298 template <typename Dispatch> 17299 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 17300 typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDirectFBSurfaceEXTUnique(const DirectFBSurfaceCreateInfoEXT & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const17301 Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, 17302 Optional<const AllocationCallbacks> allocator, 17303 Dispatch const & d ) const 17304 { 17305 VULKAN_HPP_NAMESPACE::SurfaceKHR surface; 17306 Result result = static_cast<Result>( 17307 d.vkCreateDirectFBSurfaceEXT( m_instance, 17308 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), 17309 reinterpret_cast<const VkAllocationCallbacks *>( 17310 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17311 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); 17312 ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); 17313 return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( 17314 result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter ); 17315 } 17316 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 17317 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17318 17319 template <typename Dispatch> getDirectFBPresentationSupportEXT(uint32_t queueFamilyIndex,IDirectFB * dfb,Dispatch const & d) const17320 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( 17321 uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17322 { 17323 return static_cast<Bool32>( 17324 d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); 17325 } 17326 17327 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17328 template <typename Dispatch> getDirectFBPresentationSupportEXT(uint32_t queueFamilyIndex,IDirectFB & dfb,Dispatch const & d) const17329 VULKAN_HPP_INLINE Bool32 
PhysicalDevice::getDirectFBPresentationSupportEXT( 17330 uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17331 { 17332 return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); 17333 } 17334 # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17335 #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ 17336 17337 //=== VK_KHR_ray_tracing_pipeline === 17338 17339 template <typename Dispatch> traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const17340 VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( 17341 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, 17342 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, 17343 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, 17344 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, 17345 uint32_t width, 17346 uint32_t height, 17347 uint32_t depth, 17348 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17349 { 17350 d.vkCmdTraceRaysKHR( m_commandBuffer, 17351 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), 17352 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), 17353 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), 17354 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), 17355 width, 17356 height, 17357 depth ); 17358 } 17359 17360 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17361 template <typename Dispatch> traceRaysKHR(const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,const StridedDeviceAddressRegionKHR & missShaderBindingTable,const StridedDeviceAddressRegionKHR & hitShaderBindingTable,const StridedDeviceAddressRegionKHR & callableShaderBindingTable,uint32_t width,uint32_t height,uint32_t depth,Dispatch const & d) const17362 VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, 17363 const StridedDeviceAddressRegionKHR & missShaderBindingTable, 17364 const StridedDeviceAddressRegionKHR & hitShaderBindingTable, 17365 const StridedDeviceAddressRegionKHR & callableShaderBindingTable, 17366 uint32_t width, 17367 uint32_t height, 17368 uint32_t depth, 17369 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17370 { 17371 d.vkCmdTraceRaysKHR( m_commandBuffer, 17372 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), 17373 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), 17374 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), 17375 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), 17376 width, 17377 height, 17378 depth ); 17379 } 17380 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17381 17382 template <typename Dispatch> 17383 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,uint32_t 
createInfoCount,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,Dispatch const & d) const17384 Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17385 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17386 uint32_t createInfoCount, 17387 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, 17388 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, 17389 VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, 17390 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17391 { 17392 return static_cast<Result>( 17393 d.vkCreateRayTracingPipelinesKHR( m_device, 17394 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17395 static_cast<VkPipelineCache>( pipelineCache ), 17396 createInfoCount, 17397 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ), 17398 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), 17399 reinterpret_cast<VkPipeline *>( pPipelines ) ) ); 17400 } 17401 17402 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17403 template <typename PipelineAllocator, typename Dispatch> 17404 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const17405 Device::createRayTracingPipelinesKHR( 17406 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17407 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17408 ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17409 Optional<const AllocationCallbacks> allocator, 17410 Dispatch const & d ) const 17411 { 17412 std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); 17413 Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( 17414 m_device, 17415 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17416 static_cast<VkPipelineCache>( pipelineCache ), 17417 createInfos.size(), 17418 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17419 reinterpret_cast<const VkAllocationCallbacks *>( 17420 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17421 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17422 return createResultValue( result, 17423 pipelines, 17424 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", 17425 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17426 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17427 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17428 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17429 } 17430 17431 template <typename PipelineAllocator, 17432 typename Dispatch, 17433 typename B, 17434 typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type> 17435 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const AllocationCallbacks> 
allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const17436 Device::createRayTracingPipelinesKHR( 17437 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17438 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17439 ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17440 Optional<const AllocationCallbacks> allocator, 17441 PipelineAllocator & pipelineAllocator, 17442 Dispatch const & d ) const 17443 { 17444 std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); 17445 Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( 17446 m_device, 17447 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17448 static_cast<VkPipelineCache>( pipelineCache ), 17449 createInfos.size(), 17450 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17451 reinterpret_cast<const VkAllocationCallbacks *>( 17452 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17453 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17454 return createResultValue( result, 17455 pipelines, 17456 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", 17457 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17458 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17459 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17460 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17461 } 17462 17463 template <typename Dispatch> 17464 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> createRayTracingPipelineKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const17465 Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17466 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17467 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, 17468 Optional<const AllocationCallbacks> allocator, 17469 Dispatch const & d ) const 17470 { 17471 Pipeline pipeline; 17472 Result result = static_cast<Result>( 17473 d.vkCreateRayTracingPipelinesKHR( m_device, 17474 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17475 static_cast<VkPipelineCache>( pipelineCache ), 17476 1, 17477 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), 17478 reinterpret_cast<const VkAllocationCallbacks *>( 17479 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17480 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 17481 return createResultValue( result, 17482 pipeline, 17483 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", 17484 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17485 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17486 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17487 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17488 } 17489 17490 # ifndef VULKAN_HPP_NO_SMART_HANDLE 17491 template <typename Dispatch, typename PipelineAllocator> 17492 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const17493 Device::createRayTracingPipelinesKHRUnique( 17494 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17495 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17496 ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17497 Optional<const AllocationCallbacks> allocator, 17498 Dispatch const & d ) const 17499 { 17500 std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; 17501 std::vector<Pipeline> pipelines( createInfos.size() ); 17502 Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( 17503 m_device, 17504 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17505 static_cast<VkPipelineCache>( pipelineCache ), 17506 createInfos.size(), 17507 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17508 reinterpret_cast<const VkAllocationCallbacks *>( 17509 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17510 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17511 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || 17512 ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || 17513 ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || 17514 ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) 17515 { 17516 uniquePipelines.reserve( createInfos.size() ); 17517 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 17518 for ( size_t i = 0; i < createInfos.size(); i++ ) 17519 { 17520 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) ); 17521 } 17522 } 17523 return createResultValue( result, 17524 std::move( uniquePipelines ), 17525 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", 17526 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17527 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17528 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17529 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17530 } 17531 17532 template < 17533 typename Dispatch, 17534 typename PipelineAllocator, 17535 typename B, 17536 typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type> 17537 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,Optional<const AllocationCallbacks> allocator,PipelineAllocator & pipelineAllocator,Dispatch const & d) const17538 Device::createRayTracingPipelinesKHRUnique( 17539 VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17540 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17541 ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, 17542 Optional<const AllocationCallbacks> allocator, 17543 PipelineAllocator & pipelineAllocator, 17544 Dispatch const & d ) const 17545 { 17546 std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); 17547 std::vector<Pipeline> pipelines( createInfos.size() ); 17548 Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( 
17549 m_device, 17550 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17551 static_cast<VkPipelineCache>( pipelineCache ), 17552 createInfos.size(), 17553 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), 17554 reinterpret_cast<const VkAllocationCallbacks *>( 17555 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17556 reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); 17557 if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || 17558 ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || 17559 ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || 17560 ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) 17561 { 17562 uniquePipelines.reserve( createInfos.size() ); 17563 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 17564 for ( size_t i = 0; i < createInfos.size(); i++ ) 17565 { 17566 uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) ); 17567 } 17568 } 17569 return createResultValue( result, 17570 std::move( uniquePipelines ), 17571 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", 17572 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17573 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17574 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17575 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); 17576 } 17577 17578 template <typename Dispatch> 17579 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,Optional<const AllocationCallbacks> allocator,Dispatch const & d) const17580 Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, 17581 VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, 17582 const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, 17583 Optional<const AllocationCallbacks> allocator, 17584 Dispatch const & d ) const 17585 { 17586 Pipeline pipeline; 17587 Result result = static_cast<Result>( 17588 d.vkCreateRayTracingPipelinesKHR( m_device, 17589 static_cast<VkDeferredOperationKHR>( deferredOperation ), 17590 static_cast<VkPipelineCache>( pipelineCache ), 17591 1, 17592 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), 17593 reinterpret_cast<const VkAllocationCallbacks *>( 17594 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), 17595 reinterpret_cast<VkPipeline *>( &pipeline ) ) ); 17596 ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); 17597 return createResultValue<Pipeline, Dispatch>( result, 17598 pipeline, 17599 VULKAN_HPP_NAMESPACE_STRING 17600 "::Device::createRayTracingPipelineKHRUnique", 17601 { VULKAN_HPP_NAMESPACE::Result::eSuccess, 17602 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, 17603 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, 17604 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, 17605 deleter ); 17606 } 17607 # endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 17608 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17609 17610 template <typename Dispatch> 17611 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t 
dataSize,void * pData,Dispatch const & d) const17612 Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17613 uint32_t firstGroup, 17614 uint32_t groupCount, 17615 size_t dataSize, 17616 void * pData, 17617 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17618 { 17619 return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( 17620 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); 17621 } 17622 17623 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17624 template <typename T, typename Dispatch> 17625 VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) 17626 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,ArrayProxy<T> const & data,Dispatch const & d) const17627 typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR( 17628 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17629 uint32_t firstGroup, 17630 uint32_t groupCount, 17631 ArrayProxy<T> const & data, 17632 Dispatch const & d ) const 17633 { 17634 Result result = 17635 static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, 17636 static_cast<VkPipeline>( pipeline ), 17637 firstGroup, 17638 groupCount, 17639 data.size() * sizeof( T ), 17640 reinterpret_cast<void *>( data.data() ) ) ); 17641 return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); 17642 } 17643 17644 template <typename T, typename Allocator, typename Dispatch> 17645 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const17646 Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17647 uint32_t firstGroup, 17648 uint32_t groupCount, 17649 size_t dataSize, 17650 Dispatch const & d ) const 17651 { 17652 VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); 17653 std::vector<T, Allocator> data( dataSize / sizeof( T ) ); 17654 Result result = 17655 static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, 17656 static_cast<VkPipeline>( pipeline ), 17657 firstGroup, 17658 groupCount, 17659 data.size() * sizeof( T ), 17660 reinterpret_cast<void *>( data.data() ) ) ); 17661 return createResultValue( 17662 result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); 17663 } 17664 17665 template <typename T, typename Dispatch> 17666 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type getRayTracingShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,Dispatch const & d) const17667 Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17668 uint32_t firstGroup, 17669 uint32_t groupCount, 17670 Dispatch const & d ) const 17671 { 17672 T data; 17673 Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, 17674 static_cast<VkPipeline>( pipeline ), 17675 firstGroup, 17676 groupCount, 17677 sizeof( T ), 17678 reinterpret_cast<void *>( &data ) ) ); 17679 return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); 17680 } 17681 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 17682 17683 
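  // Illustrative usage sketch, not generated code: reading the shader group handles of a ray tracing pipeline into a
  // byte vector, e.g. to fill a shader binding table. Assumes the default `vk` namespace, an existing vk::Device
  // `device`, a vk::Pipeline `rtPipeline` with `groupCount` shader groups, and `handleSize` taken from
  // PhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize:
  //
  //   std::vector<uint8_t> handles = device.getRayTracingShaderGroupHandlesKHR<uint8_t>(
  //     rtPipeline, /*firstGroup*/ 0, groupCount, size_t( groupCount ) * handleSize );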
template <typename Dispatch> 17684 VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData,Dispatch const & d) const17685 Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17686 uint32_t firstGroup, 17687 uint32_t groupCount, 17688 size_t dataSize, 17689 void * pData, 17690 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 17691 { 17692 return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( 17693 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); 17694 } 17695 17696 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 17697 template <typename T, typename Dispatch> 17698 VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) 17699 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,ArrayProxy<T> const & data,Dispatch const & d) const17700 typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( 17701 VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17702 uint32_t firstGroup, 17703 uint32_t groupCount, 17704 ArrayProxy<T> const & data, 17705 Dispatch const & d ) const 17706 { 17707 Result result = static_cast<Result>( 17708 d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, 17709 static_cast<VkPipeline>( pipeline ), 17710 firstGroup, 17711 groupCount, 17712 data.size() * sizeof( T ), 17713 reinterpret_cast<void *>( data.data() ) ) ); 17714 return createResultValue( result, 17715 VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); 17716 } 17717 17718 template <typename T, typename Allocator, typename Dispatch> 17719 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,Dispatch const & d) const17720 Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17721 uint32_t firstGroup, 17722 uint32_t groupCount, 17723 size_t dataSize, 17724 Dispatch const & d ) const 17725 { 17726 VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); 17727 std::vector<T, Allocator> data( dataSize / sizeof( T ) ); 17728 Result result = static_cast<Result>( 17729 d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, 17730 static_cast<VkPipeline>( pipeline ), 17731 firstGroup, 17732 groupCount, 17733 data.size() * sizeof( T ), 17734 reinterpret_cast<void *>( data.data() ) ) ); 17735 return createResultValue( 17736 result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); 17737 } 17738 17739 template <typename T, typename Dispatch> 17740 VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type getRayTracingCaptureReplayShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline,uint32_t firstGroup,uint32_t groupCount,Dispatch const & d) const17741 Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, 17742 uint32_t firstGroup, 17743 uint32_t groupCount, 17744 Dispatch const & d ) const 17745 { 17746 T data; 17747 Result result = 17748 static_cast<Result>( 
      d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
                                                           static_cast<VkPipeline>( pipeline ),
                                                           firstGroup,
                                                           groupCount,
                                                           sizeof( T ),
                                                           reinterpret_cast<void *>( &data ) ) );
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR(
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
    VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysIndirectKHR(
      m_commandBuffer,
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
      static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                                         const StridedDeviceAddressRegionKHR & missShaderBindingTable,
                                         const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                                         const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                                         VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysIndirectKHR(
      m_commandBuffer,
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
      static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceSize
    Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                  uint32_t group,
                                                  VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR(
      m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
  }

  //=== VK_EXT_vertex_input_dynamic_state ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
    uint32_t vertexBindingDescriptionCount,
    const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
    uint32_t vertexAttributeDescriptionCount,
    const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetVertexInputEXT(
      m_commandBuffer,
      vertexBindingDescriptionCount,
      reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
      vertexAttributeDescriptionCount,
      reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetVertexInputEXT(
      m_commandBuffer,
      vertexBindingDescriptions.size(),
      reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
      vertexAttributeDescriptions.size(),
      reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_external_memory ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
    zx_handle_t * pZirconHandle,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
    Device::getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
                                          Dispatch const & d ) const
  {
    zx_handle_t zirconHandle;
    Result result = static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
    return createResultValue(
      result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA(
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
    zx_handle_t zirconHandle,
    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      zirconHandle,
      reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                                    zx_handle_t zirconHandle,
                                                    Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
    Result result = static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      zirconHandle,
      reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
    return createResultValue( result,
                              memoryZirconHandleProperties,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_external_semaphore ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device,
      reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importSemaphoreZirconHandleFUCHSIA(
      const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device,
      reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
    zx_handle_t * pZirconHandle,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
      m_device,
      reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ),
      pZirconHandle ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
    Device::getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
                                             Dispatch const & d ) const
  {
    zx_handle_t zirconHandle;
    Result result = static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
      m_device,
      reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ),
      &zirconHandle ) );
    return createResultValue(
      result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  //=== VK_HUAWEI_subpass_shading ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
                                                     VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
    Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
                                                     Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
    Result result = static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) );
    return createResultValue( result,
                              maxWorkgroupSize,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
  }

  //=== VK_NV_external_memory_rdma ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryRemoteAddressNV(
    const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * getMemoryRemoteAddressInfo,
    VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
      m_device,
      reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( getMemoryRemoteAddressInfo ),
      reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
    Device::getMemoryRemoteAddressNV( const MemoryGetRemoteAddressInfoNV & getMemoryRemoteAddressInfo,
                                      Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
    Result result = static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
      m_device,
      reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &getMemoryRemoteAddressInfo ),
      reinterpret_cast<VkRemoteAddressNV *>( &address ) ) );
    return createResultValue( result, address, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_EXT_extended_dynamic_state2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  }

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  //=== VK_QNX_screen_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                      VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateScreenSurfaceQNX( m_instance,
                                  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo,
                                      Optional<const AllocationCallbacks> allocator,
                                      Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateScreenSurfaceQNX( m_instance,
                                  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result result = static_cast<Result>(
      d.vkCreateScreenSurfaceQNX( m_instance,
                                  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif  /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
    uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>(
      d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
    uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif  /*VK_USE_PLATFORM_SCREEN_QNX*/

  //=== VK_EXT_color_write_enable ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount,
                                                                const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetColorWriteEnableEXT(
      m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetColorWriteEnableEXT(
      m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_EXT_multi_draw ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount,
                                                      const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,
                                                      uint32_t instanceCount,
                                                      uint32_t firstInstance,
                                                      uint32_t stride,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMultiEXT( m_commandBuffer,
                         drawCount,
                         reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ),
                         instanceCount,
                         firstInstance,
                         stride );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawMultiEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
                                 uint32_t instanceCount,
                                 uint32_t firstInstance,
                                 uint32_t stride,
                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMultiEXT( m_commandBuffer,
                         vertexInfo.size(),
                         reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
                         instanceCount,
                         firstInstance,
                         stride );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount,
                                        const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,
                                        uint32_t instanceCount,
                                        uint32_t firstInstance,
                                        uint32_t stride,
                                        const int32_t * pVertexOffset,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
                                drawCount,
                                reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ),
                                instanceCount,
                                firstInstance,
                                stride,
                                pVertexOffset );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawMultiIndexedEXT(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
      uint32_t instanceCount,
      uint32_t firstInstance,
      uint32_t stride,
      Optional<const int32_t> vertexOffset,
      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
                                indexInfo.size(),
                                reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
                                instanceCount,
                                firstInstance,
                                stride,
                                static_cast<const int32_t *>( vertexOffset ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

} // namespace VULKAN_HPP_NAMESPACE
#endif