// Copyright (C) 2018 The Android Open Source Project
// Copyright (C) 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include <vulkan/vulkan.h>

#ifdef __cplusplus
#include <algorithm>
#include <utility> // std::forward, used by DEFINE_ALIAS_FUNCTION below
extern "C" {
#endif

#define VK_ANDROID_native_buffer 1
#define VK_ANDROID_NATIVE_BUFFER_EXTENSION_NUMBER 11

/* NOTE ON VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION 6
 *
 * This version of the extension transitions from gralloc0 to gralloc1 usage
 * flags (int -> 2x uint64_t). The WSI implementation will temporarily continue
 * to fill out deprecated fields in VkNativeBufferANDROID, and will call the
 * deprecated vkGetSwapchainGrallocUsageANDROID if the new
 * vkGetSwapchainGrallocUsage2ANDROID is not supported. This transitionary
 * backwards-compatibility support is temporary, and will likely be removed
 * (along with all gralloc0 support) in a future release.
 */
#define VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION 7
#define VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME "VK_ANDROID_native_buffer"
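
// Illustrative sketch (not part of this header): how a WSI layer might query
// gralloc usage as described in the NOTE above, preferring the SPEC_VERSION 6
// entry point and falling back to the deprecated gralloc0 query. The local
// variable names and how the function pointers were resolved (e.g. via
// vkGetDeviceProcAddr) are assumptions for illustration only.
//
//   uint64_t consumerUsage = 0, producerUsage = 0;
//   int grallocUsage = 0;
//   if (pfnGetSwapchainGrallocUsage2ANDROID) {
//       pfnGetSwapchainGrallocUsage2ANDROID(device, format, imageUsage,
//                                           swapchainImageUsage,
//                                           &consumerUsage, &producerUsage);
//   } else if (pfnGetSwapchainGrallocUsageANDROID) {
//       // Deprecated gralloc0 path; kept only for backwards compatibility.
//       pfnGetSwapchainGrallocUsageANDROID(device, format, imageUsage,
//                                          &grallocUsage);
//   }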

#define VK_ANDROID_NATIVE_BUFFER_ENUM(type,id) ((type)(1000000000 + (1000 * (VK_ANDROID_NATIVE_BUFFER_EXTENSION_NUMBER - 1)) + (id)))
#define VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID VK_ANDROID_NATIVE_BUFFER_ENUM(VkStructureType, 0)
#define VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID VK_ANDROID_NATIVE_BUFFER_ENUM(VkStructureType, 1)
#define VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID VK_ANDROID_NATIVE_BUFFER_ENUM(VkStructureType, 2)

typedef enum VkSwapchainImageUsageFlagBitsANDROID {
    VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID = 0x00000001,
    VK_SWAPCHAIN_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkSwapchainImageUsageFlagBitsANDROID;
typedef VkFlags VkSwapchainImageUsageFlagsANDROID;

typedef struct {
    VkStructureType sType; // must be VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID
    const void* pNext;

    // Buffer handle and stride returned from gralloc alloc()
    const uint32_t* handle;
    int stride;

    // Gralloc format and usage requested when the buffer was allocated.
    int format;
    int usage; // DEPRECATED in SPEC_VERSION 6
    // -- Added in SPEC_VERSION 6 --
    uint64_t consumer;
    uint64_t producer;
} VkNativeBufferANDROID;
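
// Illustrative sketch (not part of this header): VkNativeBufferANDROID is
// chained into VkImageCreateInfo::pNext when the WSI layer creates a VkImage
// that wraps a gralloc buffer. The gralloc handle/stride values below are
// placeholders.
//
//   VkNativeBufferANDROID nativeBuffer = {};
//   nativeBuffer.sType  = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID;
//   nativeBuffer.handle = grallocHandle;   // from gralloc alloc()
//   nativeBuffer.stride = grallocStride;
//
//   VkImageCreateInfo imageCreateInfo = {};
//   imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
//   imageCreateInfo.pNext = &nativeBuffer;
//   // ... fill out the rest of imageCreateInfo, then vkCreateImage(...).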

typedef struct {
    VkStructureType sType; // must be VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID
    const void* pNext;

    VkSwapchainImageUsageFlagsANDROID usage;
} VkSwapchainImageCreateInfoANDROID;

typedef struct {
    VkStructureType sType; // must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID
    const void* pNext;

    VkBool32 sharedImage;
} VkPhysicalDevicePresentationPropertiesANDROID;

// -- DEPRECATED in SPEC_VERSION 6 --
typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainGrallocUsageANDROID)(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, int* grallocUsage);
// -- ADDED in SPEC_VERSION 6 --
typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainGrallocUsage2ANDROID)(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage, uint64_t* grallocProducerUsage);
typedef VkResult (VKAPI_PTR *PFN_vkAcquireImageANDROID)(VkDevice device, VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence);
typedef VkResult (VKAPI_PTR *PFN_vkQueueSignalReleaseImageANDROID)(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore* pWaitSemaphores, VkImage image, int* pNativeFenceFd);
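
// Illustrative sketch (not part of this header): the acquire/release pair as a
// WSI layer might drive it. vkAcquireImageANDROID imports a native fence fd
// into a semaphore and/or fence; vkQueueSignalReleaseImageANDROID exports a
// native fence fd that signals once the waits complete. Function-pointer and
// variable names are assumptions.
//
//   // Hand ownership of acquireFenceFd to the driver; acquireSemaphore (and
//   // optionally a VkFence) signals once the buffer is ready for rendering.
//   pfnAcquireImageANDROID(device, image, acquireFenceFd,
//                          acquireSemaphore, VK_NULL_HANDLE);
//
//   // After submitting rendering work that signals renderSemaphore, get a
//   // fence fd to pass back to the compositor with the presented buffer.
//   int releaseFenceFd = -1;
//   pfnQueueSignalReleaseImageANDROID(queue, 1, &renderSemaphore,
//                                     image, &releaseFenceFd);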

typedef VkResult (VKAPI_PTR *PFN_vkMapMemoryIntoAddressSpaceGOOGLE)(VkDevice device, VkDeviceMemory memory, uint64_t* pAddress);

#define VK_GOOGLE_gfxstream 1
#define VK_GOOGLE_GFXSTREAM_EXTENSION_NUMBER 386

#define VK_GOOGLE_GFXSTREAM_ENUM(type,id) ((type)(1000000000 + (1000 * (VK_GOOGLE_GFXSTREAM_EXTENSION_NUMBER - 1)) + (id)))
#define VK_STRUCTURE_TYPE_IMPORT_COLOR_BUFFER_GOOGLE VK_GOOGLE_GFXSTREAM_ENUM(VkStructureType, 0)
#define VK_STRUCTURE_TYPE_IMPORT_PHYSICAL_ADDRESS_GOOGLE VK_GOOGLE_GFXSTREAM_ENUM(VkStructureType, 1)
#define VK_STRUCTURE_TYPE_IMPORT_BUFFER_GOOGLE VK_GOOGLE_GFXSTREAM_ENUM(VkStructureType, 2)

typedef struct {
    VkStructureType sType; // must be VK_STRUCTURE_TYPE_IMPORT_COLOR_BUFFER_GOOGLE
    const void* pNext;
    uint32_t colorBuffer;
} VkImportColorBufferGOOGLE;

typedef struct {
    VkStructureType sType; // must be VK_STRUCTURE_TYPE_IMPORT_PHYSICAL_ADDRESS_GOOGLE
    const void* pNext;
    uint64_t physicalAddress;
    VkDeviceSize size;
    VkFormat format;
    VkImageTiling tiling;
    uint32_t tilingParameter;
} VkImportPhysicalAddressGOOGLE;

typedef struct {
    VkStructureType sType; // must be VK_STRUCTURE_TYPE_IMPORT_BUFFER_GOOGLE
    const void* pNext;
    uint32_t buffer;
} VkImportBufferGOOGLE;
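
// Illustrative sketch (not part of this header): in the gfxstream guest/host
// protocol these structs are chained into VkMemoryAllocateInfo::pNext so the
// host can back the allocation with an existing color buffer or buffer object.
// Whether your build chains them this way is an assumption; handle values are
// placeholders.
//
//   VkImportColorBufferGOOGLE importInfo = {};
//   importInfo.sType       = VK_STRUCTURE_TYPE_IMPORT_COLOR_BUFFER_GOOGLE;
//   importInfo.colorBuffer = colorBufferHandle; // host-side color buffer id
//
//   VkMemoryAllocateInfo allocInfo = {};
//   allocInfo.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
//   allocInfo.pNext           = &importInfo;
//   allocInfo.allocationSize  = size;
//   allocInfo.memoryTypeIndex = memoryTypeIndex;
//   vkAllocateMemory(device, &allocInfo, nullptr, &memory);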

typedef VkResult (VKAPI_PTR *PFN_vkRegisterImageColorBufferGOOGLE)(VkDevice device, VkImage image, uint32_t colorBuffer);
typedef VkResult (VKAPI_PTR *PFN_vkRegisterBufferColorBufferGOOGLE)(VkDevice device, VkBuffer buffer, uint32_t colorBuffer);

typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostAddressInfoGOOGLE)(VkDevice device, VkDeviceMemory memory, uint64_t* pAddress, uint64_t* pSize);

typedef VkResult (VKAPI_PTR *PFN_vkFreeMemorySyncGOOGLE)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocationCallbacks);

#define VK_ANDROID_external_memory_android_hardware_buffer 1
struct AHardwareBuffer;
struct VkAndroidHardwareBufferPropertiesANDROID;
struct VkMemoryGetAndroidHardwareBufferInfoANDROID;

#ifdef __Fuchsia__

typedef struct VkAndroidHardwareBufferUsageANDROID {
    VkStructureType sType;
    void* pNext;
    uint64_t androidHardwareBufferUsage;
} VkAndroidHardwareBufferUsageANDROID;

typedef struct VkAndroidHardwareBufferPropertiesANDROID {
    VkStructureType sType;
    void* pNext;
    VkDeviceSize allocationSize;
    uint32_t memoryTypeBits;
} VkAndroidHardwareBufferPropertiesANDROID;

typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID {
    VkStructureType sType;
    void* pNext;
    VkFormat format;
    uint64_t externalFormat;
    VkFormatFeatureFlags formatFeatures;
    VkComponentMapping samplerYcbcrConversionComponents;
    VkSamplerYcbcrModelConversion suggestedYcbcrModel;
    VkSamplerYcbcrRange suggestedYcbcrRange;
    VkChromaLocation suggestedXChromaOffset;
    VkChromaLocation suggestedYChromaOffset;
} VkAndroidHardwareBufferFormatPropertiesANDROID;

typedef struct VkImportAndroidHardwareBufferInfoANDROID {
    VkStructureType sType;
    const void* pNext;
    struct AHardwareBuffer* buffer;
} VkImportAndroidHardwareBufferInfoANDROID;

typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID {
    VkStructureType sType;
    const void* pNext;
    VkDeviceMemory memory;
} VkMemoryGetAndroidHardwareBufferInfoANDROID;

typedef struct VkExternalFormatANDROID {
    VkStructureType sType;
    void* pNext;
    uint64_t externalFormat;
} VkExternalFormatANDROID;


typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties);
typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer);

#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device,
    const struct AHardwareBuffer* buffer,
    VkAndroidHardwareBufferPropertiesANDROID* pProperties);

VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(
    VkDevice device,
    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
    struct AHardwareBuffer** pBuffer);
#endif
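
// Illustrative sketch (not part of this header): querying the memory
// requirements of an AHardwareBuffer before importing it. Obtaining the
// AHardwareBuffer (e.g. via AHardwareBuffer_allocate) and the sType constant
// for the properties struct come from the platform headers, not from here,
// and are assumed to be available.
//
//   VkAndroidHardwareBufferPropertiesANDROID props = {};
//   props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
//   vkGetAndroidHardwareBufferPropertiesANDROID(device, ahardwareBuffer, &props);
//   // props.allocationSize and props.memoryTypeBits then feed into
//   // VkMemoryAllocateInfo together with VkImportAndroidHardwareBufferInfoANDROID.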

/**
 * Buffer pixel formats.
 */
enum AHardwareBuffer_Format {
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_R8G8B8A8_UNORM
     * OpenGL ES: GL_RGBA8
     */
    AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM = 1,
    /**
     * 32 bits per pixel, 8 bits per channel format where alpha values are
     * ignored (always opaque).
     * Corresponding formats:
     * Vulkan: VK_FORMAT_R8G8B8A8_UNORM
     * OpenGL ES: GL_RGB8
     */
    AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM = 2,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_R8G8B8_UNORM
     * OpenGL ES: GL_RGB8
     */
    AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM = 3,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_R5G6B5_UNORM_PACK16
     * OpenGL ES: GL_RGB565
     */
    AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM = 4,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_R16G16B16A16_SFLOAT
     * OpenGL ES: GL_RGBA16F
     */
    AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT = 0x16,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_A2B10G10R10_UNORM_PACK32
     * OpenGL ES: GL_RGB10_A2
     */
    AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM = 0x2b,
    /**
     * Opaque binary blob format.
     * Must have height 1 and one layer, with width equal to the buffer
     * size in bytes. Corresponds to Vulkan buffers and OpenGL buffer
     * objects. Can be bound to the latter using GL_EXT_external_buffer.
     */
    AHARDWAREBUFFER_FORMAT_BLOB = 0x21,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_D16_UNORM
     * OpenGL ES: GL_DEPTH_COMPONENT16
     */
    AHARDWAREBUFFER_FORMAT_D16_UNORM = 0x30,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_X8_D24_UNORM_PACK32
     * OpenGL ES: GL_DEPTH_COMPONENT24
     */
    AHARDWAREBUFFER_FORMAT_D24_UNORM = 0x31,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_D24_UNORM_S8_UINT
     * OpenGL ES: GL_DEPTH24_STENCIL8
     */
    AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT = 0x32,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_D32_SFLOAT
     * OpenGL ES: GL_DEPTH_COMPONENT32F
     */
    AHARDWAREBUFFER_FORMAT_D32_FLOAT = 0x33,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_D32_SFLOAT_S8_UINT
     * OpenGL ES: GL_DEPTH32F_STENCIL8
     */
    AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT = 0x34,
    /**
     * Corresponding formats:
     * Vulkan: VK_FORMAT_S8_UINT
     * OpenGL ES: GL_STENCIL_INDEX8
     */
    AHARDWAREBUFFER_FORMAT_S8_UINT = 0x35,
    /**
     * YUV 420 888 format.
     * Must have an even width and height. Can be accessed in OpenGL
     * shaders through an external sampler. Does not support mip-maps,
     * cube-maps or multi-layered textures.
     */
    AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420 = 0x23,
};
/**
 * Buffer usage flags, specifying how the buffer will be accessed.
 */
enum AHardwareBuffer_UsageFlags {
    /// The buffer will never be locked for direct CPU reads using the
    /// AHardwareBuffer_lock() function. Note that reading the buffer
    /// using OpenGL or Vulkan functions or memory mappings is still
    /// allowed.
    AHARDWAREBUFFER_USAGE_CPU_READ_NEVER = 0UL,
    /// The buffer will sometimes be locked for direct CPU reads using
    /// the AHardwareBuffer_lock() function. Note that reading the
    /// buffer using OpenGL or Vulkan functions or memory mappings
    /// does not require the presence of this flag.
    AHARDWAREBUFFER_USAGE_CPU_READ_RARELY = 2UL,
    /// The buffer will often be locked for direct CPU reads using
    /// the AHardwareBuffer_lock() function. Note that reading the
    /// buffer using OpenGL or Vulkan functions or memory mappings
    /// does not require the presence of this flag.
    AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN = 3UL,
    /// CPU read value mask.
    AHARDWAREBUFFER_USAGE_CPU_READ_MASK = 0xFUL,
    /// The buffer will never be locked for direct CPU writes using the
    /// AHardwareBuffer_lock() function. Note that writing the buffer
    /// using OpenGL or Vulkan functions or memory mappings is still
    /// allowed.
    AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER = 0UL << 4,
    /// The buffer will sometimes be locked for direct CPU writes using
    /// the AHardwareBuffer_lock() function. Note that writing the
    /// buffer using OpenGL or Vulkan functions or memory mappings
    /// does not require the presence of this flag.
    AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY = 2UL << 4,
    /// The buffer will often be locked for direct CPU writes using
    /// the AHardwareBuffer_lock() function. Note that writing the
    /// buffer using OpenGL or Vulkan functions or memory mappings
    /// does not require the presence of this flag.
    AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN = 3UL << 4,
    /// CPU write value mask.
    AHARDWAREBUFFER_USAGE_CPU_WRITE_MASK = 0xFUL << 4,
    /// The buffer will be read from by the GPU as a texture.
    AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE = 1UL << 8,
    /**
     * The buffer will be written to by the GPU as a framebuffer
     * attachment.
     *
     * Note that the name of this flag is somewhat misleading: it does
     * not imply that the buffer contains a color format. A buffer with
     * depth or stencil format that will be used as a framebuffer
     * attachment should also have this flag.
     */
    AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT = 1UL << 9,
    /**
     * The buffer is protected from direct CPU access or being read by
     * non-secure hardware, such as video encoders.
     *
     * This flag is incompatible with CPU read and write flags. It is
     * mainly used when handling DRM video. Refer to the EGL extension
     * EGL_EXT_protected_content and GL extension
     * GL_EXT_protected_textures for more information on how these
     * buffers are expected to behave.
     */
    AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT = 1UL << 14,
    /// The buffer will be read by a hardware video encoder.
    AHARDWAREBUFFER_USAGE_VIDEO_ENCODE = 1UL << 16,
    /**
     * The buffer will be used for direct writes from sensors.
     * When this flag is present, the format must be AHARDWAREBUFFER_FORMAT_BLOB.
     */
    AHARDWAREBUFFER_USAGE_SENSOR_DIRECT_DATA = 1UL << 23,
    /**
     * The buffer will be used as a shader storage or uniform buffer object.
     * When this flag is present, the format must be AHARDWAREBUFFER_FORMAT_BLOB.
     */
    AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER = 1UL << 24,
    /**
     * The buffer will be used as a cube map texture.
     * When this flag is present, the buffer must have a layer count
     * that is a multiple of 6. Note that buffers with this flag must be
     * bound to OpenGL textures using the extension
     * GL_EXT_EGL_image_storage instead of GL_KHR_EGL_image.
     */
    AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP = 1UL << 25,
    /**
     * The buffer contains a complete mipmap hierarchy.
     * Note that buffers with this flag must be bound to OpenGL textures using
     * the extension GL_EXT_EGL_image_storage instead of GL_KHR_EGL_image.
     */
    AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE = 1UL << 26,
    AHARDWAREBUFFER_USAGE_VENDOR_0 = 1ULL << 28,
    AHARDWAREBUFFER_USAGE_VENDOR_1 = 1ULL << 29,
    AHARDWAREBUFFER_USAGE_VENDOR_2 = 1ULL << 30,
    AHARDWAREBUFFER_USAGE_VENDOR_3 = 1ULL << 31,
    AHARDWAREBUFFER_USAGE_VENDOR_4 = 1ULL << 48,
    AHARDWAREBUFFER_USAGE_VENDOR_5 = 1ULL << 49,
    AHARDWAREBUFFER_USAGE_VENDOR_6 = 1ULL << 50,
    AHARDWAREBUFFER_USAGE_VENDOR_7 = 1ULL << 51,
    AHARDWAREBUFFER_USAGE_VENDOR_8 = 1ULL << 52,
    AHARDWAREBUFFER_USAGE_VENDOR_9 = 1ULL << 53,
    AHARDWAREBUFFER_USAGE_VENDOR_10 = 1ULL << 54,
    AHARDWAREBUFFER_USAGE_VENDOR_11 = 1ULL << 55,
    AHARDWAREBUFFER_USAGE_VENDOR_12 = 1ULL << 56,
    AHARDWAREBUFFER_USAGE_VENDOR_13 = 1ULL << 57,
    AHARDWAREBUFFER_USAGE_VENDOR_14 = 1ULL << 58,
    AHARDWAREBUFFER_USAGE_VENDOR_15 = 1ULL << 59,
    AHARDWAREBUFFER_USAGE_VENDOR_16 = 1ULL << 60,
    AHARDWAREBUFFER_USAGE_VENDOR_17 = 1ULL << 61,
    AHARDWAREBUFFER_USAGE_VENDOR_18 = 1ULL << 62,
    AHARDWAREBUFFER_USAGE_VENDOR_19 = 1ULL << 63,
};
/**
 * Buffer description. Used for allocating new buffers and querying
 * parameters of existing ones.
 */
typedef struct AHardwareBuffer_Desc {
    uint32_t width;  ///< Width in pixels.
    uint32_t height; ///< Height in pixels.
    /**
     * Number of images in an image array. AHardwareBuffers with one
     * layer correspond to regular 2D textures. AHardwareBuffers with
     * more than one layer correspond to texture arrays. If the layer count
     * is a multiple of 6 and the usage flag
     * AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP is present, the buffer is
     * a cube map or a cube map array.
     */
    uint32_t layers;
    uint32_t format; ///< One of AHardwareBuffer_Format.
    uint64_t usage;  ///< Combination of AHardwareBuffer_UsageFlags.
    uint32_t stride; ///< Row stride in pixels, ignored for AHardwareBuffer_allocate().
    uint32_t rfu0;   ///< Initialize to zero, reserved for future use.
    uint64_t rfu1;   ///< Initialize to zero, reserved for future use.
} AHardwareBuffer_Desc;
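
// Illustrative sketch (not part of this header): filling out an
// AHardwareBuffer_Desc for a GPU-sampled, occasionally CPU-written RGBA8
// buffer. The dimensions are placeholders; the allocation call itself
// (AHardwareBuffer_allocate) lives in the NDK, not in this header.
//
//   AHardwareBuffer_Desc desc = {};
//   desc.width  = 1920;
//   desc.height = 1080;
//   desc.layers = 1;
//   desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
//   desc.usage  = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
//                 AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;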

#endif // __Fuchsia__

#define VK_GOOGLE_sized_descriptor_update_template 1

typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateSizedGOOGLE)(
    VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate,
    uint32_t imageInfoCount,
    uint32_t bufferInfoCount,
    uint32_t bufferViewCount,
    const uint32_t* pImageInfoEntryIndices,
    const uint32_t* pBufferInfoEntryIndices,
    const uint32_t* pBufferViewEntryIndices,
    const VkDescriptorImageInfo* pImageInfos,
    const VkDescriptorBufferInfo* pBufferInfos,
    const VkBufferView* pBufferViews);
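
// Illustrative note (not part of this header): this is a sized variant of
// vkUpdateDescriptorSetWithTemplate. Instead of an opaque pData blob, the
// caller passes explicit counts plus parallel arrays of entry indices
// (assumed here to refer to descriptor update template entries) and
// descriptor infos. A minimal sketch, assuming a single image entry:
//
//   VkDescriptorImageInfo imageInfo = { sampler, imageView,
//                                       VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL };
//   uint32_t imageEntryIndex = 0;
//   pfnUpdateDescriptorSetWithTemplateSizedGOOGLE(
//       device, descriptorSet, descriptorUpdateTemplate,
//       /*imageInfoCount=*/1, /*bufferInfoCount=*/0, /*bufferViewCount=*/0,
//       &imageEntryIndex, nullptr, nullptr,
//       &imageInfo, nullptr, nullptr);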

#define VK_GOOGLE_async_command_buffers 1

typedef void (VKAPI_PTR *PFN_vkBeginCommandBufferAsyncGOOGLE)(
    VkCommandBuffer commandBuffer,
    const VkCommandBufferBeginInfo* pBeginInfo);
typedef void (VKAPI_PTR *PFN_vkEndCommandBufferAsyncGOOGLE)(
    VkCommandBuffer commandBuffer);
typedef void (VKAPI_PTR *PFN_vkResetCommandBufferAsyncGOOGLE)(
    VkCommandBuffer commandBuffer,
    VkCommandBufferResetFlags flags);
typedef void (VKAPI_PTR *PFN_vkCommandBufferHostSyncGOOGLE)(
    VkCommandBuffer commandBuffer,
    uint32_t needHostSync,
    uint32_t sequenceNumber);

typedef void (VKAPI_PTR *PFN_vkCreateImageWithRequirementsGOOGLE)(
    VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage, VkMemoryRequirements* pMemoryRequirements);
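
// Illustrative sketch (not part of this header): the "with requirements"
// entry point combines vkCreateImage and vkGetImageMemoryRequirements into a
// single call, saving a guest/host round trip. The function-pointer name is
// an assumption.
//
//   VkImage image = VK_NULL_HANDLE;
//   VkMemoryRequirements memReqs = {};
//   pfnCreateImageWithRequirementsGOOGLE(device, &imageCreateInfo, nullptr,
//                                        &image, &memReqs);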

#ifndef VK_FUCHSIA_buffer_collection
#define VK_FUCHSIA_buffer_collection 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA)

#define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 1
#define VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME "VK_FUCHSIA_buffer_collection"

typedef struct VkBufferCollectionCreateInfoFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    uint32_t collectionToken;
} VkBufferCollectionCreateInfoFUCHSIA;

typedef struct VkImportMemoryBufferCollectionFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    VkBufferCollectionFUCHSIA collection;
    uint32_t index;
} VkImportMemoryBufferCollectionFUCHSIA;

typedef struct VkBufferCollectionImageCreateInfoFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    VkBufferCollectionFUCHSIA collection;
    uint32_t index;
} VkBufferCollectionImageCreateInfoFUCHSIA;

typedef struct VkBufferCollectionBufferCreateInfoFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    VkBufferCollectionFUCHSIA collection;
    uint32_t index;
} VkBufferCollectionBufferCreateInfoFUCHSIA;

typedef struct VkBufferCollectionPropertiesFUCHSIA {
    VkStructureType sType;
    void* pNext;
    uint32_t memoryTypeBits;
    uint32_t count;
} VkBufferCollectionPropertiesFUCHSIA;

#define VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA \
    ((VkStructureType)1001004004)
#define VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA \
    ((VkStructureType)1001004005)
#define VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA \
    ((VkStructureType)1001004008)
#endif // VK_FUCHSIA_buffer_collection
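
// Illustrative sketch (not part of this header): tying a VkImage to one entry
// of a Fuchsia buffer collection by chaining the create-info struct above into
// VkImageCreateInfo. Creating the collection itself (from a sysmem token) is
// done through the driver's VK_FUCHSIA_buffer_collection entry points, which
// this header does not declare; the handle below is assumed to already exist.
//
//   VkBufferCollectionImageCreateInfoFUCHSIA collectionImageInfo = {};
//   collectionImageInfo.sType =
//       VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA;
//   collectionImageInfo.collection = bufferCollection;
//   collectionImageInfo.index      = 0; // which buffer in the collection
//
//   VkImageCreateInfo imageCreateInfo = {};
//   imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
//   imageCreateInfo.pNext = &collectionImageInfo;
//   // ... fill out the rest, then vkCreateImage(...).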

#ifndef VK_FUCHSIA_external_memory
#define VK_FUCHSIA_external_memory 1
#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1
#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory"

typedef struct VkBufferConstraintsInfoFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    const VkBufferCreateInfo* pBufferCreateInfo;
    VkFormatFeatureFlags requiredFormatFeatures;
    uint32_t minCount;
} VkBufferConstraintsInfoFUCHSIA;

typedef struct VkImportMemoryZirconHandleInfoFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    VkExternalMemoryHandleTypeFlagBits handleType;
    uint32_t handle;
} VkImportMemoryZirconHandleInfoFUCHSIA;

typedef struct VkMemoryZirconHandlePropertiesFUCHSIA {
    VkStructureType sType;
    void* pNext;
    uint32_t memoryTypeBits;
} VkMemoryZirconHandlePropertiesFUCHSIA;

typedef struct VkMemoryGetZirconHandleInfoFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    VkDeviceMemory memory;
    VkExternalMemoryHandleTypeFlagBits handleType;
} VkMemoryGetZirconHandleInfoFUCHSIA;

#define VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA \
    ((VkStructureType)1001004008)

#if VK_HEADER_VERSION < 174
#define VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA \
    ((VkStructureType)1000364000)
#define VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA \
    ((VkExternalMemoryHandleTypeFlagBits)0x00000800)
#endif

// Deprecated
#define VK_STRUCTURE_TYPE_TEMP_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA \
    ((VkStructureType)1001005000)
#define VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA \
    ((VkExternalMemoryHandleTypeFlagBits)0x00100000)

#else // VK_FUCHSIA_external_memory

// For backward compatibility
#if VK_HEADER_VERSION >= 174
#define VK_STRUCTURE_TYPE_TEMP_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA \
    ((VkStructureType)1001005000)
#define VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA \
    ((VkExternalMemoryHandleTypeFlagBits)0x00100000)
#endif // VK_HEADER_VERSION >= 174

// For forward compatibility
#ifndef VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA
#define VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA ((VkStructureType)1000364000)
#endif // VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA

// For forward compatibility
#ifndef VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA
#define VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA \
    ((VkExternalMemoryHandleTypeFlagBits)0x00000800)
#endif // VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA

#endif // VK_FUCHSIA_external_memory
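
// Illustrative sketch (not part of this header): importing a Zircon VMO into
// device memory by chaining the import struct into VkMemoryAllocateInfo. The
// VMO handle and sizing values are placeholders; which handle-type constant
// applies (the final ZIRCON_VMO bit or the deprecated TEMP variant) depends
// on the header version, as the #if blocks above show.
//
//   VkImportMemoryZirconHandleInfoFUCHSIA importInfo = {};
//   importInfo.sType      = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA;
//   importInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA;
//   importInfo.handle     = vmoHandle;
//
//   VkMemoryAllocateInfo allocInfo = {};
//   allocInfo.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
//   allocInfo.pNext           = &importInfo;
//   allocInfo.allocationSize  = size;
//   allocInfo.memoryTypeIndex = memoryTypeIndex;
//   vkAllocateMemory(device, &allocInfo, nullptr, &memory);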


#ifndef VK_FUCHSIA_external_semaphore
#define VK_FUCHSIA_external_semaphore 1
#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1
#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore"

typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    VkSemaphore semaphore;
    VkSemaphoreImportFlags flags;
    VkExternalSemaphoreHandleTypeFlagBits handleType;
#if VK_HEADER_VERSION < 174
    uint32_t handle;
#else // VK_HEADER_VERSION >= 174
    uint32_t zirconHandle;
#endif // VK_HEADER_VERSION < 174
} VkImportSemaphoreZirconHandleInfoFUCHSIA;

typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA {
    VkStructureType sType;
    const void* pNext;
    VkSemaphore semaphore;
    VkExternalSemaphoreHandleTypeFlagBits handleType;
} VkSemaphoreGetZirconHandleInfoFUCHSIA;

#if VK_HEADER_VERSION < 174
#define VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA \
    ((VkExternalSemaphoreHandleTypeFlagBits)0x00000080)
#endif

// Deprecated
#define VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA \
    ((VkExternalSemaphoreHandleTypeFlagBits)0x00100000)

#else // VK_FUCHSIA_external_semaphore

// For backward compatibility
#if VK_HEADER_VERSION >= 174
#define VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA \
    ((VkExternalSemaphoreHandleTypeFlagBits)0x00100000)
#endif // VK_HEADER_VERSION >= 174

// For forward compatibility
#ifndef VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA
#define VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA \
    ((VkExternalSemaphoreHandleTypeFlagBits)0x00000080)
#endif // VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA

#endif // VK_FUCHSIA_external_semaphore
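
// Illustrative sketch (not part of this header): exporting a Zircon event
// handle from a semaphore. The vkGetSemaphoreZirconHandleFUCHSIA entry point
// and its sType constant are declared by the platform Vulkan headers, not
// here, so treat this purely as an illustration of how the struct above is
// filled in.
//
//   VkSemaphoreGetZirconHandleInfoFUCHSIA getHandleInfo = {};
//   getHandleInfo.sType      = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA;
//   getHandleInfo.semaphore  = semaphore;
//   getHandleInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA;
//   uint32_t zirconEvent = 0;
//   vkGetSemaphoreZirconHandleFUCHSIA(device, &getHandleInfo, &zirconEvent);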


// VulkanStream features
#define VULKAN_STREAM_FEATURE_NULL_OPTIONAL_STRINGS_BIT (1 << 0)
#define VULKAN_STREAM_FEATURE_IGNORED_HANDLES_BIT (1 << 1)
#define VULKAN_STREAM_FEATURE_SHADER_FLOAT16_INT8_BIT (1 << 2)
#define VULKAN_STREAM_FEATURE_QUEUE_SUBMIT_WITH_COMMANDS_BIT (1 << 3)
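
// Illustrative sketch (not part of this header): these bits form a feature
// mask shared between the guest encoder and host decoder. How the mask is
// obtained is implementation-specific; the variable below is a placeholder.
//
//   uint32_t streamFeatures = /* negotiated elsewhere */ 0;
//   bool nullOptionalStrings =
//       (streamFeatures & VULKAN_STREAM_FEATURE_NULL_OPTIONAL_STRINGS_BIT) != 0;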

#define VK_YCBCR_CONVERSION_DO_NOTHING ((VkSamplerYcbcrConversion)0x1111111111111111)

// Structs we advertised but hadn't defined yet, because we also needed to
// update our Vulkan headers and XML.

#ifndef VK_VERSION_1_2

typedef struct VkPhysicalDeviceShaderFloat16Int8Features {
    VkStructureType sType;
    void* pNext;
    VkBool32 shaderFloat16;
    VkBool32 shaderInt8;
} VkPhysicalDeviceShaderFloat16Int8Features;


#define VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES \
    ((VkStructureType)1000082000)

#endif

#define VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR \
    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES

#define VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR \
    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES

#ifndef VK_KHR_shader_float16_int8

#define VK_KHR_shader_float16_int8 1
#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1
#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8"
typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR;
typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR;

#endif
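
// Illustrative sketch (not part of this header): querying these features via
// the standard vkGetPhysicalDeviceFeatures2 chain. The aliases above make this
// work whether or not the surrounding Vulkan headers already provide the
// promoted names.
//
//   VkPhysicalDeviceShaderFloat16Int8Features float16Int8Features = {};
//   float16Int8Features.sType =
//       VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES;
//
//   VkPhysicalDeviceFeatures2 features2 = {};
//   features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
//   features2.pNext = &float16Int8Features;
//   vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
//   // float16Int8Features.shaderFloat16 / shaderInt8 now report support.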

#define VK_GOOGLE_gfxstream 1

typedef void (VKAPI_PTR *PFN_vkQueueHostSyncGOOGLE)(
    VkQueue queue, uint32_t needHostSync, uint32_t sequenceNumber);
typedef void (VKAPI_PTR *PFN_vkQueueSubmitAsyncGOOGLE)(
    VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
typedef void (VKAPI_PTR *PFN_vkQueueWaitIdleAsyncGOOGLE)(VkQueue queue);
typedef void (VKAPI_PTR *PFN_vkQueueBindSparseAsyncGOOGLE)(
    VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence);

typedef VkResult (VKAPI_PTR *PFN_vkGetLinearImageLayoutGOOGLE)(VkDevice device, VkFormat format, VkDeviceSize* pOffset, VkDeviceSize* pRowPitchAlignment);

typedef void (VKAPI_PTR *PFN_vkQueueFlushCommandsGOOGLE)(VkQueue queue, VkDeviceSize dataSize, const void* pData);
typedef void (VKAPI_PTR *PFN_vkQueueCommitDescriptorSetUpdatesGOOGLE)(VkQueue queue, uint32_t descriptorPoolCount, const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount, const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds, const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation, const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount, const VkWriteDescriptorSet* pPendingDescriptorWrites);
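
// Illustrative sketch (not part of this header): the async queue entry points
// mirror their core Vulkan counterparts but return no result, letting the
// guest continue without waiting on the host. A hedged example for the
// linear-layout query, whose out-parameters are assumed to describe how the
// host lays out linear images of the given format:
//
//   VkDeviceSize offset = 0, rowPitchAlignment = 0;
//   pfnGetLinearImageLayoutGOOGLE(device, VK_FORMAT_R8G8B8A8_UNORM,
//                                 &offset, &rowPitchAlignment);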

#ifdef __cplusplus
} // extern "C"
#endif

#ifdef __cplusplus

template<class T, typename F>
bool arrayany(const T* arr, uint32_t begin, uint32_t end, const F& func) {
    const T* e = arr + end;
    return std::find_if(arr + begin, e, func) != e;
}
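
// Usage sketch (illustrative): returns true if any element in arr[begin, end)
// satisfies the predicate.
//
//   VkFormat formats[] = { VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_D32_SFLOAT };
//   bool hasDepth = arrayany(formats, 0, 2,
//                            [](VkFormat f) { return f == VK_FORMAT_D32_SFLOAT; });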

#define DEFINE_ALIAS_FUNCTION(ORIGINAL_FN, ALIAS_FN) \
template <typename... Args> \
inline auto ALIAS_FN(Args&&... args) -> decltype(ORIGINAL_FN(std::forward<Args>(args)...)) { \
    return ORIGINAL_FN(std::forward<Args>(args)...); \
}
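
// Usage sketch (illustrative): declares a perfect-forwarding wrapper so an
// alias name dispatches to an existing function. Both names below are
// placeholders.
//
//   DEFINE_ALIAS_FUNCTION(originalFunction, aliasFunction)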

#endif
