/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <hardware/hwvulkan.h>

#include <errno.h>
#include <inttypes.h>
#include <malloc.h>  // malloc_usable_size
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include <algorithm>
#include <array>

#include <log/log.h>

#include "null_driver_gen.h"

using namespace null_driver;

struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkPhysicalDevice_T physical_device;
    uint64_t next_callback_handle;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};

namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = non-zero handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDebugReportCallbackEXT,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType

const VkDeviceSize kMaxDeviceMemory = 0x10000000;  // 256 MiB, arbitrary

}  // anonymous namespace

struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkInstance_T* instance;
    VkQueue_T queue;
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};

// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.

namespace {
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop

// -----------------------------------------------------------------------------

namespace {

int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}

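// Only the three bootstrap entrypoints below are exported through the HAL
// device struct; every other driver function is resolved at runtime via
// GetInstanceProcAddr.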
hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};

int OpenDevice(const hw_module_t* /*module*/,
               const char* id,
               hw_device_t** device) {
    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
        *device = &nulldrv_device.common;
        return 0;
    }
    return -ENOENT;
}

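// The physical device object is embedded directly in its VkInstance_T, so the
// owning instance can be recovered by subtracting the member offset from the
// embedded object's address.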
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}

uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) {
    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
    ALOGE_IF(*next_handle == kHandleMask,
             "non-dispatchable handles of type=%" PRIu64
             " are about to overflow",
             type);
    return (UINT64_C(1) << 63) | ((type & 0x7) << 56) |
           ((*next_handle)++ & kHandleMask);
}

template <class Handle>
Handle AllocHandle(VkInstance instance, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &instance->next_callback_handle));
}

template <class Handle>
Handle AllocHandle(VkDevice device, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &device->next_handle[type]));
}
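
// Usage sketch: device-scoped objects draw from the per-type counters in
// VkDevice_T, while the instance-scoped overload (used only for debug report
// callbacks) draws from the single counter in VkInstance_T, e.g.
//   *fence = AllocHandle<VkFence>(device, HandleType::kFence);
//   *callback = AllocHandle<VkDebugReportCallbackEXT>(
//       instance, HandleType::kDebugReportCallbackEXT);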

VKAPI_ATTR void* DefaultAllocate(void*,
                                 size_t size,
                                 size_t alignment,
                                 VkSystemAllocationScope) {
    void* ptr = nullptr;
    // Vulkan requires 'alignment' to be a power of two, but posix_memalign
    // additionally requires that it be at least sizeof(void*).
    int ret = posix_memalign(&ptr, std::max(alignment, sizeof(void*)), size);
    return ret == 0 ? ptr : nullptr;
}

VKAPI_ATTR void* DefaultReallocate(void*,
                                   void* ptr,
                                   size_t size,
                                   size_t alignment,
                                   VkSystemAllocationScope) {
    if (size == 0) {
        free(ptr);
        return nullptr;
    }

    // TODO(jessehall): Right now we never shrink allocations; if the new
    // request is smaller than the existing chunk, we just continue using it.
    // The null driver never reallocs, so this doesn't matter. If that changes,
    // or if this code is copied into some other project, this should probably
    // have a heuristic to allocate-copy-free when doing so will save "enough"
    // space.
    size_t old_size = ptr ? malloc_usable_size(ptr) : 0;
    if (size <= old_size)
        return ptr;

    void* new_ptr = nullptr;
    if (posix_memalign(&new_ptr, std::max(alignment, sizeof(void*)), size) != 0)
        return nullptr;
    if (ptr) {
        memcpy(new_ptr, ptr, std::min(old_size, size));
        free(ptr);
    }
    return new_ptr;
}

VKAPI_ATTR void DefaultFree(void*, void* ptr) {
    free(ptr);
}

const VkAllocationCallbacks kDefaultAllocCallbacks = {
    .pUserData = nullptr,
    .pfnAllocation = DefaultAllocate,
    .pfnReallocation = DefaultReallocate,
    .pfnFree = DefaultFree,
};

}  // namespace

namespace null_driver {

#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
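
// For example, DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool) generates (forward
// declarations elided):
//   CommandPool* GetCommandPoolFromHandle(VkCommandPool h) {
//       return reinterpret_cast<CommandPool*>(uintptr_t(h));
//   }
//   VkCommandPool GetHandleToCommandPool(const CommandPool* obj) {
//       return VkCommandPool(reinterpret_cast<uintptr_t>(obj));
//   }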

// -----------------------------------------------------------------------------
// Global

VKAPI_ATTR
VkResult EnumerateInstanceVersion(uint32_t* pApiVersion) {
    *pApiVersion = VK_API_VERSION_1_1;
    return VK_SUCCESS;
}

VKAPI_ATTR
VkResult EnumerateInstanceExtensionProperties(
    const char* layer_name,
    uint32_t* count,
    VkExtensionProperties* properties) {
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateInstanceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
    }

    const VkExtensionProperties kExtensions[] = {
        {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

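    // Standard Vulkan two-call idiom: clamp *count to what the driver exposes,
    // copy as many entries as the caller provided room for, and return
    // VK_INCOMPLETE if that was fewer than the full list.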
    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
}

VKAPI_ATTR
VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
                        const VkAllocationCallbacks* allocator,
                        VkInstance* out_instance) {
    if (!allocator)
        allocator = &kDefaultAllocCallbacks;

    VkInstance_T* instance =
        static_cast<VkInstance_T*>(allocator->pfnAllocation(
            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
    if (!instance)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->allocator = *allocator;
    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->next_callback_handle = 0;

    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
        if (strcmp(create_info->ppEnabledExtensionNames[i],
                   VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) == 0) {
            ALOGV("instance extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        } else if (strcmp(create_info->ppEnabledExtensionNames[i],
                          VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
            ALOGV("instance extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        } else {
            ALOGW("unsupported extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        }
    }

    *out_instance = instance;
    return VK_SUCCESS;
}

VKAPI_ATTR
PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
}

VKAPI_ATTR
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    return GetInstanceProcAddr(name);
}

// -----------------------------------------------------------------------------
// Instance

void DestroyInstance(VkInstance instance,
                     const VkAllocationCallbacks* /*allocator*/) {
    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
}

// -----------------------------------------------------------------------------
// PhysicalDevice

VkResult EnumeratePhysicalDevices(VkInstance instance,
                                  uint32_t* physical_device_count,
                                  VkPhysicalDevice* physical_devices) {
    if (!physical_devices)
        *physical_device_count = 1;
    else if (*physical_device_count == 0)
        return VK_INCOMPLETE;
    else {
        physical_devices[0] = &instance->physical_device;
        *physical_device_count = 1;
    }
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice /*gpu*/,
                                        uint32_t* count,
                                        VkLayerProperties* /*properties*/) {
    ALOGW("Driver vkEnumerateDeviceLayerProperties shouldn't be called");
    *count = 0;
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice /*gpu*/,
                                            const char* layer_name,
                                            uint32_t* count,
                                            VkExtensionProperties* properties) {
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateDeviceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
        *count = 0;
        return VK_SUCCESS;
    }

    const VkExtensionProperties kExtensions[] = {
        {VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
         VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
}

void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION);
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorID = 0;
    properties->deviceID = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
    properties->limits = VkPhysicalDeviceLimits{
        4096,  // maxImageDimension1D
        4096,  // maxImageDimension2D
        256,  // maxImageDimension3D
        4096,  // maxImageDimensionCube
        256,  // maxImageArrayLayers
        65536,  // maxTexelBufferElements
        16384,  // maxUniformBufferRange
        1 << 27,  // maxStorageBufferRange
        128,  // maxPushConstantsSize
        4096,  // maxMemoryAllocationCount
        4000,  // maxSamplerAllocationCount
        1,  // bufferImageGranularity
        0,  // sparseAddressSpaceSize
        4,  // maxBoundDescriptorSets
        16,  // maxPerStageDescriptorSamplers
        12,  // maxPerStageDescriptorUniformBuffers
        4,  // maxPerStageDescriptorStorageBuffers
        16,  // maxPerStageDescriptorSampledImages
        4,  // maxPerStageDescriptorStorageImages
        4,  // maxPerStageDescriptorInputAttachments
        128,  // maxPerStageResources
        96,  // maxDescriptorSetSamplers
        72,  // maxDescriptorSetUniformBuffers
        8,  // maxDescriptorSetUniformBuffersDynamic
        24,  // maxDescriptorSetStorageBuffers
        4,  // maxDescriptorSetStorageBuffersDynamic
        96,  // maxDescriptorSetSampledImages
        24,  // maxDescriptorSetStorageImages
        4,  // maxDescriptorSetInputAttachments
        16,  // maxVertexInputAttributes
        16,  // maxVertexInputBindings
        2047,  // maxVertexInputAttributeOffset
        2048,  // maxVertexInputBindingStride
        64,  // maxVertexOutputComponents
        0,  // maxTessellationGenerationLevel
        0,  // maxTessellationPatchSize
        0,  // maxTessellationControlPerVertexInputComponents
        0,  // maxTessellationControlPerVertexOutputComponents
        0,  // maxTessellationControlPerPatchOutputComponents
        0,  // maxTessellationControlTotalOutputComponents
        0,  // maxTessellationEvaluationInputComponents
        0,  // maxTessellationEvaluationOutputComponents
        0,  // maxGeometryShaderInvocations
        0,  // maxGeometryInputComponents
        0,  // maxGeometryOutputComponents
        0,  // maxGeometryOutputVertices
        0,  // maxGeometryTotalOutputComponents
        64,  // maxFragmentInputComponents
        4,  // maxFragmentOutputAttachments
        0,  // maxFragmentDualSrcAttachments
        4,  // maxFragmentCombinedOutputResources
        16384,  // maxComputeSharedMemorySize
        {65536, 65536, 65536},  // maxComputeWorkGroupCount[3]
        128,  // maxComputeWorkGroupInvocations
        {128, 128, 64},  // maxComputeWorkGroupSize[3]
        4,  // subPixelPrecisionBits
        4,  // subTexelPrecisionBits
        4,  // mipmapPrecisionBits
        UINT32_MAX,  // maxDrawIndexedIndexValue
        1,  // maxDrawIndirectCount
        2,  // maxSamplerLodBias
        1,  // maxSamplerAnisotropy
        1,  // maxViewports
        {4096, 4096},  // maxViewportDimensions[2]
        {-8192.0f, 8191.0f},  // viewportBoundsRange[2]
        0,  // viewportSubPixelBits
        64,  // minMemoryMapAlignment
        256,  // minTexelBufferOffsetAlignment
        256,  // minUniformBufferOffsetAlignment
        256,  // minStorageBufferOffsetAlignment
        -8,  // minTexelOffset
        7,  // maxTexelOffset
        0,  // minTexelGatherOffset
        0,  // maxTexelGatherOffset
        0.0f,  // minInterpolationOffset
        0.0f,  // maxInterpolationOffset
        0,  // subPixelInterpolationOffsetBits
        4096,  // maxFramebufferWidth
        4096,  // maxFramebufferHeight
        256,  // maxFramebufferLayers
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferNoAttachmentsSampleCounts
        4,  // maxColorAttachments
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT,  // sampledImageIntegerSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT,  // storageImageSampleCounts
        1,  // maxSampleMaskWords
        VK_TRUE,  // timestampComputeAndGraphics
        1,  // timestampPeriod
        0,  // maxClipDistances
        0,  // maxCullDistances
        0,  // maxCombinedClipAndCullDistances
        2,  // discreteQueuePriorities
        {1.0f, 1.0f},  // pointSizeRange[2]
        {1.0f, 1.0f},  // lineWidthRange[2]
        0.0f,  // pointSizeGranularity
        0.0f,  // lineWidthGranularity
        VK_TRUE,  // strictLines
        VK_TRUE,  // standardSampleLocations
        1,  // optimalBufferCopyOffsetAlignment
        1,  // optimalBufferCopyRowPitchAlignment
        64,  // nonCoherentAtomSize
    };
}

void GetPhysicalDeviceProperties2KHR(VkPhysicalDevice physical_device,
                                     VkPhysicalDeviceProperties2KHR* properties) {
    GetPhysicalDeviceProperties(physical_device, &properties->properties);

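    // Walk the pNext chain and fill in any extension structs we recognize;
    // unknown structs are left untouched.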
    while (properties->pNext) {
        properties = reinterpret_cast<VkPhysicalDeviceProperties2KHR*>(
            properties->pNext);

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wold-style-cast"
        switch ((VkFlags)properties->sType) {
            case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID: {
                VkPhysicalDevicePresentationPropertiesANDROID* presentation_properties =
                    reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID*>(
                        properties);
#pragma clang diagnostic pop

                // Claim that we do all the right things for the loader to
                // expose KHR_shared_presentable_image on our behalf.
                presentation_properties->sharedImage = VK_TRUE;
            } break;

            default:
                // Silently ignore other extension query structs
                break;
        }
    }
}

void GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (!properties || *count > 1)
        *count = 1;
    if (properties && *count == 1) {
        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
                                 VK_QUEUE_TRANSFER_BIT;
        properties->queueCount = 1;
        properties->timestampValidBits = 64;
        properties->minImageTransferGranularity = VkExtent3D{1, 1, 1};
    }
}

void GetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physical_device,
    uint32_t* count,
    VkQueueFamilyProperties2KHR* properties) {
    // Note: even though the caller passes an array of the larger 2KHR
    // structures, forwarding a pointer to the embedded core struct is safe
    // here because we only ever report a single queue family.
    GetPhysicalDeviceQueueFamilyProperties(
        physical_device, count,
        properties ? &properties->queueFamilyProperties : nullptr);
}

void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
}

void GetPhysicalDeviceMemoryProperties2KHR(
    VkPhysicalDevice physical_device,
    VkPhysicalDeviceMemoryProperties2KHR* properties) {
    GetPhysicalDeviceMemoryProperties(physical_device,
                                      &properties->memoryProperties);
}

void GetPhysicalDeviceFeatures(VkPhysicalDevice /*gpu*/,
                               VkPhysicalDeviceFeatures* features) {
    *features = VkPhysicalDeviceFeatures{
        VK_TRUE,  // robustBufferAccess
        VK_FALSE,  // fullDrawIndexUint32
        VK_FALSE,  // imageCubeArray
        VK_FALSE,  // independentBlend
        VK_FALSE,  // geometryShader
        VK_FALSE,  // tessellationShader
        VK_FALSE,  // sampleRateShading
        VK_FALSE,  // dualSrcBlend
        VK_FALSE,  // logicOp
        VK_FALSE,  // multiDrawIndirect
        VK_FALSE,  // drawIndirectFirstInstance
        VK_FALSE,  // depthClamp
        VK_FALSE,  // depthBiasClamp
        VK_FALSE,  // fillModeNonSolid
        VK_FALSE,  // depthBounds
        VK_FALSE,  // wideLines
        VK_FALSE,  // largePoints
        VK_FALSE,  // alphaToOne
        VK_FALSE,  // multiViewport
        VK_FALSE,  // samplerAnisotropy
        VK_FALSE,  // textureCompressionETC2
        VK_FALSE,  // textureCompressionASTC_LDR
        VK_FALSE,  // textureCompressionBC
        VK_FALSE,  // occlusionQueryPrecise
        VK_FALSE,  // pipelineStatisticsQuery
        VK_FALSE,  // vertexPipelineStoresAndAtomics
        VK_FALSE,  // fragmentStoresAndAtomics
        VK_FALSE,  // shaderTessellationAndGeometryPointSize
        VK_FALSE,  // shaderImageGatherExtended
        VK_FALSE,  // shaderStorageImageExtendedFormats
        VK_FALSE,  // shaderStorageImageMultisample
        VK_FALSE,  // shaderStorageImageReadWithoutFormat
        VK_FALSE,  // shaderStorageImageWriteWithoutFormat
        VK_FALSE,  // shaderUniformBufferArrayDynamicIndexing
        VK_FALSE,  // shaderSampledImageArrayDynamicIndexing
        VK_FALSE,  // shaderStorageBufferArrayDynamicIndexing
        VK_FALSE,  // shaderStorageImageArrayDynamicIndexing
        VK_FALSE,  // shaderClipDistance
        VK_FALSE,  // shaderCullDistance
        VK_FALSE,  // shaderFloat64
        VK_FALSE,  // shaderInt64
        VK_FALSE,  // shaderInt16
        VK_FALSE,  // shaderResourceResidency
        VK_FALSE,  // shaderResourceMinLod
        VK_FALSE,  // sparseBinding
        VK_FALSE,  // sparseResidencyBuffer
        VK_FALSE,  // sparseResidencyImage2D
        VK_FALSE,  // sparseResidencyImage3D
        VK_FALSE,  // sparseResidency2Samples
        VK_FALSE,  // sparseResidency4Samples
        VK_FALSE,  // sparseResidency8Samples
        VK_FALSE,  // sparseResidency16Samples
        VK_FALSE,  // sparseResidencyAliased
        VK_FALSE,  // variableMultisampleRate
        VK_FALSE,  // inheritedQueries
    };
}

void GetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physical_device,
                                   VkPhysicalDeviceFeatures2KHR* features) {
    GetPhysicalDeviceFeatures(physical_device, &features->features);
}

// -----------------------------------------------------------------------------
// Device

VkResult CreateDevice(VkPhysicalDevice physical_device,
                      const VkDeviceCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkDevice* out_device) {
    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
    if (!allocator)
        allocator = &instance->allocator;
    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
    if (!device)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    device->allocator = *allocator;
    device->instance = instance;
    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    std::fill(device->next_handle.begin(), device->next_handle.end(),
              UINT64_C(0));

    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
        if (strcmp(create_info->ppEnabledExtensionNames[i],
                   VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME) == 0) {
            ALOGV("Enabling " VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME);
        }
    }

    *out_device = device;
    return VK_SUCCESS;
}

void DestroyDevice(VkDevice device,
                   const VkAllocationCallbacks* /*allocator*/) {
    if (!device)
        return;
    device->allocator.pfnFree(device->allocator.pUserData, device);
}

void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}

// -----------------------------------------------------------------------------
// CommandPool

struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)

VkResult CreateCommandPool(VkDevice device,
                           const VkCommandPoolCreateInfo* /*create_info*/,
                           const VkAllocationCallbacks* allocator,
                           VkCommandPool* cmd_pool) {
    if (!allocator)
        allocator = &device->allocator;
    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!pool)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pool->allocator = *allocator;
    *cmd_pool = GetHandleToCommandPool(pool);
    return VK_SUCCESS;
}

void DestroyCommandPool(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        const VkAllocationCallbacks* /*allocator*/) {
    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
}

// -----------------------------------------------------------------------------
// CmdBuffer

VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
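    // On failure, release the command buffers that were successfully allocated
    // before the first null entry.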
    if (result != VK_SUCCESS) {
        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}

void FreeCommandBuffers(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        uint32_t count,
                        const VkCommandBuffer* cmdbufs) {
    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
    for (uint32_t i = 0; i < count; i++)
        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
}

// -----------------------------------------------------------------------------
// DeviceMemory

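// "Device" memory is really just host memory: the payload lives in the
// trailing flexible array member, so vkMapMemory can return a pointer into it
// directly.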
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)

VkResult AllocateMemory(VkDevice device,
                        const VkMemoryAllocateInfo* alloc_info,
                        const VkAllocationCallbacks* allocator,
                        VkDeviceMemory* mem_handle) {
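    // Reject sizes that would overflow the size_t arithmetic below.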
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
        allocator->pUserData, size, alignof(DeviceMemory),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}

void FreeMemory(VkDevice device,
                VkDeviceMemory mem_handle,
                const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    allocator->pfnFree(allocator->pUserData, mem);
}

VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
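    // Memory is plain host memory (see DeviceMemory above), so "mapping" is
    // just pointer arithmetic into the payload.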
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Buffer

struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)

VkResult CreateBuffer(VkDevice device,
                      const VkBufferCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkBuffer* buffer_handle) {
    ALOGW_IF(create_info->size > kMaxDeviceMemory,
             "CreateBuffer: requested size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             create_info->size, kMaxDeviceMemory);
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!buffer)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    buffer->size = create_info->size;
    *buffer_handle = GetHandleToBuffer(buffer);
    return VK_SUCCESS;
}

void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyBuffer(VkDevice device,
                   VkBuffer buffer_handle,
                   const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    allocator->pfnFree(allocator->pUserData, buffer);
}

// -----------------------------------------------------------------------------
// Image

struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)

VkResult CreateImage(VkDevice device,
                     const VkImageCreateInfo* create_info,
                     const VkAllocationCallbacks* allocator,
                     VkImage* image_handle) {
    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
        create_info->mipLevels != 1) {
        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
              create_info->imageType, create_info->format,
              create_info->mipLevels);
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }

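    // 4 bytes per texel for the single supported format, R8G8B8A8_UNORM.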
    VkDeviceSize size =
        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
        create_info->arrayLayers * create_info->samples * 4u;
    ALOGW_IF(size > kMaxDeviceMemory,
             "CreateImage: image size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             size, kMaxDeviceMemory);

    if (!allocator)
        allocator = &device->allocator;
    Image* image = static_cast<Image*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Image), alignof(Image),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!image)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    image->size = size;
    *image_handle = GetHandleToImage(image);
    return VK_SUCCESS;
}

void GetImageMemoryRequirements(VkDevice,
                                VkImage image_handle,
                                VkMemoryRequirements* requirements) {
    Image* image = GetImageFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyImage(VkDevice device,
                  VkImage image_handle,
                  const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Image* image = GetImageFromHandle(image_handle);
    allocator->pfnFree(allocator->pUserData, image);
}

VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}

VkResult GetSwapchainGrallocUsage2ANDROID(VkDevice,
                                          VkFormat,
                                          VkImageUsageFlags,
                                          VkSwapchainImageUsageFlagsANDROID,
                                          uint64_t* grallocConsumerUsage,
                                          uint64_t* grallocProducerUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocConsumerUsage = 0;
    *grallocProducerUsage = 0;
    return VK_SUCCESS;
}

VkResult AcquireImageANDROID(VkDevice,
                             VkImage,
                             int fence,
                             VkSemaphore,
                             VkFence) {
    close(fence);
    return VK_SUCCESS;
}

VkResult QueueSignalReleaseImageANDROID(VkQueue,
                                        uint32_t,
                                        const VkSemaphore*,
                                        VkImage,
                                        int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// No-op types
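//
// These objects carry no state in a null driver, so creation simply hands back
// a counter-based handle from AllocHandle and destruction is a no-op.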

VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
        descriptor_sets[i] =
            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle<VkDescriptorSetLayout>(
        device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout =
        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocationCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

VkResult CreateDebugReportCallbackEXT(VkInstance instance,
                                      const VkDebugReportCallbackCreateInfoEXT*,
                                      const VkAllocationCallbacks*,
                                      VkDebugReportCallbackEXT* callback) {
    *callback = AllocHandle<VkDebugReportCallbackEXT>(
        instance, HandleType::kDebugReportCallbackEXT);
    return VK_SUCCESS;
}

VkResult CreateRenderPass2(VkDevice device,
                           const VkRenderPassCreateInfo2*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkRenderPass* pRenderPass) {
    *pRenderPass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// No-op entrypoints
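//
// The remaining entrypoints either do nothing at all or only log a TODO; they
// exist so that every core entrypoint resolves to something callable.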
1136
1137 // clang-format off
1138 #pragma clang diagnostic push
1139 #pragma clang diagnostic ignored "-Wunused-parameter"
1140
GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties * pFormatProperties)1141 void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
1142 ALOGV("TODO: vk%s", __FUNCTION__);
1143 }
1144
GetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties2KHR * pFormatProperties)1145 void GetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties) {
1146 ALOGV("TODO: vk%s", __FUNCTION__);
1147 }
1148
GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkImageTiling tiling,VkImageUsageFlags usage,VkImageCreateFlags flags,VkImageFormatProperties * pImageFormatProperties)1149 VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
1150 ALOGV("TODO: vk%s", __FUNCTION__);
1151 return VK_SUCCESS;
1152 }
1153
GetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2KHR * pImageFormatInfo,VkImageFormatProperties2KHR * pImageFormatProperties)1154 VkResult GetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
1155 const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo,
1156 VkImageFormatProperties2KHR* pImageFormatProperties) {
1157 ALOGV("TODO: vk%s", __FUNCTION__);
1158 return VK_SUCCESS;
1159 }
1160
EnumerateInstanceLayerProperties(uint32_t * pCount,VkLayerProperties * pProperties)1161 VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
1162 ALOGV("TODO: vk%s", __FUNCTION__);
1163 return VK_SUCCESS;
1164 }
1165
QueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmitInfo,VkFence fence)1166 VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
1167 return VK_SUCCESS;
1168 }
1169
QueueWaitIdle(VkQueue queue)1170 VkResult QueueWaitIdle(VkQueue queue) {
1171 ALOGV("TODO: vk%s", __FUNCTION__);
1172 return VK_SUCCESS;
1173 }
1174
DeviceWaitIdle(VkDevice device)1175 VkResult DeviceWaitIdle(VkDevice device) {
1176 ALOGV("TODO: vk%s", __FUNCTION__);
1177 return VK_SUCCESS;
1178 }
1179
UnmapMemory(VkDevice device,VkDeviceMemory mem)1180 void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
1181 }
1182
FlushMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges)1183 VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
1184 ALOGV("TODO: vk%s", __FUNCTION__);
1185 return VK_SUCCESS;
1186 }
1187
InvalidateMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges)1188 VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
1189 ALOGV("TODO: vk%s", __FUNCTION__);
1190 return VK_SUCCESS;
1191 }
1192
GetDeviceMemoryCommitment(VkDevice device,VkDeviceMemory memory,VkDeviceSize * pCommittedMemoryInBytes)1193 void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
1194 ALOGV("TODO: vk%s", __FUNCTION__);
1195 }
1196
BindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory mem,VkDeviceSize memOffset)1197 VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
1198 return VK_SUCCESS;
1199 }
1200
BindImageMemory(VkDevice device,VkImage image,VkDeviceMemory mem,VkDeviceSize memOffset)1201 VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
1202 return VK_SUCCESS;
1203 }
1204
GetImageSparseMemoryRequirements(VkDevice device,VkImage image,uint32_t * pNumRequirements,VkSparseImageMemoryRequirements * pSparseMemoryRequirements)1205 void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
1206 ALOGV("TODO: vk%s", __FUNCTION__);
1207 }
1208
GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkSampleCountFlagBits samples,VkImageUsageFlags usage,VkImageTiling tiling,uint32_t * pNumProperties,VkSparseImageFormatProperties * pProperties)1209 void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
1210 ALOGV("TODO: vk%s", __FUNCTION__);
1211 }
1212
GetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,VkPhysicalDeviceSparseImageFormatInfo2KHR const * pInfo,unsigned int * pNumProperties,VkSparseImageFormatProperties2KHR * pProperties)1213 void GetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
1214 VkPhysicalDeviceSparseImageFormatInfo2KHR const* pInfo,
1215 unsigned int* pNumProperties,
1216 VkSparseImageFormatProperties2KHR* pProperties) {
1217 ALOGV("TODO: vk%s", __FUNCTION__);
1218 }
1219
1220
QueueBindSparse(VkQueue queue,uint32_t bindInfoCount,const VkBindSparseInfo * pBindInfo,VkFence fence)1221 VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
1222 ALOGV("TODO: vk%s", __FUNCTION__);
1223 return VK_SUCCESS;
1224 }
1225
DestroyFence(VkDevice device,VkFence fence,const VkAllocationCallbacks * allocator)1226 void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
1227 }
1228
ResetFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences)1229 VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
1230 return VK_SUCCESS;
1231 }
1232
GetFenceStatus(VkDevice device,VkFence fence)1233 VkResult GetFenceStatus(VkDevice device, VkFence fence) {
1234 ALOGV("TODO: vk%s", __FUNCTION__);
1235 return VK_SUCCESS;
1236 }
1237
WaitForFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout)1238 VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
1239 return VK_SUCCESS;
1240 }
1241
DestroySemaphore(VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * allocator)1242 void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
1243 }
1244
DestroyEvent(VkDevice device,VkEvent event,const VkAllocationCallbacks * allocator)1245 void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
1246 }
1247
GetEventStatus(VkDevice device,VkEvent event)1248 VkResult GetEventStatus(VkDevice device, VkEvent event) {
1249 ALOGV("TODO: vk%s", __FUNCTION__);
1250 return VK_SUCCESS;
1251 }
1252
SetEvent(VkDevice device,VkEvent event)1253 VkResult SetEvent(VkDevice device, VkEvent event) {
1254 ALOGV("TODO: vk%s", __FUNCTION__);
1255 return VK_SUCCESS;
1256 }
1257
ResetEvent(VkDevice device,VkEvent event)1258 VkResult ResetEvent(VkDevice device, VkEvent event) {
1259 ALOGV("TODO: vk%s", __FUNCTION__);
1260 return VK_SUCCESS;
1261 }
1262
DestroyQueryPool(VkDevice device,VkQueryPool queryPool,const VkAllocationCallbacks * allocator)1263 void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
1264 }
1265
GetQueryPoolResults(VkDevice device,VkQueryPool queryPool,uint32_t startQuery,uint32_t queryCount,size_t dataSize,void * pData,VkDeviceSize stride,VkQueryResultFlags flags)1266 VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
1267 ALOGV("TODO: vk%s", __FUNCTION__);
1268 return VK_SUCCESS;
1269 }
1270
void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

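// Command buffer recording entry points. The null driver records no command
// state, so the vkCmd* functions below are intentional no-ops.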
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const void* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

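// VK_EXT_debug_report entry points.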
void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
}

void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
}

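// Vulkan 1.1 entry points.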
VkResult BindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) {
    return VK_SUCCESS;
}

VkResult BindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) {
    return VK_SUCCESS;
}

void GetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
}

void CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {
}

void CmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {
}

VkResult EnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
    return VK_SUCCESS;
}

void GetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {
}

void GetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {
}

void GetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
}

void GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {
}

void GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {
}

void GetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {
}

VkResult GetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) {
    return VK_SUCCESS;
}

void GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {
}

void GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
}

void GetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {
}

void TrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {
}

void GetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {
}

VkResult CreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {
    return VK_SUCCESS;
}

void DestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {
}

VkResult CreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return VK_SUCCESS;
}

void DestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
}

void UpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {
}

void GetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {
}

void GetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {
}

void GetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
}

void GetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {
}

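// Vulkan 1.2 entry points.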
void ResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void CmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo) {
}

void CmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo) {
}

void CmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo) {
}

VkResult GetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t* pValue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void CmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {
}

void CmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {
}

uint64_t GetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return 0;
}

VkDeviceAddress GetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return (VkDeviceAddress)0;
}

uint64_t GetDeviceMemoryOpaqueCaptureAddress(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return 0;
}

#pragma clang diagnostic pop
// clang-format on

} // namespace null_driver