1 /*-------------------------------------------------------------------------
2 * Vulkan CTS Framework
3 * --------------------
4 *
5 * Copyright (c) 2015 Google Inc.
6 * Copyright (c) 2023 LunarG, Inc.
7 * Copyright (c) 2023 Nintendo
8 *
9 * Licensed under the Apache License, Version 2.0 (the "License");
10 * you may not use this file except in compliance with the License.
11 * You may obtain a copy of the License at
12 *
13 * http://www.apache.org/licenses/LICENSE-2.0
14 *
15 * Unless required by applicable law or agreed to in writing, software
16 * distributed under the License is distributed on an "AS IS" BASIS,
17 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 * See the License for the specific language governing permissions and
19 * limitations under the License.
20 *
21 *//*!
22 * \file
23 * \brief Null (do-nothing) Vulkan implementation.
24 *//*--------------------------------------------------------------------*/
25
26 #include "vkNullDriver.hpp"
27 #include "vkPlatform.hpp"
28 #include "vkImageUtil.hpp"
29 #include "vkQueryUtil.hpp"
30 #include "tcuFunctionLibrary.hpp"
31 #include "deMemory.h"
32
33 #if (DE_OS == DE_OS_ANDROID) && defined(__ANDROID_API_O__) && \
34 (DE_ANDROID_API >= __ANDROID_API_O__ /* __ANDROID_API_O__ */)
35 #define USE_ANDROID_O_HARDWARE_BUFFER
36 #endif
37 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
38 #include <android/hardware_buffer.h>
39 #endif
40
41 #include <stdexcept>
42 #include <algorithm>
43
44 namespace vk
45 {
46
47 namespace
48 {
49
50 using std::vector;
51
52 // Memory management
53
54 template <typename T>
allocateSystemMem(const VkAllocationCallbacks * pAllocator,VkSystemAllocationScope scope)55 void *allocateSystemMem(const VkAllocationCallbacks *pAllocator, VkSystemAllocationScope scope)
56 {
57 void *ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void *), scope);
58 if (!ptr)
59 throw std::bad_alloc();
60 return ptr;
61 }
62
freeSystemMem(const VkAllocationCallbacks * pAllocator,void * mem)63 void freeSystemMem(const VkAllocationCallbacks *pAllocator, void *mem)
64 {
65 pAllocator->pfnFree(pAllocator->pUserData, mem);
66 }
67
68 template <typename Object, typename Handle, typename Parent, typename CreateInfo>
allocateHandle(Parent parent,const CreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator)69 Handle allocateHandle(Parent parent, const CreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator)
70 {
71 Object *obj = nullptr;
72
73 if (pAllocator)
74 {
75 void *mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
76 try
77 {
78 obj = new (mem) Object(parent, pCreateInfo);
79 DE_ASSERT(obj == mem);
80 }
81 catch (...)
82 {
83 pAllocator->pfnFree(pAllocator->pUserData, mem);
84 throw;
85 }
86 }
87 else
88 obj = new Object(parent, pCreateInfo);
89
90 return reinterpret_cast<Handle>(obj);
91 }
92
93 template <typename Object, typename Handle, typename CreateInfo>
allocateHandle(const CreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator)94 Handle allocateHandle(const CreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator)
95 {
96 Object *obj = nullptr;
97
98 if (pAllocator)
99 {
100 void *mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
101 try
102 {
103 obj = new (mem) Object(pCreateInfo);
104 DE_ASSERT(obj == mem);
105 }
106 catch (...)
107 {
108 pAllocator->pfnFree(pAllocator->pUserData, mem);
109 throw;
110 }
111 }
112 else
113 obj = new Object(pCreateInfo);
114
115 return reinterpret_cast<Handle>(obj);
116 }
117
118 template <typename Object, typename Handle, typename Parent>
allocateHandle(Parent parent,const VkAllocationCallbacks * pAllocator)119 Handle allocateHandle(Parent parent, const VkAllocationCallbacks *pAllocator)
120 {
121 Object *obj = nullptr;
122
123 if (pAllocator)
124 {
125 void *mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
126 try
127 {
128 obj = new (mem) Object(parent);
129 DE_ASSERT(obj == mem);
130 }
131 catch (...)
132 {
133 pAllocator->pfnFree(pAllocator->pUserData, mem);
134 throw;
135 }
136 }
137 else
138 obj = new Object(parent);
139
140 return reinterpret_cast<Handle>(obj);
141 }
142
143 template <typename Object, typename Handle>
freeHandle(Handle handle,const VkAllocationCallbacks * pAllocator)144 void freeHandle(Handle handle, const VkAllocationCallbacks *pAllocator)
145 {
146 Object *obj = reinterpret_cast<Object *>(handle);
147
148 if (pAllocator)
149 {
150 obj->~Object();
151 freeSystemMem(pAllocator, reinterpret_cast<void *>(obj));
152 }
153 else
154 delete obj;
155 }
156
157 template <typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
allocateNonDispHandleArray(Parent parent,VkPipelineCache pipelineCache,uint32_t createInfoCount,const CreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,Handle * pHandles)158 void allocateNonDispHandleArray(Parent parent, VkPipelineCache pipelineCache, uint32_t createInfoCount,
159 const CreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator,
160 Handle *pHandles)
161 {
162 (void)pipelineCache;
163 for (uint32_t i = 0; i < createInfoCount; i++)
164 {
165 Object *const obj = allocateHandle<Object, Object *>(parent, &pCreateInfos[i], pAllocator);
166 pHandles[i] = Handle(obj);
167 }
168 }
169
170 template <typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
allocateNonDispHandle(Parent parent,const CreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator)171 Handle allocateNonDispHandle(Parent parent, const CreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator)
172 {
173 Object *const obj = allocateHandle<Object, Object *>(parent, pCreateInfo, pAllocator);
174 return Handle(obj);
175 }
176
// Convenience overload: the common case where the base object type is the
// object type itself.
template <typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle(Parent parent, const CreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator)
{
    return allocateNonDispHandle<Object, Object, Handle, Parent, CreateInfo>(parent, pCreateInfo, pAllocator);
}
182
183 template <typename Object, typename Handle, typename Parent>
allocateNonDispHandle(Parent parent,const VkAllocationCallbacks * pAllocator)184 Handle allocateNonDispHandle(Parent parent, const VkAllocationCallbacks *pAllocator)
185 {
186 Object *const obj = allocateHandle<Object, Object *>(parent, pAllocator);
187 return Handle(obj);
188 }
189
// Destroy a non-dispatchable handle: recover the underlying Object pointer
// and release it through freeHandle() with the matching callbacks.
template <typename Object, typename Handle>
void freeNonDispHandle(Handle handle, const VkAllocationCallbacks *pAllocator)
{
    freeHandle<Object>(handle.template as<Object>(), pAllocator);
}
195
196 // Object definitions
197
// Executes STMT and maps the outcome to a VkResult at the C ABI boundary:
// normal completion -> VK_SUCCESS, std::bad_alloc -> VK_ERROR_OUT_OF_HOST_MEMORY,
// and a thrown VkResult value is returned as-is.
#define VK_NULL_RETURN(STMT)                    \
    do                                          \
    {                                           \
        try                                     \
        {                                       \
            STMT;                               \
            return VK_SUCCESS;                  \
        }                                       \
        catch (const std::bad_alloc &)          \
        {                                       \
            return VK_ERROR_OUT_OF_HOST_MEMORY; \
        }                                       \
        catch (VkResult res)                    \
        {                                       \
            return res;                         \
        }                                       \
    } while (false)

// Expands to a { "name", funcPtr } initializer for the static function tables.
// \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
#define VK_NULL_FUNC_ENTRY(NAME, FUNC) \
    {                                  \
        #NAME, (deFunctionPtr)FUNC     \
    } // NOLINT(FUNC)

// Defines a do-nothing device-child object NAME with a constructor matching
// (VkDevice, const Vk<NAME>CreateInfo*). Invocations supply the trailing ';'.
#define VK_NULL_DEFINE_DEVICE_OBJ(NAME)              \
    struct NAME                                      \
    {                                                \
        NAME(VkDevice, const Vk##NAME##CreateInfo *) \
        {                                            \
        }                                            \
    }

// Variant for extension objects whose type and create-info names carry a
// vendor/extension POSTFIX, parented by either a VkDevice or VkInstance.
// Note the macro itself ends with ';', unlike VK_NULL_DEFINE_DEVICE_OBJ.
#define VK_NULL_DEFINE_OBJ_WITH_POSTFIX(DEVICE_OR_INSTANCE, NAME, POSTFIX)       \
    struct NAME##POSTFIX                                                         \
    {                                                                            \
        NAME##POSTFIX(DEVICE_OR_INSTANCE, const Vk##NAME##CreateInfo##POSTFIX *) \
        {                                                                        \
        }                                                                        \
    };
237
// Trivial (state-free) null implementations of the core device-child objects.
VK_NULL_DEFINE_DEVICE_OBJ(Fence);
VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
VK_NULL_DEFINE_DEVICE_OBJ(Event);
VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
VK_NULL_DEFINE_DEVICE_OBJ(SamplerYcbcrConversion);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Swapchain, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugUtilsMessenger, EXT)

// Extension objects: Vulkan SC exposes a different (much smaller) set than
// standard Vulkan.
#ifdef CTS_USES_VULKANSC
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, SemaphoreSciSyncPool, NV)
#else
VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorUpdateTemplate);
VK_NULL_DEFINE_DEVICE_OBJ(PrivateDataSlot);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugReportCallback, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuModule, NVX)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuFunction, NVX)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CudaModule, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CudaFunction, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Micromap, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, OpticalFlowSession, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, IndirectCommandsLayout, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSession, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSessionParameters, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, ValidationCache, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, BufferCollection, FUCHSIA)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Shader, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, IndirectCommandsLayout, EXT);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, IndirectExecutionSet, EXT);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, PipelineBinary, KHR)
#endif // CTS_USES_VULKANSC
278
// Null VkInstance implementation. The constructor (defined elsewhere in this
// file) initializes m_functions; getProcAddr() resolves instance-level entry
// points by name through that table.
class Instance
{
public:
    Instance(const VkInstanceCreateInfo *instanceInfo);
    ~Instance(void)
    {
    }

    // Look up an entry point by name in the static function table.
    PFN_vkVoidFunction getProcAddr(const char *name) const
    {
        return (PFN_vkVoidFunction)m_functions.getFunction(name);
    }

private:
    const tcu::StaticFunctionLibrary m_functions;
};
295
// Null VkSurfaceKHR: one no-op constructor per platform-specific surface
// create-info type, so a surface can be "created" for any WSI platform.
class SurfaceKHR
{
public:
#ifndef CTS_USES_VULKANSC
    SurfaceKHR(VkInstance, const VkXlibSurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkXcbSurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkWaylandSurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkAndroidSurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkWin32SurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkViSurfaceCreateInfoNN *)
    {
    }
    SurfaceKHR(VkInstance, const VkIOSSurfaceCreateInfoMVK *)
    {
    }
    SurfaceKHR(VkInstance, const VkMacOSSurfaceCreateInfoMVK *)
    {
    }
    SurfaceKHR(VkInstance, const VkImagePipeSurfaceCreateInfoFUCHSIA *)
    {
    }
    SurfaceKHR(VkInstance, const VkStreamDescriptorSurfaceCreateInfoGGP *)
    {
    }
    SurfaceKHR(VkInstance, const VkMetalSurfaceCreateInfoEXT *)
    {
    }
    SurfaceKHR(VkInstance, const VkScreenSurfaceCreateInfoQNX *)
    {
    }
#endif // CTS_USES_VULKANSC
    // Display and headless surfaces are available in both Vulkan and Vulkan SC.
    SurfaceKHR(VkInstance, const VkDisplaySurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkHeadlessSurfaceCreateInfoEXT *)
    {
    }
    ~SurfaceKHR(void)
    {
    }
};
347
// Null VkDisplayModeKHR: stores no state.
class DisplayModeKHR
{
public:
    DisplayModeKHR(VkDisplayKHR, const VkDisplayModeCreateInfoKHR *)
    {
    }
    ~DisplayModeKHR(void)
    {
    }
};
358
// Null VkDevice implementation. The constructor (defined elsewhere in this
// file) initializes m_functions; getProcAddr() resolves device-level entry
// points by name through that table.
class Device
{
public:
    Device(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *deviceInfo);
    ~Device(void)
    {
    }

    // Look up an entry point by name in the static function table.
    PFN_vkVoidFunction getProcAddr(const char *name) const
    {
        return (PFN_vkVoidFunction)m_functions.getFunction(name);
    }

private:
    const tcu::StaticFunctionLibrary m_functions;
};
375
// Null VkPipeline: accepts every pipeline create-info flavor (graphics,
// compute, and — outside Vulkan SC — ray tracing NV/KHR and execution graph)
// and stores no state.
class Pipeline
{
public:
    Pipeline(VkDevice, const VkGraphicsPipelineCreateInfo *)
    {
    }
    Pipeline(VkDevice, const VkComputePipelineCreateInfo *)
    {
    }
#ifndef CTS_USES_VULKANSC
    Pipeline(VkDevice, const VkRayTracingPipelineCreateInfoNV *)
    {
    }
    Pipeline(VkDevice, const VkRayTracingPipelineCreateInfoKHR *)
    {
    }
    Pipeline(VkDevice, const VkExecutionGraphPipelineCreateInfoAMDX *)
    {
    }
#endif // CTS_USES_VULKANSC
};
397
// Null VkRenderPass: supports both renderpass1 and renderpass2 create infos.
class RenderPass
{
public:
    RenderPass(VkDevice, const VkRenderPassCreateInfo *)
    {
    }
    RenderPass(VkDevice, const VkRenderPassCreateInfo2 *)
    {
    }
};
408
// Null VkBuffer: the only state retained is the size requested at creation.
class Buffer
{
public:
    Buffer(VkDevice, const VkBufferCreateInfo *pCreateInfo) : m_size(pCreateInfo->size)
    {
    }

    VkDeviceSize getSize(void) const
    {
        return m_size;
    }

private:
    const VkDeviceSize m_size;
};
424
getExternalTypesHandle(const VkImageCreateInfo * pCreateInfo)425 VkExternalMemoryHandleTypeFlags getExternalTypesHandle(const VkImageCreateInfo *pCreateInfo)
426 {
427 const VkExternalMemoryImageCreateInfo *const externalInfo =
428 findStructure<VkExternalMemoryImageCreateInfo>(pCreateInfo->pNext);
429
430 return externalInfo ? externalInfo->handleTypes : 0u;
431 }
432
// Null VkImage: retains the creation parameters so that queries (and, on
// Android, hardware-buffer export in findOrCreateHwBuffer()) can be answered
// without a real implementation.
class Image
{
public:
    Image(VkDevice, const VkImageCreateInfo *pCreateInfo)
        : m_imageType(pCreateInfo->imageType)
        , m_format(pCreateInfo->format)
        , m_extent(pCreateInfo->extent)
        , m_arrayLayers(pCreateInfo->arrayLayers)
        , m_samples(pCreateInfo->samples)
        , m_usage(pCreateInfo->usage)
        , m_flags(pCreateInfo->flags)
        , m_externalHandleTypes(getExternalTypesHandle(pCreateInfo))
    {
    }

    VkImageType getImageType(void) const
    {
        return m_imageType;
    }
    VkFormat getFormat(void) const
    {
        return m_format;
    }
    VkExtent3D getExtent(void) const
    {
        return m_extent;
    }
    uint32_t getArrayLayers(void) const
    {
        return m_arrayLayers;
    }
    VkSampleCountFlagBits getSamples(void) const
    {
        return m_samples;
    }
    VkImageUsageFlags getUsage(void) const
    {
        return m_usage;
    }
    VkImageCreateFlags getFlags(void) const
    {
        return m_flags;
    }
    // Handle types from a chained VkExternalMemoryImageCreateInfo, or 0.
    VkExternalMemoryHandleTypeFlags getExternalHandleTypes(void) const
    {
        return m_externalHandleTypes;
    }

private:
    const VkImageType m_imageType;
    const VkFormat m_format;
    const VkExtent3D m_extent;
    const uint32_t m_arrayLayers;
    const VkSampleCountFlagBits m_samples;
    const VkImageUsageFlags m_usage;
    const VkImageCreateFlags m_flags;
    const VkExternalMemoryHandleTypeFlags m_externalHandleTypes;
};
491
allocateHeap(const VkMemoryAllocateInfo * pAllocInfo)492 void *allocateHeap(const VkMemoryAllocateInfo *pAllocInfo)
493 {
494 // \todo [2015-12-03 pyry] Alignment requirements?
495 // \todo [2015-12-03 pyry] Empty allocations okay?
496 if (pAllocInfo->allocationSize > 0)
497 {
498 void *const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
499 if (!heapPtr)
500 throw std::bad_alloc();
501 return heapPtr;
502 }
503 else
504 return nullptr;
505 }
506
// Release memory obtained from allocateHeap().
void freeHeap(void *ptr)
{
    deFree(ptr);
}
511
// Abstract base for null VkDeviceMemory implementations; concrete subclasses
// decide how host-visible mapping is backed.
class DeviceMemory
{
public:
    virtual ~DeviceMemory(void)
    {
    }
    // Return a host pointer to the memory contents.
    virtual void *map(void) = 0;
    // Release the mapping obtained from map().
    virtual void unmap(void) = 0;
};
521
// Device memory backed by a private host-heap allocation; map() simply
// exposes the heap pointer.
class PrivateDeviceMemory : public DeviceMemory
{
public:
    PrivateDeviceMemory(VkDevice, const VkMemoryAllocateInfo *pAllocInfo) : m_memory(allocateHeap(pAllocInfo))
    {
        // \todo [2016-08-03 pyry] In some cases leaving data unintialized would help valgrind analysis,
        //                         but currently it mostly hinders it.
        // Fill with a recognizable pattern (0xcd) to catch reads of
        // never-written memory.
        if (m_memory)
            deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
    }
    virtual ~PrivateDeviceMemory(void)
    {
        freeHeap(m_memory);
    }

    virtual void *map(void) /*override*/
    {
        return m_memory;
    }
    // No-op: the heap pointer stays valid for the object's lifetime.
    virtual void unmap(void) /*override*/
    {
    }

private:
    void *const m_memory;
};
548
549 #ifndef CTS_USES_VULKANSC
550
551 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
// Resolve the AHardwareBuffer backing an allocation: either acquire a
// reference to an imported buffer (VkImportAndroidHardwareBufferInfoANDROID)
// or, for exportable allocations, create a new one. For dedicated image
// allocations the buffer description is derived from the image's creation
// parameters; otherwise a BLOB buffer sized to the allocation is made.
AHardwareBuffer *findOrCreateHwBuffer(const VkMemoryAllocateInfo *pAllocInfo)
{
    const VkExportMemoryAllocateInfo *const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocInfo->pNext);
    const VkImportAndroidHardwareBufferInfoANDROID *const importInfo =
        findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocInfo->pNext);
    const VkMemoryDedicatedAllocateInfo *const dedicatedInfo =
        findStructure<VkMemoryDedicatedAllocateInfo>(pAllocInfo->pNext);
    const Image *const image = dedicatedInfo && !!dedicatedInfo->image ? dedicatedInfo->image.as<Image>() : nullptr;
    AHardwareBuffer *hwbuffer = nullptr;

    // Import and export aren't mutually exclusive; we can have both simultaneously.
    DE_ASSERT((importInfo && importInfo->buffer.internal) ||
              (exportInfo &&
               (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0));

    if (importInfo && importInfo->buffer.internal)
    {
        // Import path: take a reference on the caller-provided buffer.
        hwbuffer = (AHardwareBuffer *)importInfo->buffer.internal;
        AHardwareBuffer_acquire(hwbuffer);
    }
    else if (exportInfo &&
             (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
    {
        AHardwareBuffer_Desc hwbufferDesc;
        deMemset(&hwbufferDesc, 0, sizeof(hwbufferDesc));

        if (image)
        {
            hwbufferDesc.width  = image->getExtent().width;
            hwbufferDesc.height = image->getExtent().height;
            hwbufferDesc.layers = image->getArrayLayers();
            // Only the formats below have an AHB equivalent here.
            switch (image->getFormat())
            {
            case VK_FORMAT_R8G8B8A8_UNORM:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
                break;
            case VK_FORMAT_R8G8B8_UNORM:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
                break;
            case VK_FORMAT_R5G6B5_UNORM_PACK16:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
                break;
            case VK_FORMAT_R16G16B16A16_SFLOAT:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
                break;
            case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
                break;
            default:
                DE_FATAL("Unsupported image format for Android hardware buffer export");
                break;
            }
            if ((image->getUsage() & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
            if ((image->getUsage() & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
            //		if ((image->getFlags() & VK_IMAGE_CREATE_PROTECTED_BIT) != 0)
            //			hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

            // Make sure we have at least one AHB GPU usage, even if the image doesn't have any
            // Vulkan usages with corresponding to AHB GPU usages.
            if ((image->getUsage() & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) == 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
        }
        else
        {
            // Non-dedicated export: a linear BLOB buffer of the allocation size.
            hwbufferDesc.width = static_cast<uint32_t>(pAllocInfo->allocationSize);
            hwbufferDesc.height = 1, hwbufferDesc.layers = 1, hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_BLOB,
            hwbufferDesc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
        }

        AHardwareBuffer_allocate(&hwbufferDesc, &hwbuffer);
    }

    return hwbuffer;
}
628
// Device memory backed by an AHardwareBuffer (imported or freshly allocated
// by findOrCreateHwBuffer()); the reference taken there is released in the
// destructor.
class ExternalDeviceMemoryAndroid : public DeviceMemory
{
public:
    ExternalDeviceMemoryAndroid(VkDevice, const VkMemoryAllocateInfo *pAllocInfo)
        : m_hwbuffer(findOrCreateHwBuffer(pAllocInfo))
    {
    }
    virtual ~ExternalDeviceMemoryAndroid(void)
    {
        if (m_hwbuffer)
            AHardwareBuffer_release(m_hwbuffer);
    }

    // Lock the buffer for CPU read/write access and return the host pointer.
    virtual void *map(void) /*override*/
    {
        void *p;
        AHardwareBuffer_lock(m_hwbuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                             -1, NULL, &p);
        return p;
    }

    virtual void unmap(void) /*override*/
    {
        AHardwareBuffer_unlock(m_hwbuffer, NULL);
    }

    AHardwareBuffer *getHwBuffer(void)
    {
        return m_hwbuffer;
    }

private:
    AHardwareBuffer *const m_hwbuffer;
};
663 #endif // defined(USE_ANDROID_O_HARDWARE_BUFFER)
664
665 #endif // CTS_USES_VULKANSC
666
// Null VkDeferredOperationKHR: stores no state.
class DeferredOperationKHR
{
public:
    DeferredOperationKHR(VkDevice)
    {
    }
};
674
// Null VkCommandBuffer: stores no state; lifetime is managed by CommandPool.
class CommandBuffer
{
public:
    CommandBuffer(VkDevice, VkCommandPool, VkCommandBufferLevel)
    {
    }
};
682
// Null VkCommandPool: owns the CommandBuffer objects it allocates and
// destroys any still-outstanding ones in its destructor (the destructor is
// only defined for non-SC builds; see below).
class CommandPool
{
public:
    CommandPool(VkDevice device, const VkCommandPoolCreateInfo *) : m_device(device)
    {
    }
#ifndef CTS_USES_VULKANSC
    ~CommandPool(void);
#endif // CTS_USES_VULKANSC

    // Create a new command buffer owned by this pool.
    VkCommandBuffer allocate(VkCommandBufferLevel level);
    // Destroy a command buffer previously returned by allocate().
    void free(VkCommandBuffer buffer);

private:
    const VkDevice m_device;

    // All currently live command buffers allocated from this pool.
    vector<CommandBuffer *> m_buffers;
};
701
702 #ifndef CTS_USES_VULKANSC
703
~CommandPool(void)704 CommandPool::~CommandPool(void)
705 {
706 for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
707 delete m_buffers[ndx];
708 }
709
710 #endif // CTS_USES_VULKANSC
711
allocate(VkCommandBufferLevel level)712 VkCommandBuffer CommandPool::allocate(VkCommandBufferLevel level)
713 {
714 CommandBuffer *const impl = new CommandBuffer(m_device, VkCommandPool(this), level);
715
716 try
717 {
718 m_buffers.push_back(impl);
719 }
720 catch (...)
721 {
722 delete impl;
723 throw;
724 }
725
726 return reinterpret_cast<VkCommandBuffer>(impl);
727 }
728
free(VkCommandBuffer buffer)729 void CommandPool::free(VkCommandBuffer buffer)
730 {
731 CommandBuffer *const impl = reinterpret_cast<CommandBuffer *>(buffer);
732
733 for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
734 {
735 if (m_buffers[ndx] == impl)
736 {
737 std::swap(m_buffers[ndx], m_buffers.back());
738 m_buffers.pop_back();
739 delete impl;
740 return;
741 }
742 }
743
744 DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
745 }
746
// Null VkDescriptorSet: stores no state; lifetime is managed by DescriptorPool.
class DescriptorSet
{
public:
    DescriptorSet(VkDevice, VkDescriptorPool, VkDescriptorSetLayout)
    {
    }
};
754
// Null VkDescriptorPool: owns its DescriptorSet objects. The pool flags are
// retained so that free() can assert FREE_DESCRIPTOR_SET_BIT was requested.
class DescriptorPool
{
public:
    DescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo)
        : m_device(device)
        , m_flags(pCreateInfo->flags)
    {
    }
    ~DescriptorPool(void)
    {
        // Destroying the pool releases all sets allocated from it.
        reset();
    }

    // Create a new descriptor set owned by this pool.
    VkDescriptorSet allocate(VkDescriptorSetLayout setLayout);
    // Destroy an individual set (requires FREE_DESCRIPTOR_SET_BIT).
    void free(VkDescriptorSet set);

    // Destroy all sets allocated from this pool.
    void reset(void);

private:
    const VkDevice m_device;
    const VkDescriptorPoolCreateFlags m_flags;

    // All currently live descriptor sets allocated from this pool.
    vector<DescriptorSet *> m_managedSets;
};
779
allocate(VkDescriptorSetLayout setLayout)780 VkDescriptorSet DescriptorPool::allocate(VkDescriptorSetLayout setLayout)
781 {
782 DescriptorSet *const impl = new DescriptorSet(m_device, VkDescriptorPool(this), setLayout);
783
784 try
785 {
786 m_managedSets.push_back(impl);
787 }
788 catch (...)
789 {
790 delete impl;
791 throw;
792 }
793
794 return VkDescriptorSet(impl);
795 }
796
free(VkDescriptorSet set)797 void DescriptorPool::free(VkDescriptorSet set)
798 {
799 DescriptorSet *const impl = set.as<DescriptorSet>();
800
801 DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
802 DE_UNREF(m_flags);
803
804 for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
805 {
806 if (m_managedSets[ndx] == impl)
807 {
808 std::swap(m_managedSets[ndx], m_managedSets.back());
809 m_managedSets.pop_back();
810 delete impl;
811 return;
812 }
813 }
814
815 DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
816 }
817
reset(void)818 void DescriptorPool::reset(void)
819 {
820 for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
821 delete m_managedSets[ndx];
822 m_managedSets.clear();
823 }
824
825 // API implementation
826
827 extern "C"
828 {
829
// vkGetDeviceProcAddr: delegate the lookup to the Device's function table.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr(VkDevice device, const char *pName)
{
    return reinterpret_cast<Device *>(device)->getProcAddr(pName);
}
834
createGraphicsPipelines(VkDevice device,VkPipelineCache,uint32_t count,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)835 VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines(VkDevice device, VkPipelineCache, uint32_t count,
836 const VkGraphicsPipelineCreateInfo *pCreateInfos,
837 const VkAllocationCallbacks *pAllocator,
838 VkPipeline *pPipelines)
839 {
840 uint32_t allocNdx;
841 try
842 {
843 for (allocNdx = 0; allocNdx < count; allocNdx++)
844 pPipelines[allocNdx] =
845 allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos + allocNdx, pAllocator);
846
847 return VK_SUCCESS;
848 }
849 catch (const std::bad_alloc &)
850 {
851 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
852 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
853
854 return VK_ERROR_OUT_OF_HOST_MEMORY;
855 }
856 catch (VkResult err)
857 {
858 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
859 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
860
861 return err;
862 }
863 }
864
createComputePipelines(VkDevice device,VkPipelineCache,uint32_t count,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)865 VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines(VkDevice device, VkPipelineCache, uint32_t count,
866 const VkComputePipelineCreateInfo *pCreateInfos,
867 const VkAllocationCallbacks *pAllocator,
868 VkPipeline *pPipelines)
869 {
870 uint32_t allocNdx;
871 try
872 {
873 for (allocNdx = 0; allocNdx < count; allocNdx++)
874 pPipelines[allocNdx] =
875 allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos + allocNdx, pAllocator);
876
877 return VK_SUCCESS;
878 }
879 catch (const std::bad_alloc &)
880 {
881 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
882 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
883
884 return VK_ERROR_OUT_OF_HOST_MEMORY;
885 }
886 catch (VkResult err)
887 {
888 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
889 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
890
891 return err;
892 }
893 }
894
895 #ifndef CTS_USES_VULKANSC
896
createRayTracingPipelinesNV(VkDevice device,VkPipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)897 VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesNV(VkDevice device, VkPipelineCache, uint32_t count,
898 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
899 const VkAllocationCallbacks *pAllocator,
900 VkPipeline *pPipelines)
901 {
902 uint32_t allocNdx;
903 try
904 {
905 for (allocNdx = 0; allocNdx < count; allocNdx++)
906 pPipelines[allocNdx] =
907 allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos + allocNdx, pAllocator);
908
909 return VK_SUCCESS;
910 }
911 catch (const std::bad_alloc &)
912 {
913 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
914 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
915
916 return VK_ERROR_OUT_OF_HOST_MEMORY;
917 }
918 catch (VkResult err)
919 {
920 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
921 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
922
923 return err;
924 }
925 }
926
createRayTracingPipelinesKHR(VkDevice device,VkPipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)927 VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesKHR(VkDevice device, VkPipelineCache, uint32_t count,
928 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
929 const VkAllocationCallbacks *pAllocator,
930 VkPipeline *pPipelines)
931 {
932 uint32_t allocNdx;
933 try
934 {
935 for (allocNdx = 0; allocNdx < count; allocNdx++)
936 pPipelines[allocNdx] =
937 allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos + allocNdx, pAllocator);
938
939 return VK_SUCCESS;
940 }
941 catch (const std::bad_alloc &)
942 {
943 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
944 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
945
946 return VK_ERROR_OUT_OF_HOST_MEMORY;
947 }
948 catch (VkResult err)
949 {
950 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
951 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
952
953 return err;
954 }
955 }
956
createShadersEXT(VkDevice device,uint32_t createInfoCount,const VkShaderCreateInfoEXT * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkShaderEXT * pShaders)957 VKAPI_ATTR VkResult VKAPI_CALL createShadersEXT(VkDevice device, uint32_t createInfoCount,
958 const VkShaderCreateInfoEXT *pCreateInfos,
959 const VkAllocationCallbacks *pAllocator, VkShaderEXT *pShaders)
960 {
961 uint32_t allocNdx;
962 try
963 {
964 for (allocNdx = 0; allocNdx < createInfoCount; allocNdx++)
965 pShaders[allocNdx] =
966 allocateNonDispHandle<ShaderEXT, VkShaderEXT>(device, pCreateInfos + allocNdx, pAllocator);
967 return VK_SUCCESS;
968 }
969 catch (const std::bad_alloc &)
970 {
971 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
972 freeNonDispHandle<ShaderEXT, VkShaderEXT>(pShaders[freeNdx], pAllocator);
973 return VK_ERROR_OUT_OF_HOST_MEMORY;
974 }
975 catch (VkResult err)
976 {
977 for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
978 freeNonDispHandle<ShaderEXT, VkShaderEXT>(pShaders[freeNdx], pAllocator);
979 return err;
980 }
981 }
982
983 #endif // CTS_USES_VULKANSC
984
enumeratePhysicalDevices(VkInstance,uint32_t * pPhysicalDeviceCount,VkPhysicalDevice * pDevices)985 VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices(VkInstance, uint32_t *pPhysicalDeviceCount,
986 VkPhysicalDevice *pDevices)
987 {
988 if (pDevices && *pPhysicalDeviceCount >= 1u)
989 *pDevices = reinterpret_cast<VkPhysicalDevice>((void *)(uintptr_t)1u);
990
991 *pPhysicalDeviceCount = 1;
992
993 return VK_SUCCESS;
994 }
995
enumerateExtensions(uint32_t numExtensions,const VkExtensionProperties * extensions,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)996 VkResult enumerateExtensions(uint32_t numExtensions, const VkExtensionProperties *extensions,
997 uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
998 {
999 const uint32_t dstSize = pPropertyCount ? *pPropertyCount : 0;
1000
1001 if (pPropertyCount)
1002 *pPropertyCount = numExtensions;
1003
1004 if (pProperties)
1005 {
1006 for (uint32_t ndx = 0; ndx < de::min(numExtensions, dstSize); ++ndx)
1007 pProperties[ndx] = extensions[ndx];
1008
1009 if (dstSize < numExtensions)
1010 return VK_INCOMPLETE;
1011 }
1012
1013 return VK_SUCCESS;
1014 }
1015
enumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)1016 VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties(const char *pLayerName,
1017 uint32_t *pPropertyCount,
1018 VkExtensionProperties *pProperties)
1019 {
1020 static const VkExtensionProperties s_extensions[] = {
1021 {"VK_KHR_get_physical_device_properties2", 1u},
1022 {"VK_KHR_external_memory_capabilities", 1u},
1023 };
1024
1025 if (!pLayerName)
1026 return enumerateExtensions((uint32_t)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount,
1027 pProperties);
1028 else
1029 return enumerateExtensions(0, nullptr, pPropertyCount, pProperties);
1030 }
1031
enumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)1032 VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
1033 const char *pLayerName, uint32_t *pPropertyCount,
1034 VkExtensionProperties *pProperties)
1035 {
1036 DE_UNREF(physicalDevice);
1037
1038 static const VkExtensionProperties s_extensions[] = {
1039 {"VK_KHR_bind_memory2", 1u},
1040 {"VK_KHR_external_memory", 1u},
1041 {"VK_KHR_get_memory_requirements2", 1u},
1042 {"VK_KHR_maintenance1", 1u},
1043 {"VK_KHR_sampler_ycbcr_conversion", 1u},
1044 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
1045 {"VK_ANDROID_external_memory_android_hardware_buffer", 1u},
1046 #endif
1047 };
1048
1049 if (!pLayerName)
1050 return enumerateExtensions((uint32_t)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount,
1051 pProperties);
1052 else
1053 return enumerateExtensions(0, nullptr, pPropertyCount, pProperties);
1054 }
1055
// Reports every core VkPhysicalDeviceFeatures member as supported.
// The null driver never executes real work, so advertising all features
// lets as many CTS tests as possible proceed past their feature checks.
// NOTE(review): every named member of the struct is assigned below, so the
// output does not need to be zero-initialized first — verify this still
// holds if VkPhysicalDeviceFeatures ever gains members.
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                     VkPhysicalDeviceFeatures *pFeatures)
{
    DE_UNREF(physicalDevice);

    // Enable all features allow as many tests to run as possible
    pFeatures->robustBufferAccess                      = VK_TRUE;
    pFeatures->fullDrawIndexUint32                     = VK_TRUE;
    pFeatures->imageCubeArray                          = VK_TRUE;
    pFeatures->independentBlend                        = VK_TRUE;
    pFeatures->geometryShader                          = VK_TRUE;
    pFeatures->tessellationShader                      = VK_TRUE;
    pFeatures->sampleRateShading                       = VK_TRUE;
    pFeatures->dualSrcBlend                            = VK_TRUE;
    pFeatures->logicOp                                 = VK_TRUE;
    pFeatures->multiDrawIndirect                       = VK_TRUE;
    pFeatures->drawIndirectFirstInstance               = VK_TRUE;
    pFeatures->depthClamp                              = VK_TRUE;
    pFeatures->depthBiasClamp                          = VK_TRUE;
    pFeatures->fillModeNonSolid                        = VK_TRUE;
    pFeatures->depthBounds                             = VK_TRUE;
    pFeatures->wideLines                               = VK_TRUE;
    pFeatures->largePoints                             = VK_TRUE;
    pFeatures->alphaToOne                              = VK_TRUE;
    pFeatures->multiViewport                           = VK_TRUE;
    pFeatures->samplerAnisotropy                       = VK_TRUE;
    pFeatures->textureCompressionETC2                  = VK_TRUE;
    pFeatures->textureCompressionASTC_LDR              = VK_TRUE;
    pFeatures->textureCompressionBC                    = VK_TRUE;
    pFeatures->occlusionQueryPrecise                   = VK_TRUE;
    pFeatures->pipelineStatisticsQuery                 = VK_TRUE;
    pFeatures->vertexPipelineStoresAndAtomics          = VK_TRUE;
    pFeatures->fragmentStoresAndAtomics                = VK_TRUE;
    pFeatures->shaderTessellationAndGeometryPointSize  = VK_TRUE;
    pFeatures->shaderImageGatherExtended               = VK_TRUE;
    pFeatures->shaderStorageImageExtendedFormats       = VK_TRUE;
    pFeatures->shaderStorageImageMultisample           = VK_TRUE;
    pFeatures->shaderStorageImageReadWithoutFormat     = VK_TRUE;
    pFeatures->shaderStorageImageWriteWithoutFormat    = VK_TRUE;
    pFeatures->shaderUniformBufferArrayDynamicIndexing = VK_TRUE;
    pFeatures->shaderSampledImageArrayDynamicIndexing  = VK_TRUE;
    pFeatures->shaderStorageBufferArrayDynamicIndexing = VK_TRUE;
    pFeatures->shaderStorageImageArrayDynamicIndexing  = VK_TRUE;
    pFeatures->shaderClipDistance                      = VK_TRUE;
    pFeatures->shaderCullDistance                      = VK_TRUE;
    pFeatures->shaderFloat64                           = VK_TRUE;
    pFeatures->shaderInt64                             = VK_TRUE;
    pFeatures->shaderInt16                             = VK_TRUE;
    pFeatures->shaderResourceResidency                 = VK_TRUE;
    pFeatures->shaderResourceMinLod                    = VK_TRUE;
    pFeatures->sparseBinding                           = VK_TRUE;
    pFeatures->sparseResidencyBuffer                   = VK_TRUE;
    pFeatures->sparseResidencyImage2D                  = VK_TRUE;
    pFeatures->sparseResidencyImage3D                  = VK_TRUE;
    pFeatures->sparseResidency2Samples                 = VK_TRUE;
    pFeatures->sparseResidency4Samples                 = VK_TRUE;
    pFeatures->sparseResidency8Samples                 = VK_TRUE;
    pFeatures->sparseResidency16Samples                = VK_TRUE;
    pFeatures->sparseResidencyAliased                  = VK_TRUE;
    pFeatures->variableMultisampleRate                 = VK_TRUE;
    pFeatures->inheritedQueries                        = VK_TRUE;
}
1118
// Fills in properties for the simulated device. Most limits are set to the
// Vulkan specification's required minimum (or otherwise conservative)
// values, as indicated by the "Spec minmax" comment below; everything not
// assigned explicitly is zeroed by the initial memset.
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties(VkPhysicalDevice, VkPhysicalDeviceProperties *props)
{
    deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));

    props->apiVersion    = VK_API_VERSION_1_1;
    props->driverVersion = 1u;
    props->deviceType    = VK_PHYSICAL_DEVICE_TYPE_OTHER;

    // 5 bytes: "null" plus the terminating NUL.
    deMemcpy(props->deviceName, "null", 5);

    // Spec minmax
    props->limits.maxImageDimension1D                             = 4096;
    props->limits.maxImageDimension2D                             = 4096;
    props->limits.maxImageDimension3D                             = 256;
    props->limits.maxImageDimensionCube                           = 4096;
    props->limits.maxImageArrayLayers                             = 256;
    props->limits.maxTexelBufferElements                          = 65536;
    props->limits.maxUniformBufferRange                           = 16384;
    props->limits.maxStorageBufferRange                           = 1u << 27;
    props->limits.maxPushConstantsSize                            = 128;
    props->limits.maxMemoryAllocationCount                        = 4096;
    props->limits.maxSamplerAllocationCount                       = 4000;
    props->limits.bufferImageGranularity                          = 131072;
    props->limits.sparseAddressSpaceSize                          = 1u << 31;
    props->limits.maxBoundDescriptorSets                          = 4;
    props->limits.maxPerStageDescriptorSamplers                   = 16;
    props->limits.maxPerStageDescriptorUniformBuffers             = 12;
    props->limits.maxPerStageDescriptorStorageBuffers             = 4;
    props->limits.maxPerStageDescriptorSampledImages              = 16;
    props->limits.maxPerStageDescriptorStorageImages              = 4;
    props->limits.maxPerStageDescriptorInputAttachments           = 4;
    props->limits.maxPerStageResources                            = 128;
    props->limits.maxDescriptorSetSamplers                        = 96;
    props->limits.maxDescriptorSetUniformBuffers                  = 72;
    props->limits.maxDescriptorSetUniformBuffersDynamic           = 8;
    props->limits.maxDescriptorSetStorageBuffers                  = 24;
    props->limits.maxDescriptorSetStorageBuffersDynamic           = 4;
    props->limits.maxDescriptorSetSampledImages                   = 96;
    props->limits.maxDescriptorSetStorageImages                   = 24;
    props->limits.maxDescriptorSetInputAttachments                = 4;
    props->limits.maxVertexInputAttributes                        = 16;
    props->limits.maxVertexInputBindings                          = 16;
    props->limits.maxVertexInputAttributeOffset                   = 2047;
    props->limits.maxVertexInputBindingStride                     = 2048;
    props->limits.maxVertexOutputComponents                       = 64;
    props->limits.maxTessellationGenerationLevel                  = 64;
    props->limits.maxTessellationPatchSize                        = 32;
    props->limits.maxTessellationControlPerVertexInputComponents  = 64;
    props->limits.maxTessellationControlPerVertexOutputComponents = 64;
    props->limits.maxTessellationControlPerPatchOutputComponents  = 120;
    props->limits.maxTessellationControlTotalOutputComponents     = 2048;
    props->limits.maxTessellationEvaluationInputComponents        = 64;
    props->limits.maxTessellationEvaluationOutputComponents       = 64;
    props->limits.maxGeometryShaderInvocations                    = 32;
    props->limits.maxGeometryInputComponents                      = 64;
    props->limits.maxGeometryOutputComponents                     = 64;
    props->limits.maxGeometryOutputVertices                       = 256;
    props->limits.maxGeometryTotalOutputComponents                = 1024;
    props->limits.maxFragmentInputComponents                      = 64;
    props->limits.maxFragmentOutputAttachments                    = 4;
    props->limits.maxFragmentDualSrcAttachments                   = 1;
    props->limits.maxFragmentCombinedOutputResources              = 4;
    props->limits.maxComputeSharedMemorySize                      = 16384;
    props->limits.maxComputeWorkGroupCount[0]                     = 65535;
    props->limits.maxComputeWorkGroupCount[1]                     = 65535;
    props->limits.maxComputeWorkGroupCount[2]                     = 65535;
    props->limits.maxComputeWorkGroupInvocations                  = 128;
    props->limits.maxComputeWorkGroupSize[0]                      = 128;
    props->limits.maxComputeWorkGroupSize[1]                      = 128;
    props->limits.maxComputeWorkGroupSize[2]                      = 128;
    props->limits.subPixelPrecisionBits                           = 4;
    props->limits.subTexelPrecisionBits                           = 4;
    props->limits.mipmapPrecisionBits                             = 4;
    props->limits.maxDrawIndexedIndexValue                        = 0xffffffffu;
    props->limits.maxDrawIndirectCount                            = (1u << 16) - 1u;
    props->limits.maxSamplerLodBias                               = 2.0f;
    props->limits.maxSamplerAnisotropy                            = 16.0f;
    props->limits.maxViewports                                    = 16;
    props->limits.maxViewportDimensions[0]                        = 4096;
    props->limits.maxViewportDimensions[1]                        = 4096;
    props->limits.viewportBoundsRange[0]                          = -8192.f;
    props->limits.viewportBoundsRange[1]                          = 8191.f;
    props->limits.viewportSubPixelBits                            = 0;
    props->limits.minMemoryMapAlignment                           = 64;
    props->limits.minTexelBufferOffsetAlignment                   = 256;
    props->limits.minUniformBufferOffsetAlignment                 = 256;
    props->limits.minStorageBufferOffsetAlignment                 = 256;
    props->limits.minTexelOffset                                  = -8;
    props->limits.maxTexelOffset                                  = 7;
    props->limits.minTexelGatherOffset                            = -8;
    props->limits.maxTexelGatherOffset                            = 7;
    props->limits.minInterpolationOffset                          = -0.5f;
    props->limits.maxInterpolationOffset                          = 0.5f; // -1ulp
    props->limits.subPixelInterpolationOffsetBits                 = 4;
    props->limits.maxFramebufferWidth                             = 4096;
    props->limits.maxFramebufferHeight                            = 4096;
    props->limits.maxFramebufferLayers                            = 256;
    props->limits.framebufferColorSampleCounts                    = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
    props->limits.framebufferDepthSampleCounts                    = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
    props->limits.framebufferStencilSampleCounts                  = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
    props->limits.framebufferNoAttachmentsSampleCounts            = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
    props->limits.maxColorAttachments                             = 4;
    props->limits.sampledImageColorSampleCounts                   = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
    props->limits.sampledImageIntegerSampleCounts                 = VK_SAMPLE_COUNT_1_BIT;
    props->limits.sampledImageDepthSampleCounts                   = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
    props->limits.sampledImageStencilSampleCounts                 = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
    props->limits.storageImageSampleCounts                        = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
    props->limits.maxSampleMaskWords                              = 1;
    props->limits.timestampComputeAndGraphics                     = VK_TRUE;
    props->limits.timestampPeriod                                 = 1.0f;
    props->limits.maxClipDistances                                = 8;
    props->limits.maxCullDistances                                = 8;
    props->limits.maxCombinedClipAndCullDistances                 = 8;
    props->limits.discreteQueuePriorities                         = 2;
    props->limits.pointSizeRange[0]                               = 1.0f;
    props->limits.pointSizeRange[1]                               = 64.0f; // -1ulp
    props->limits.lineWidthRange[0]                               = 1.0f;
    props->limits.lineWidthRange[1]                               = 8.0f; // -1ulp
    props->limits.pointSizeGranularity                            = 1.0f;
    props->limits.lineWidthGranularity                            = 1.0f;
    props->limits.strictLines                                     = 0;
    props->limits.standardSampleLocations                         = VK_TRUE;
    props->limits.optimalBufferCopyOffsetAlignment                = 256;
    props->limits.optimalBufferCopyRowPitchAlignment              = 256;
    props->limits.nonCoherentAtomSize                             = 128;
}
1245
getPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice,uint32_t * count,VkQueueFamilyProperties * props)1246 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice, uint32_t *count,
1247 VkQueueFamilyProperties *props)
1248 {
1249 if (props && *count >= 1u)
1250 {
1251 deMemset(props, 0, sizeof(VkQueueFamilyProperties));
1252
1253 props->queueCount = 4u;
1254 props->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
1255 props->timestampValidBits = 64;
1256 }
1257
1258 *count = 1u;
1259 }
1260
getPhysicalDeviceMemoryProperties(VkPhysicalDevice,VkPhysicalDeviceMemoryProperties * props)1261 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties(VkPhysicalDevice,
1262 VkPhysicalDeviceMemoryProperties *props)
1263 {
1264 deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));
1265
1266 props->memoryTypeCount = 1u;
1267 props->memoryTypes[0].heapIndex = 0u;
1268 props->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1269 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
1270 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1271
1272 props->memoryHeapCount = 1u;
1273 props->memoryHeaps[0].size = 1ull << 31;
1274 props->memoryHeaps[0].flags = 0u;
1275 }
1276
getPhysicalDeviceFormatProperties(VkPhysicalDevice,VkFormat format,VkFormatProperties * pFormatProperties)1277 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties(VkPhysicalDevice, VkFormat format,
1278 VkFormatProperties *pFormatProperties)
1279 {
1280 const VkFormatFeatureFlags allFeatures =
1281 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT |
1282 VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT | VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT |
1283 VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT |
1284 VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT |
1285 VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT |
1286 VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT |
1287 VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT | VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT |
1288 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT |
1289 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT |
1290 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT |
1291 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT |
1292 VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT;
1293
1294 pFormatProperties->linearTilingFeatures = allFeatures;
1295 pFormatProperties->optimalTilingFeatures = allFeatures;
1296 pFormatProperties->bufferFeatures = allFeatures;
1297
1298 if (isYCbCrFormat(format) && getPlaneCount(format) > 1)
1299 pFormatProperties->optimalTilingFeatures |= VK_FORMAT_FEATURE_DISJOINT_BIT;
1300 }
1301
getPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkImageTiling tiling,VkImageUsageFlags usage,VkImageCreateFlags flags,VkImageFormatProperties * pImageFormatProperties)1302 VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties(
1303 VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling,
1304 VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
1305 {
1306 DE_UNREF(physicalDevice);
1307 DE_UNREF(format);
1308 DE_UNREF(type);
1309 DE_UNREF(tiling);
1310 DE_UNREF(usage);
1311 DE_UNREF(flags);
1312
1313 pImageFormatProperties->maxArrayLayers = 8;
1314 pImageFormatProperties->maxExtent.width = 4096;
1315 pImageFormatProperties->maxExtent.height = 4096;
1316 pImageFormatProperties->maxExtent.depth = 4096;
1317 pImageFormatProperties->maxMipLevels = deLog2Ceil32(4096) + 1;
1318 pImageFormatProperties->maxResourceSize = 64u * 1024u * 1024u;
1319 pImageFormatProperties->sampleCounts = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1320
1321 return VK_SUCCESS;
1322 }
1323
getDeviceQueue(VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)1324 VKAPI_ATTR void VKAPI_CALL getDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
1325 VkQueue *pQueue)
1326 {
1327 DE_UNREF(device);
1328 DE_UNREF(queueFamilyIndex);
1329
1330 if (pQueue)
1331 *pQueue = reinterpret_cast<VkQueue>((uint64_t)queueIndex + 1);
1332 }
1333
getBufferMemoryRequirements(VkDevice,VkBuffer bufferHandle,VkMemoryRequirements * requirements)1334 VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements(VkDevice, VkBuffer bufferHandle,
1335 VkMemoryRequirements *requirements)
1336 {
1337 const Buffer *buffer = bufferHandle.as<Buffer>();
1338
1339 requirements->memoryTypeBits = 1u;
1340 requirements->size = buffer->getSize();
1341 requirements->alignment = (VkDeviceSize)1u;
1342 }
1343
getPackedImageDataSize(VkFormat format,VkExtent3D extent,VkSampleCountFlagBits samples)1344 VkDeviceSize getPackedImageDataSize(VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
1345 {
1346 return (VkDeviceSize)getPixelSize(mapVkFormat(format)) * (VkDeviceSize)extent.width *
1347 (VkDeviceSize)extent.height * (VkDeviceSize)extent.depth * (VkDeviceSize)samples;
1348 }
1349
getCompressedImageDataSize(VkFormat format,VkExtent3D extent)1350 VkDeviceSize getCompressedImageDataSize(VkFormat format, VkExtent3D extent)
1351 {
1352 try
1353 {
1354 const tcu::CompressedTexFormat tcuFormat = mapVkCompressedFormat(format);
1355 const size_t blockSize = tcu::getBlockSize(tcuFormat);
1356 const tcu::IVec3 blockPixelSize = tcu::getBlockPixelSize(tcuFormat);
1357 const int numBlocksX = deDivRoundUp32((int)extent.width, blockPixelSize.x());
1358 const int numBlocksY = deDivRoundUp32((int)extent.height, blockPixelSize.y());
1359 const int numBlocksZ = deDivRoundUp32((int)extent.depth, blockPixelSize.z());
1360
1361 return blockSize * numBlocksX * numBlocksY * numBlocksZ;
1362 }
1363 catch (...)
1364 {
1365 return 0; // Unsupported compressed format
1366 }
1367 }
1368
getYCbCrImageDataSize(VkFormat format,VkExtent3D extent)1369 VkDeviceSize getYCbCrImageDataSize(VkFormat format, VkExtent3D extent)
1370 {
1371 const PlanarFormatDescription desc = getPlanarFormatDescription(format);
1372 VkDeviceSize totalSize = 0;
1373
1374 DE_ASSERT(extent.depth == 1);
1375
1376 for (uint32_t planeNdx = 0; planeNdx < desc.numPlanes; ++planeNdx)
1377 {
1378 const uint32_t elementSize = desc.planes[planeNdx].elementSizeBytes;
1379
1380 totalSize = (VkDeviceSize)deAlign64((int64_t)totalSize, elementSize);
1381 totalSize += getPlaneSizeInBytes(desc, extent, planeNdx, 0, BUFFER_IMAGE_COPY_OFFSET_GRANULARITY);
1382 }
1383
1384 return totalSize;
1385 }
1386
getImageMemoryRequirements(VkDevice,VkImage imageHandle,VkMemoryRequirements * requirements)1387 VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements(VkDevice, VkImage imageHandle,
1388 VkMemoryRequirements *requirements)
1389 {
1390 const Image *image = imageHandle.as<Image>();
1391
1392 requirements->memoryTypeBits = 1u;
1393 requirements->alignment = 16u;
1394
1395 if (isCompressedFormat(image->getFormat()))
1396 requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
1397 else if (isYCbCrFormat(image->getFormat()))
1398 requirements->size = getYCbCrImageDataSize(image->getFormat(), image->getExtent());
1399 else
1400 requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
1401 }
1402
// Allocates a host-backed DeviceMemory object. In non-SC builds the pNext
// chain is inspected for Android hardware buffer export/import requests:
// those get an ExternalDeviceMemoryAndroid backing where available, and
// VK_ERROR_INVALID_EXTERNAL_HANDLE on platforms without AHardwareBuffer
// support. All other requests use plain PrivateDeviceMemory.
VKAPI_ATTR VkResult VKAPI_CALL allocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
{
#ifndef CTS_USES_VULKANSC
    // Look for AHB-related structures in the pNext chain.
    const VkExportMemoryAllocateInfo *const exportInfo =
        findStructure<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    const VkImportAndroidHardwareBufferInfoANDROID *const importInfo =
        findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);

    // Either an AHB export was requested or a non-null AHB is being imported.
    if ((exportInfo &&
         (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0) ||
        (importInfo && importInfo->buffer.internal))
    {
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
        VK_NULL_RETURN((*pMemory = allocateNonDispHandle<ExternalDeviceMemoryAndroid, DeviceMemory, VkDeviceMemory>(
                            device, pAllocateInfo, pAllocator)));
#else
        // AHardwareBuffer is unavailable on this platform/API level.
        return VK_ERROR_INVALID_EXTERNAL_HANDLE;
#endif
    }
    else
    {
        VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(
                            device, pAllocateInfo, pAllocator)));
    }
#else  // CTS_USES_VULKANSC
    // Vulkan SC builds never use external memory; always allocate private storage.
    VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(
                        device, pAllocateInfo, pAllocator)));
#endif // CTS_USES_VULKANSC
}
1433
mapMemory(VkDevice,VkDeviceMemory memHandle,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)1434 VKAPI_ATTR VkResult VKAPI_CALL mapMemory(VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size,
1435 VkMemoryMapFlags flags, void **ppData)
1436 {
1437 DeviceMemory *const memory = memHandle.as<DeviceMemory>();
1438
1439 DE_UNREF(size);
1440 DE_UNREF(flags);
1441
1442 *ppData = (uint8_t *)memory->map() + offset;
1443
1444 return VK_SUCCESS;
1445 }
1446
unmapMemory(VkDevice device,VkDeviceMemory memHandle)1447 VKAPI_ATTR void VKAPI_CALL unmapMemory(VkDevice device, VkDeviceMemory memHandle)
1448 {
1449 DeviceMemory *const memory = memHandle.as<DeviceMemory>();
1450
1451 DE_UNREF(device);
1452
1453 memory->unmap();
1454 }
1455
1456 #ifndef CTS_USES_VULKANSC
1457
// Exports the AHardwareBuffer backing an ExternalDeviceMemoryAndroid
// allocation. The buffer is acquired before being handed out, so the
// caller owns a reference and must release it. On platforms without
// AHardwareBuffer support this is a no-op that still reports success.
VKAPI_ATTR VkResult VKAPI_CALL
getMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
                                      pt::AndroidHardwareBufferPtr *pBuffer)
{
    DE_UNREF(device);

#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
    DeviceMemory *const memory                       = pInfo->memory.as<ExternalDeviceMemoryAndroid>();
    ExternalDeviceMemoryAndroid *const androidMemory = static_cast<ExternalDeviceMemoryAndroid *>(memory);

    AHardwareBuffer *hwbuffer = androidMemory->getHwBuffer();
    // Take a reference on behalf of the caller, per the Vulkan AHB export contract.
    AHardwareBuffer_acquire(hwbuffer);
    pBuffer->internal = hwbuffer;
#else
    DE_UNREF(pInfo);
    DE_UNREF(pBuffer);
#endif

    return VK_SUCCESS;
}
1478
1479 #endif // CTS_USES_VULKANSC
1480
allocateDescriptorSets(VkDevice,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)1481 VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets(VkDevice, const VkDescriptorSetAllocateInfo *pAllocateInfo,
1482 VkDescriptorSet *pDescriptorSets)
1483 {
1484 DescriptorPool *const poolImpl = pAllocateInfo->descriptorPool.as<DescriptorPool>();
1485
1486 for (uint32_t ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
1487 {
1488 try
1489 {
1490 pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
1491 }
1492 catch (const std::bad_alloc &)
1493 {
1494 for (uint32_t freeNdx = 0; freeNdx < ndx; freeNdx++)
1495 delete pDescriptorSets[freeNdx].as<DescriptorSet>();
1496
1497 return VK_ERROR_OUT_OF_HOST_MEMORY;
1498 }
1499 catch (VkResult res)
1500 {
1501 for (uint32_t freeNdx = 0; freeNdx < ndx; freeNdx++)
1502 delete pDescriptorSets[freeNdx].as<DescriptorSet>();
1503
1504 return res;
1505 }
1506 }
1507
1508 return VK_SUCCESS;
1509 }
1510
freeDescriptorSets(VkDevice,VkDescriptorPool descriptorPool,uint32_t count,const VkDescriptorSet * pDescriptorSets)1511 VKAPI_ATTR void VKAPI_CALL freeDescriptorSets(VkDevice, VkDescriptorPool descriptorPool, uint32_t count,
1512 const VkDescriptorSet *pDescriptorSets)
1513 {
1514 DescriptorPool *const poolImpl = descriptorPool.as<DescriptorPool>();
1515
1516 for (uint32_t ndx = 0; ndx < count; ++ndx)
1517 poolImpl->free(pDescriptorSets[ndx]);
1518 }
1519
resetDescriptorPool(VkDevice,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags)1520 VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool(VkDevice, VkDescriptorPool descriptorPool,
1521 VkDescriptorPoolResetFlags)
1522 {
1523 DescriptorPool *const poolImpl = descriptorPool.as<DescriptorPool>();
1524
1525 poolImpl->reset();
1526
1527 return VK_SUCCESS;
1528 }
1529
allocateCommandBuffers(VkDevice device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers)1530 VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers(VkDevice device,
1531 const VkCommandBufferAllocateInfo *pAllocateInfo,
1532 VkCommandBuffer *pCommandBuffers)
1533 {
1534 DE_UNREF(device);
1535
1536 if (pAllocateInfo && pCommandBuffers)
1537 {
1538 CommandPool *const poolImpl = pAllocateInfo->commandPool.as<CommandPool>();
1539
1540 for (uint32_t ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
1541 pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);
1542 }
1543
1544 return VK_SUCCESS;
1545 }
1546
freeCommandBuffers(VkDevice device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)1547 VKAPI_ATTR void VKAPI_CALL freeCommandBuffers(VkDevice device, VkCommandPool commandPool,
1548 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
1549 {
1550 CommandPool *const poolImpl = commandPool.as<CommandPool>();
1551
1552 DE_UNREF(device);
1553
1554 for (uint32_t ndx = 0; ndx < commandBufferCount; ++ndx)
1555 poolImpl->free(pCommandBuffers[ndx]);
1556 }
1557
createDisplayModeKHR(VkPhysicalDevice,VkDisplayKHR display,const VkDisplayModeCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDisplayModeKHR * pMode)1558 VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR(VkPhysicalDevice, VkDisplayKHR display,
1559 const VkDisplayModeCreateInfoKHR *pCreateInfo,
1560 const VkAllocationCallbacks *pAllocator,
1561 VkDisplayModeKHR *pMode)
1562 {
1563 DE_UNREF(pAllocator);
1564 VK_NULL_RETURN(
1565 (*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
1566 }
1567
createSharedSwapchainsKHR(VkDevice device,uint32_t swapchainCount,const VkSwapchainCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchains)1568 VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
1569 const VkSwapchainCreateInfoKHR *pCreateInfos,
1570 const VkAllocationCallbacks *pAllocator,
1571 VkSwapchainKHR *pSwapchains)
1572 {
1573 for (uint32_t ndx = 0; ndx < swapchainCount; ++ndx)
1574 {
1575 pSwapchains[ndx] =
1576 allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos + ndx, pAllocator);
1577 }
1578
1579 return VK_SUCCESS;
1580 }
1581
// Reports external-memory capabilities for buffers. By default no external
// handle types are supported; in non-SC builds the Android hardware buffer
// handle type is reported as both importable and exportable.
// (pExternalBufferInfo is DE_UNREF'd because it is unused in SC builds,
// where the whole AHB branch is compiled out.)
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceExternalBufferPropertiesKHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
    VkExternalBufferProperties *pExternalBufferProperties)
{
    DE_UNREF(physicalDevice);
    DE_UNREF(pExternalBufferInfo);

    // Default: no external memory support at all.
    pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures       = 0;
    pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
    pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes         = 0;

#ifndef CTS_USES_VULKANSC
    if (pExternalBufferInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
    {
        pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures =
            VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
        pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
        pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif // CTS_USES_VULKANSC
}
1605
// Extended image-format query. Delegates the core query to
// getPhysicalDeviceImageFormatProperties, then (non-SC builds only)
// validates Android hardware buffer external-handle requests: only a fixed
// set of 2D color formats with a restricted usage/flags mask is accepted,
// and matching requests get AHB external-memory properties filled in.
VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
    VkImageFormatProperties2 *pImageFormatProperties)
{
#ifndef CTS_USES_VULKANSC
    const VkPhysicalDeviceExternalImageFormatInfo *const externalInfo =
        findStructure<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
    VkExternalImageFormatProperties *const externalProperties =
        findStructure<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
    VkResult result;

    // Core (non-external) portion of the query.
    result = getPhysicalDeviceImageFormatProperties(
        physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
        pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
    if (result != VK_SUCCESS)
        return result;

    if (externalInfo && externalInfo->handleType != 0)
    {
        // AHB is the only external handle type this driver understands.
        if (externalInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
            return VK_ERROR_FORMAT_NOT_SUPPORTED;

        // AHB images are limited to this fixed set of color formats.
        if (!(pImageFormatInfo->format == VK_FORMAT_R8G8B8A8_UNORM ||
              pImageFormatInfo->format == VK_FORMAT_R8G8B8_UNORM ||
              pImageFormatInfo->format == VK_FORMAT_R5G6B5_UNORM_PACK16 ||
              pImageFormatInfo->format == VK_FORMAT_R16G16B16A16_SFLOAT ||
              pImageFormatInfo->format == VK_FORMAT_A2R10G10B10_UNORM_PACK32))
        {
            return VK_ERROR_FORMAT_NOT_SUPPORTED;
        }

        // Only 2D images may be AHB-backed.
        if (pImageFormatInfo->type != VK_IMAGE_TYPE_2D)
            return VK_ERROR_FORMAT_NOT_SUPPORTED;

        // Reject any usage outside transfer/sampled/color-attachment.
        if ((pImageFormatInfo->usage & ~(VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                                         VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) != 0)
        {
            return VK_ERROR_FORMAT_NOT_SUPPORTED;
        }

        // Only mutable-format is allowed among create flags.
        if ((pImageFormatInfo->flags & ~(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
                                         /*| VK_IMAGE_CREATE_PROTECTED_BIT*/
                                         /*| VK_IMAGE_CREATE_EXTENDED_USAGE_BIT*/)) != 0)
        {
            return VK_ERROR_FORMAT_NOT_SUPPORTED;
        }

        if (externalProperties)
        {
            externalProperties->externalMemoryProperties.externalMemoryFeatures =
                VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT | VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT |
                VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
            externalProperties->externalMemoryProperties.exportFromImportedHandleTypes =
                VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
            externalProperties->externalMemoryProperties.compatibleHandleTypes =
                VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
        }
    }

    return VK_SUCCESS;
#else  // CTS_USES_VULKANSC
    // SC builds have no external-memory path; forward straight to the core query.
    return getPhysicalDeviceImageFormatProperties(
        physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
        pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
#endif // CTS_USES_VULKANSC
}
1672
1673 // \note getInstanceProcAddr is a little bit special:
1674 // vkNullDriverImpl.inl needs it to define s_platformFunctions but
1675 // getInstanceProcAddr() implementation needs other entry points from
1676 // vkNullDriverImpl.inl.
1677 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr(VkInstance instance, const char *pName);
1678
1679 #include "vkNullDriverImpl.inl"
1680
getInstanceProcAddr(VkInstance instance,const char * pName)1681 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr(VkInstance instance, const char *pName)
1682 {
1683 if (instance)
1684 {
1685 return reinterpret_cast<Instance *>(instance)->getProcAddr(pName);
1686 }
1687 else
1688 {
1689 const std::string name = pName;
1690
1691 if (name == "vkCreateInstance")
1692 return (PFN_vkVoidFunction)createInstance;
1693 else if (name == "vkEnumerateInstanceExtensionProperties")
1694 return (PFN_vkVoidFunction)enumerateInstanceExtensionProperties;
1695 else if (name == "vkEnumerateInstanceLayerProperties")
1696 return (PFN_vkVoidFunction)enumerateInstanceLayerProperties;
1697 else
1698 return nullptr;
1699 }
1700 }
1701
1702 } // extern "C"
1703
Instance(const VkInstanceCreateInfo *)1704 Instance::Instance(const VkInstanceCreateInfo *)
1705 : m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
1706 {
1707 }
1708
Device(VkPhysicalDevice,const VkDeviceCreateInfo *)1709 Device::Device(VkPhysicalDevice, const VkDeviceCreateInfo *)
1710 : m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
1711 {
1712 }
1713
1714 class NullDriverLibrary : public Library
1715 {
1716 public:
NullDriverLibrary(void)1717 NullDriverLibrary(void)
1718 : m_library(s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
1719 , m_driver(m_library)
1720 {
1721 }
1722
getPlatformInterface(void) const1723 const PlatformInterface &getPlatformInterface(void) const
1724 {
1725 return m_driver;
1726 }
getFunctionLibrary(void) const1727 const tcu::FunctionLibrary &getFunctionLibrary(void) const
1728 {
1729 return m_library;
1730 }
1731
1732 private:
1733 const tcu::StaticFunctionLibrary m_library;
1734 const PlatformDriver m_driver;
1735 };
1736
1737 } // namespace
1738
createNullDriver(void)1739 Library *createNullDriver(void)
1740 {
1741 return new NullDriverLibrary();
1742 }
1743
1744 } // namespace vk
1745