1 /*-------------------------------------------------------------------------
2 * Vulkan CTS Framework
3 * --------------------
4 *
5 * Copyright (c) 2015 Google Inc.
6 * Copyright (c) 2023 LunarG, Inc.
7 * Copyright (c) 2023 Nintendo
8 *
9 * Licensed under the Apache License, Version 2.0 (the "License");
10 * you may not use this file except in compliance with the License.
11 * You may obtain a copy of the License at
12 *
13 * http://www.apache.org/licenses/LICENSE-2.0
14 *
15 * Unless required by applicable law or agreed to in writing, software
16 * distributed under the License is distributed on an "AS IS" BASIS,
17 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 * See the License for the specific language governing permissions and
19 * limitations under the License.
20 *
21 *//*!
22 * \file
23 * \brief Null (do-nothing) Vulkan implementation.
24 *//*--------------------------------------------------------------------*/
25
26 #include "vkNullDriver.hpp"
27 #include "vkPlatform.hpp"
28 #include "vkImageUtil.hpp"
29 #include "vkQueryUtil.hpp"
30 #include "tcuFunctionLibrary.hpp"
31 #include "deMemory.h"
32
33 #if (DE_OS == DE_OS_ANDROID) && defined(__ANDROID_API_O__) && (DE_ANDROID_API >= __ANDROID_API_O__ /* __ANDROID_API_O__ */)
34 # define USE_ANDROID_O_HARDWARE_BUFFER
35 #endif
36 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
37 # include <android/hardware_buffer.h>
38 #endif
39
40 #include <stdexcept>
41 #include <algorithm>
42
43 namespace vk
44 {
45
46 namespace
47 {
48
49 using std::vector;
50
51 // Memory management
52
53 template<typename T>
allocateSystemMem(const VkAllocationCallbacks * pAllocator,VkSystemAllocationScope scope)54 void* allocateSystemMem (const VkAllocationCallbacks* pAllocator, VkSystemAllocationScope scope)
55 {
56 void* ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void*), scope);
57 if (!ptr)
58 throw std::bad_alloc();
59 return ptr;
60 }
61
freeSystemMem(const VkAllocationCallbacks * pAllocator,void * mem)62 void freeSystemMem (const VkAllocationCallbacks* pAllocator, void* mem)
63 {
64 pAllocator->pfnFree(pAllocator->pUserData, mem);
65 }
66
67 template<typename Object, typename Handle, typename Parent, typename CreateInfo>
allocateHandle(Parent parent,const CreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator)68 Handle allocateHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
69 {
70 Object* obj = DE_NULL;
71
72 if (pAllocator)
73 {
74 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
75 try
76 {
77 obj = new (mem) Object(parent, pCreateInfo);
78 DE_ASSERT(obj == mem);
79 }
80 catch (...)
81 {
82 pAllocator->pfnFree(pAllocator->pUserData, mem);
83 throw;
84 }
85 }
86 else
87 obj = new Object(parent, pCreateInfo);
88
89 return reinterpret_cast<Handle>(obj);
90 }
91
92 template<typename Object, typename Handle, typename CreateInfo>
allocateHandle(const CreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator)93 Handle allocateHandle (const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
94 {
95 Object* obj = DE_NULL;
96
97 if (pAllocator)
98 {
99 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
100 try
101 {
102 obj = new (mem) Object(pCreateInfo);
103 DE_ASSERT(obj == mem);
104 }
105 catch (...)
106 {
107 pAllocator->pfnFree(pAllocator->pUserData, mem);
108 throw;
109 }
110 }
111 else
112 obj = new Object(pCreateInfo);
113
114 return reinterpret_cast<Handle>(obj);
115 }
116
117 template<typename Object, typename Handle, typename Parent>
allocateHandle(Parent parent,const VkAllocationCallbacks * pAllocator)118 Handle allocateHandle (Parent parent, const VkAllocationCallbacks* pAllocator)
119 {
120 Object* obj = DE_NULL;
121
122 if (pAllocator)
123 {
124 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
125 try
126 {
127 obj = new (mem) Object(parent);
128 DE_ASSERT(obj == mem);
129 }
130 catch (...)
131 {
132 pAllocator->pfnFree(pAllocator->pUserData, mem);
133 throw;
134 }
135 }
136 else
137 obj = new Object(parent);
138
139 return reinterpret_cast<Handle>(obj);
140 }
141
142 template<typename Object, typename Handle>
freeHandle(Handle handle,const VkAllocationCallbacks * pAllocator)143 void freeHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
144 {
145 Object* obj = reinterpret_cast<Object*>(handle);
146
147 if (pAllocator)
148 {
149 obj->~Object();
150 freeSystemMem(pAllocator, reinterpret_cast<void*>(obj));
151 }
152 else
153 delete obj;
154 }
155
156 template<typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
allocateNonDispHandleArray(Parent parent,VkPipelineCache pipelineCache,uint32_t createInfoCount,const CreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,Handle * pHandles)157 void allocateNonDispHandleArray (Parent parent, VkPipelineCache pipelineCache, uint32_t createInfoCount, const CreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, Handle* pHandles)
158 {
159 (void)pipelineCache;
160 for (uint32_t i = 0; i < createInfoCount; i++) {
161 Object* const obj = allocateHandle<Object, Object*>(parent, &pCreateInfos[i], pAllocator);
162 pHandles[i] = Handle((deUint64)(deUintptr)obj);
163 }
164 }
165
166 template<typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
allocateNonDispHandle(Parent parent,const CreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator)167 Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
168 {
169 Object* const obj = allocateHandle<Object, Object*>(parent, pCreateInfo, pAllocator);
170 return Handle((deUint64)(deUintptr)static_cast<BaseObject*>(obj));
171 }
172
// Convenience overload for the common case where the object is its own base class.
template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	return allocateNonDispHandle<Object, Object, Handle, Parent, CreateInfo>(parent, pCreateInfo, pAllocator);
}
178
179 template<typename Object, typename Handle, typename Parent>
allocateNonDispHandle(Parent parent,const VkAllocationCallbacks * pAllocator)180 Handle allocateNonDispHandle (Parent parent, const VkAllocationCallbacks* pAllocator)
181 {
182 Object* const obj = allocateHandle<Object, Object*>(parent, pAllocator);
183 return Handle((deUint64)(deUintptr)obj);
184 }
185
186 template<typename Object, typename Handle>
freeNonDispHandle(Handle handle,const VkAllocationCallbacks * pAllocator)187 void freeNonDispHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
188 {
189 freeHandle<Object>(reinterpret_cast<Object*>((deUintptr)handle.getInternal()), pAllocator);
190 }
191
192 // Object definitions
193
// Wraps a create-statement: translates the exception-based object implementation
// into the VkResult return convention used by the Vulkan entry points.
#define VK_NULL_RETURN(STMT) \
	do { \
		try { \
			STMT; \
			return VK_SUCCESS; \
		} catch (const std::bad_alloc&) { \
			return VK_ERROR_OUT_OF_HOST_MEMORY; \
		} catch (VkResult res) { \
			return res; \
		} \
	} while (deGetFalse())

// Builds a { name, function-pointer } entry for the static function tables.
// \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
#define VK_NULL_FUNC_ENTRY(NAME, FUNC) { #NAME, (deFunctionPtr)FUNC } // NOLINT(FUNC)

// Declares a trivial device-level object that accepts (and ignores) its create info.
#define VK_NULL_DEFINE_DEVICE_OBJ(NAME) \
struct NAME \
{ \
	NAME (VkDevice, const Vk##NAME##CreateInfo*) {} \
}

// As above, for objects whose type and create-info names carry an extension
// postfix (KHR/EXT/...); the parent may be a VkDevice or a VkInstance.
#define VK_NULL_DEFINE_OBJ_WITH_POSTFIX(DEVICE_OR_INSTANCE, NAME, POSTFIX) \
struct NAME##POSTFIX \
{ \
	NAME##POSTFIX (DEVICE_OR_INSTANCE, const Vk##NAME##CreateInfo##POSTFIX*) {} \
};
220
// Stateless object types: the null driver keeps no per-object data for these.
VK_NULL_DEFINE_DEVICE_OBJ(Fence);
VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
VK_NULL_DEFINE_DEVICE_OBJ(Event);
VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
VK_NULL_DEFINE_DEVICE_OBJ(SamplerYcbcrConversion);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Swapchain, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugUtilsMessenger, EXT)

// Extension object types differ between Vulkan SC and regular Vulkan builds.
#ifdef CTS_USES_VULKANSC
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, SemaphoreSciSyncPool, NV)
#else
VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorUpdateTemplate);
VK_NULL_DEFINE_DEVICE_OBJ(PrivateDataSlot);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugReportCallback, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuModule, NVX)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuFunction, NVX)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Micromap, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, OpticalFlowSession, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, IndirectCommandsLayout, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSession, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSessionParameters, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, ValidationCache, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, BufferCollection, FUCHSIA)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Shader, EXT)
#endif // CTS_USES_VULKANSC
256
// Null-driver instance object. Its only state is the static entry-point table
// used to service getInstanceProcAddr; the constructor is defined out of line.
class Instance
{
public:
	Instance (const VkInstanceCreateInfo* instanceInfo);
	~Instance (void) {}

	// Looks the named entry point up in the static function table.
	PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }

private:
	const tcu::StaticFunctionLibrary m_functions;
};
268
// Stateless surface object: one constructor per platform-specific create-info
// type, so every vkCreate*SurfaceKHR entry point can instantiate it. All
// create-info contents are ignored.
class SurfaceKHR
{
public:
#ifndef CTS_USES_VULKANSC
	SurfaceKHR (VkInstance, const VkXlibSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkXcbSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkWaylandSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkAndroidSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkWin32SurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkViSurfaceCreateInfoNN*) {}
	SurfaceKHR (VkInstance, const VkIOSSurfaceCreateInfoMVK*) {}
	SurfaceKHR (VkInstance, const VkMacOSSurfaceCreateInfoMVK*) {}
	SurfaceKHR (VkInstance, const VkImagePipeSurfaceCreateInfoFUCHSIA*) {}
	SurfaceKHR (VkInstance, const VkStreamDescriptorSurfaceCreateInfoGGP*) {}
	SurfaceKHR (VkInstance, const VkMetalSurfaceCreateInfoEXT*) {}
	SurfaceKHR (VkInstance, const VkSurfaceCreateInfoOHOS*) {}
	SurfaceKHR (VkInstance, const VkScreenSurfaceCreateInfoQNX*) {}
#endif // CTS_USES_VULKANSC
	SurfaceKHR (VkInstance, const VkDisplaySurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkHeadlessSurfaceCreateInfoEXT*) {}
	~SurfaceKHR (void) {}
};
291
// Stateless display-mode object; created against a VkDisplayKHR parent.
class DisplayModeKHR
{
public:
	DisplayModeKHR (VkDisplayKHR, const VkDisplayModeCreateInfoKHR*) {}
	~DisplayModeKHR (void) {}
};
298
// Null-driver device object. Like Instance, its only state is the static
// entry-point table served via getDeviceProcAddr; constructor defined out of line.
class Device
{
public:
	Device (VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* deviceInfo);
	~Device (void) {}

	// Looks the named entry point up in the static function table.
	PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }

private:
	const tcu::StaticFunctionLibrary m_functions;
};
310
// Stateless pipeline object; one constructor per pipeline kind so the same
// type backs graphics, compute and (non-SC) ray-tracing / execution-graph
// pipeline handles.
class Pipeline
{
public:
	Pipeline (VkDevice, const VkGraphicsPipelineCreateInfo*) {}
	Pipeline (VkDevice, const VkComputePipelineCreateInfo*) {}
#ifndef CTS_USES_VULKANSC
	Pipeline (VkDevice, const VkRayTracingPipelineCreateInfoNV*) {}
	Pipeline (VkDevice, const VkRayTracingPipelineCreateInfoKHR*) {}
	Pipeline (VkDevice, const VkExecutionGraphPipelineCreateInfoAMDX*) {}
#endif // CTS_USES_VULKANSC
};
322
// Stateless render pass; accepts both the original and the version-2 create info.
class RenderPass
{
public:
	RenderPass (VkDevice, const VkRenderPassCreateInfo*) {}
	RenderPass (VkDevice, const VkRenderPassCreateInfo2*) {}
};
329
330 class Buffer
331 {
332 public:
Buffer(VkDevice,const VkBufferCreateInfo * pCreateInfo)333 Buffer (VkDevice, const VkBufferCreateInfo* pCreateInfo)
334 : m_size (pCreateInfo->size)
335 {
336 }
337
getSize(void) const338 VkDeviceSize getSize (void) const { return m_size; }
339
340 private:
341 const VkDeviceSize m_size;
342 };
343
getExternalTypesHandle(const VkImageCreateInfo * pCreateInfo)344 VkExternalMemoryHandleTypeFlags getExternalTypesHandle (const VkImageCreateInfo* pCreateInfo)
345 {
346 const VkExternalMemoryImageCreateInfo* const externalInfo = findStructure<VkExternalMemoryImageCreateInfo> (pCreateInfo->pNext);
347
348 return externalInfo ? externalInfo->handleTypes : 0u;
349 }
350
351 class Image
352 {
353 public:
Image(VkDevice,const VkImageCreateInfo * pCreateInfo)354 Image (VkDevice, const VkImageCreateInfo* pCreateInfo)
355 : m_imageType (pCreateInfo->imageType)
356 , m_format (pCreateInfo->format)
357 , m_extent (pCreateInfo->extent)
358 , m_arrayLayers (pCreateInfo->arrayLayers)
359 , m_samples (pCreateInfo->samples)
360 , m_usage (pCreateInfo->usage)
361 , m_flags (pCreateInfo->flags)
362 , m_externalHandleTypes (getExternalTypesHandle(pCreateInfo))
363 {
364 }
365
getImageType(void) const366 VkImageType getImageType (void) const { return m_imageType; }
getFormat(void) const367 VkFormat getFormat (void) const { return m_format; }
getExtent(void) const368 VkExtent3D getExtent (void) const { return m_extent; }
getArrayLayers(void) const369 deUint32 getArrayLayers (void) const { return m_arrayLayers; }
getSamples(void) const370 VkSampleCountFlagBits getSamples (void) const { return m_samples; }
getUsage(void) const371 VkImageUsageFlags getUsage (void) const { return m_usage; }
getFlags(void) const372 VkImageCreateFlags getFlags (void) const { return m_flags; }
getExternalHandleTypes(void) const373 VkExternalMemoryHandleTypeFlags getExternalHandleTypes (void) const { return m_externalHandleTypes; }
374
375 private:
376 const VkImageType m_imageType;
377 const VkFormat m_format;
378 const VkExtent3D m_extent;
379 const deUint32 m_arrayLayers;
380 const VkSampleCountFlagBits m_samples;
381 const VkImageUsageFlags m_usage;
382 const VkImageCreateFlags m_flags;
383 const VkExternalMemoryHandleTypeFlags m_externalHandleTypes;
384 };
385
allocateHeap(const VkMemoryAllocateInfo * pAllocInfo)386 void* allocateHeap (const VkMemoryAllocateInfo* pAllocInfo)
387 {
388 // \todo [2015-12-03 pyry] Alignment requirements?
389 // \todo [2015-12-03 pyry] Empty allocations okay?
390 if (pAllocInfo->allocationSize > 0)
391 {
392 void* const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
393 if (!heapPtr)
394 throw std::bad_alloc();
395 return heapPtr;
396 }
397 else
398 return DE_NULL;
399 }
400
// Releases storage obtained from allocateHeap(); deFree accepts null pointers.
void freeHeap (void* ptr)
{
	deFree(ptr);
}
405
// Abstract base for device-memory allocations; concrete subclasses decide
// where the backing storage lives (host heap, Android hardware buffer, ...).
class DeviceMemory
{
public:
	virtual ~DeviceMemory (void) {}
	// Returns a host-visible pointer to the backing storage.
	virtual void* map (void) = 0;
	virtual void unmap (void) = 0;
};
413
414 class PrivateDeviceMemory : public DeviceMemory
415 {
416 public:
PrivateDeviceMemory(VkDevice,const VkMemoryAllocateInfo * pAllocInfo)417 PrivateDeviceMemory (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
418 : m_memory(allocateHeap(pAllocInfo))
419 {
420 // \todo [2016-08-03 pyry] In some cases leaving data unintialized would help valgrind analysis,
421 // but currently it mostly hinders it.
422 if (m_memory)
423 deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
424 }
~PrivateDeviceMemory(void)425 virtual ~PrivateDeviceMemory (void)
426 {
427 freeHeap(m_memory);
428 }
429
map(void)430 virtual void* map (void) /*override*/ { return m_memory; }
unmap(void)431 virtual void unmap (void) /*override*/ {}
432
433 private:
434 void* const m_memory;
435 };
436
437 #ifndef CTS_USES_VULKANSC
438
439 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
findOrCreateHwBuffer(const VkMemoryAllocateInfo * pAllocInfo)440 AHardwareBuffer* findOrCreateHwBuffer (const VkMemoryAllocateInfo* pAllocInfo)
441 {
442 const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocInfo->pNext);
443 const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocInfo->pNext);
444 const VkMemoryDedicatedAllocateInfo* const dedicatedInfo = findStructure<VkMemoryDedicatedAllocateInfo>(pAllocInfo->pNext);
445 const Image* const image = dedicatedInfo && !!dedicatedInfo->image ? reinterpret_cast<const Image*>(dedicatedInfo->image.getInternal()) : DE_NULL;
446 AHardwareBuffer* hwbuffer = DE_NULL;
447
448 // Import and export aren't mutually exclusive; we can have both simultaneously.
449 DE_ASSERT((importInfo && importInfo->buffer.internal) ||
450 (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0));
451
452 if (importInfo && importInfo->buffer.internal)
453 {
454 hwbuffer = (AHardwareBuffer*)importInfo->buffer.internal;
455 AHardwareBuffer_acquire(hwbuffer);
456 }
457 else if (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
458 {
459 AHardwareBuffer_Desc hwbufferDesc;
460 deMemset(&hwbufferDesc, 0, sizeof(hwbufferDesc));
461
462 if (image)
463 {
464 hwbufferDesc.width = image->getExtent().width;
465 hwbufferDesc.height = image->getExtent().height;
466 hwbufferDesc.layers = image->getArrayLayers();
467 switch (image->getFormat())
468 {
469 case VK_FORMAT_R8G8B8A8_UNORM:
470 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
471 break;
472 case VK_FORMAT_R8G8B8_UNORM:
473 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
474 break;
475 case VK_FORMAT_R5G6B5_UNORM_PACK16:
476 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
477 break;
478 case VK_FORMAT_R16G16B16A16_SFLOAT:
479 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
480 break;
481 case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
482 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
483 break;
484 default:
485 DE_FATAL("Unsupported image format for Android hardware buffer export");
486 break;
487 }
488 if ((image->getUsage() & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
489 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
490 if ((image->getUsage() & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0)
491 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
492 // if ((image->getFlags() & VK_IMAGE_CREATE_PROTECTED_BIT) != 0)
493 // hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
494
495 // Make sure we have at least one AHB GPU usage, even if the image doesn't have any
496 // Vulkan usages with corresponding to AHB GPU usages.
497 if ((image->getUsage() & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) == 0)
498 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
499 }
500 else
501 {
502 hwbufferDesc.width = static_cast<deUint32>(pAllocInfo->allocationSize);
503 hwbufferDesc.height = 1,
504 hwbufferDesc.layers = 1,
505 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_BLOB,
506 hwbufferDesc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
507 }
508
509 AHardwareBuffer_allocate(&hwbufferDesc, &hwbuffer);
510 }
511
512 return hwbuffer;
513 }
514
// Device memory backed by an Android hardware buffer (imported or freshly
// allocated by findOrCreateHwBuffer). Releases its reference on destruction.
class ExternalDeviceMemoryAndroid : public DeviceMemory
{
public:
	ExternalDeviceMemoryAndroid (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
		: m_hwbuffer(findOrCreateHwBuffer(pAllocInfo))
	{}
	virtual ~ExternalDeviceMemoryAndroid (void)
	{
		// m_hwbuffer may be DE_NULL if allocation failed; only release a real buffer.
		if (m_hwbuffer)
			AHardwareBuffer_release(m_hwbuffer);
	}

	virtual void* map (void) /*override*/
	{
		// NOTE(review): the AHardwareBuffer_lock result is ignored; on failure
		// 'p' may be left unset — confirm callers tolerate this.
		void* p;
		AHardwareBuffer_lock(m_hwbuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, NULL, &p);
		return p;
	}

	virtual void unmap (void) /*override*/ { AHardwareBuffer_unlock(m_hwbuffer, NULL); }

	// Exposes the underlying buffer (e.g. for handle-export queries).
	AHardwareBuffer* getHwBuffer (void) { return m_hwbuffer; }

private:
	AHardwareBuffer* const m_hwbuffer;
};
541 #endif // defined(USE_ANDROID_O_HARDWARE_BUFFER)
542
543 #endif // CTS_USES_VULKANSC
544
// Stateless deferred-operation object; the null driver performs no deferred work.
class DeferredOperationKHR
{
public:
	DeferredOperationKHR (VkDevice)
	{}
};
551
// Stateless command buffer; commands recorded into it are simply discarded.
class CommandBuffer
{
public:
	CommandBuffer (VkDevice, VkCommandPool, VkCommandBufferLevel)
	{}
};
558
// Command pool: owns the command buffers allocated from it so they can be
// destroyed together with the pool (destructor defined below for non-SC builds).
class CommandPool
{
public:
	CommandPool (VkDevice device, const VkCommandPoolCreateInfo*)
		: m_device(device)
	{}
#ifndef CTS_USES_VULKANSC
	~CommandPool (void);
#endif // CTS_USES_VULKANSC

	// Creates a new CommandBuffer and registers it as owned by this pool.
	VkCommandBuffer allocate (VkCommandBufferLevel level);
	// Destroys a buffer previously returned by allocate().
	void free (VkCommandBuffer buffer);

private:
	const VkDevice m_device;

	vector<CommandBuffer*> m_buffers;	// all live buffers allocated from this pool
};
577
578 #ifndef CTS_USES_VULKANSC
579
~CommandPool(void)580 CommandPool::~CommandPool (void)
581 {
582 for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
583 delete m_buffers[ndx];
584 }
585
586 #endif // CTS_USES_VULKANSC
587
allocate(VkCommandBufferLevel level)588 VkCommandBuffer CommandPool::allocate (VkCommandBufferLevel level)
589 {
590 CommandBuffer* const impl = new CommandBuffer(m_device, VkCommandPool(reinterpret_cast<deUintptr>(this)), level);
591
592 try
593 {
594 m_buffers.push_back(impl);
595 }
596 catch (...)
597 {
598 delete impl;
599 throw;
600 }
601
602 return reinterpret_cast<VkCommandBuffer>(impl);
603 }
604
free(VkCommandBuffer buffer)605 void CommandPool::free (VkCommandBuffer buffer)
606 {
607 CommandBuffer* const impl = reinterpret_cast<CommandBuffer*>(buffer);
608
609 for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
610 {
611 if (m_buffers[ndx] == impl)
612 {
613 std::swap(m_buffers[ndx], m_buffers.back());
614 m_buffers.pop_back();
615 delete impl;
616 return;
617 }
618 }
619
620 DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
621 }
622
// Stateless descriptor set; writes/copies into it are discarded.
class DescriptorSet
{
public:
	DescriptorSet (VkDevice, VkDescriptorPool, VkDescriptorSetLayout) {}
};
628
// Descriptor pool: owns the descriptor sets allocated from it; they are
// destroyed on reset() and therefore also when the pool itself is destroyed.
class DescriptorPool
{
public:
	DescriptorPool (VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo)
		: m_device (device)
		, m_flags (pCreateInfo->flags)
	{}
	~DescriptorPool (void)
	{
		reset();
	}

	// Creates a new DescriptorSet and registers it as owned by this pool.
	VkDescriptorSet allocate (VkDescriptorSetLayout setLayout);
	// Destroys one set; requires FREE_DESCRIPTOR_SET_BIT (asserted in free()).
	void free (VkDescriptorSet set);

	// Destroys every set allocated from this pool.
	void reset (void);

private:
	const VkDevice m_device;
	const VkDescriptorPoolCreateFlags m_flags;	// create flags, checked by free()

	vector<DescriptorSet*> m_managedSets;	// all live sets allocated from this pool
};
652
allocate(VkDescriptorSetLayout setLayout)653 VkDescriptorSet DescriptorPool::allocate (VkDescriptorSetLayout setLayout)
654 {
655 DescriptorSet* const impl = new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<deUintptr>(this)), setLayout);
656
657 try
658 {
659 m_managedSets.push_back(impl);
660 }
661 catch (...)
662 {
663 delete impl;
664 throw;
665 }
666
667 return VkDescriptorSet(reinterpret_cast<deUintptr>(impl));
668 }
669
free(VkDescriptorSet set)670 void DescriptorPool::free (VkDescriptorSet set)
671 {
672 DescriptorSet* const impl = reinterpret_cast<DescriptorSet*>((deUintptr)set.getInternal());
673
674 DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
675 DE_UNREF(m_flags);
676
677 for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
678 {
679 if (m_managedSets[ndx] == impl)
680 {
681 std::swap(m_managedSets[ndx], m_managedSets.back());
682 m_managedSets.pop_back();
683 delete impl;
684 return;
685 }
686 }
687
688 DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
689 }
690
reset(void)691 void DescriptorPool::reset (void)
692 {
693 for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
694 delete m_managedSets[ndx];
695 m_managedSets.clear();
696 }
697
698 // API implementation
699
700 extern "C"
701 {
702
getDeviceProcAddr(VkDevice device,const char * pName)703 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr (VkDevice device, const char* pName)
704 {
705 return reinterpret_cast<Device*>(device)->getProcAddr(pName);
706 }
707
createGraphicsPipelines(VkDevice device,VkPipelineCache,deUint32 count,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)708 VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
709 {
710 deUint32 allocNdx;
711 try
712 {
713 for (allocNdx = 0; allocNdx < count; allocNdx++)
714 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
715
716 return VK_SUCCESS;
717 }
718 catch (const std::bad_alloc&)
719 {
720 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
721 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
722
723 return VK_ERROR_OUT_OF_HOST_MEMORY;
724 }
725 catch (VkResult err)
726 {
727 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
728 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
729
730 return err;
731 }
732 }
733
createComputePipelines(VkDevice device,VkPipelineCache,deUint32 count,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)734 VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
735 {
736 deUint32 allocNdx;
737 try
738 {
739 for (allocNdx = 0; allocNdx < count; allocNdx++)
740 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
741
742 return VK_SUCCESS;
743 }
744 catch (const std::bad_alloc&)
745 {
746 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
747 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
748
749 return VK_ERROR_OUT_OF_HOST_MEMORY;
750 }
751 catch (VkResult err)
752 {
753 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
754 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
755
756 return err;
757 }
758 }
759
760 #ifndef CTS_USES_VULKANSC
761
createRayTracingPipelinesNV(VkDevice device,VkPipelineCache,deUint32 count,const VkRayTracingPipelineCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)762 VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesNV (VkDevice device, VkPipelineCache, deUint32 count, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
763 {
764 deUint32 allocNdx;
765 try
766 {
767 for (allocNdx = 0; allocNdx < count; allocNdx++)
768 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
769
770 return VK_SUCCESS;
771 }
772 catch (const std::bad_alloc&)
773 {
774 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
775 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
776
777 return VK_ERROR_OUT_OF_HOST_MEMORY;
778 }
779 catch (VkResult err)
780 {
781 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
782 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
783
784 return err;
785 }
786 }
787
createRayTracingPipelinesKHR(VkDevice device,VkPipelineCache,deUint32 count,const VkRayTracingPipelineCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)788 VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesKHR (VkDevice device, VkPipelineCache, deUint32 count, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
789 {
790 deUint32 allocNdx;
791 try
792 {
793 for (allocNdx = 0; allocNdx < count; allocNdx++)
794 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
795
796 return VK_SUCCESS;
797 }
798 catch (const std::bad_alloc&)
799 {
800 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
801 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
802
803 return VK_ERROR_OUT_OF_HOST_MEMORY;
804 }
805 catch (VkResult err)
806 {
807 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
808 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
809
810 return err;
811 }
812 }
813
createShadersEXT(VkDevice device,uint32_t createInfoCount,const VkShaderCreateInfoEXT * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkShaderEXT * pShaders)814 VKAPI_ATTR VkResult VKAPI_CALL createShadersEXT (VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders)
815 {
816 deUint32 allocNdx;
817 try
818 {
819 for (allocNdx = 0; allocNdx < createInfoCount; allocNdx++)
820 pShaders[allocNdx] = allocateNonDispHandle<ShaderEXT, VkShaderEXT>(device, pCreateInfos + allocNdx, pAllocator);
821 return VK_SUCCESS;
822 }
823 catch (const std::bad_alloc&)
824 {
825 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
826 freeNonDispHandle<ShaderEXT, VkShaderEXT>(pShaders[freeNdx], pAllocator);
827 return VK_ERROR_OUT_OF_HOST_MEMORY;
828 }
829 catch (VkResult err)
830 {
831 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
832 freeNonDispHandle<ShaderEXT, VkShaderEXT>(pShaders[freeNdx], pAllocator);
833 return err;
834 }
835 }
836
837 #endif // CTS_USES_VULKANSC
838
enumeratePhysicalDevices(VkInstance,deUint32 * pPhysicalDeviceCount,VkPhysicalDevice * pDevices)839 VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices (VkInstance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pDevices)
840 {
841 if (pDevices && *pPhysicalDeviceCount >= 1u)
842 *pDevices = reinterpret_cast<VkPhysicalDevice>((void*)(deUintptr)1u);
843
844 *pPhysicalDeviceCount = 1;
845
846 return VK_SUCCESS;
847 }
848
enumerateExtensions(deUint32 numExtensions,const VkExtensionProperties * extensions,deUint32 * pPropertyCount,VkExtensionProperties * pProperties)849 VkResult enumerateExtensions (deUint32 numExtensions, const VkExtensionProperties* extensions, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
850 {
851 const deUint32 dstSize = pPropertyCount ? *pPropertyCount : 0;
852
853 if (pPropertyCount)
854 *pPropertyCount = numExtensions;
855
856 if (pProperties)
857 {
858 for (deUint32 ndx = 0; ndx < de::min(numExtensions, dstSize); ++ndx)
859 pProperties[ndx] = extensions[ndx];
860
861 if (dstSize < numExtensions)
862 return VK_INCOMPLETE;
863 }
864
865 return VK_SUCCESS;
866 }
867
enumerateInstanceExtensionProperties(const char * pLayerName,deUint32 * pPropertyCount,VkExtensionProperties * pProperties)868 VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties (const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
869 {
870 static const VkExtensionProperties s_extensions[] =
871 {
872 { "VK_KHR_get_physical_device_properties2", 1u },
873 { "VK_KHR_external_memory_capabilities", 1u },
874 };
875
876 if (!pLayerName)
877 return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
878 else
879 return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
880 }
881
enumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,const char * pLayerName,deUint32 * pPropertyCount,VkExtensionProperties * pProperties)882 VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties (VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
883 {
884 DE_UNREF(physicalDevice);
885
886 static const VkExtensionProperties s_extensions[] =
887 {
888 { "VK_KHR_bind_memory2", 1u },
889 { "VK_KHR_external_memory", 1u },
890 { "VK_KHR_get_memory_requirements2", 1u },
891 { "VK_KHR_maintenance1", 1u },
892 { "VK_KHR_sampler_ycbcr_conversion", 1u },
893 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
894 { "VK_ANDROID_external_memory_android_hardware_buffer", 1u },
895 #endif
896 };
897
898 if (!pLayerName)
899 return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
900 else
901 return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
902 }
903
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
{
	DE_UNREF(physicalDevice);

	// Advertise every core 1.0 feature as supported so that as many tests as
	// possible get past their feature checks (the null driver never actually
	// executes any work). Each VkBool32 member is assigned individually;
	// note that a memset would not produce valid VK_TRUE (1) values.
	pFeatures->robustBufferAccess						= VK_TRUE;
	pFeatures->fullDrawIndexUint32						= VK_TRUE;
	pFeatures->imageCubeArray							= VK_TRUE;
	pFeatures->independentBlend							= VK_TRUE;
	pFeatures->geometryShader							= VK_TRUE;
	pFeatures->tessellationShader						= VK_TRUE;
	pFeatures->sampleRateShading						= VK_TRUE;
	pFeatures->dualSrcBlend								= VK_TRUE;
	pFeatures->logicOp									= VK_TRUE;
	pFeatures->multiDrawIndirect						= VK_TRUE;
	pFeatures->drawIndirectFirstInstance				= VK_TRUE;
	pFeatures->depthClamp								= VK_TRUE;
	pFeatures->depthBiasClamp							= VK_TRUE;
	pFeatures->fillModeNonSolid							= VK_TRUE;
	pFeatures->depthBounds								= VK_TRUE;
	pFeatures->wideLines								= VK_TRUE;
	pFeatures->largePoints								= VK_TRUE;
	pFeatures->alphaToOne								= VK_TRUE;
	pFeatures->multiViewport							= VK_TRUE;
	pFeatures->samplerAnisotropy						= VK_TRUE;
	pFeatures->textureCompressionETC2					= VK_TRUE;
	pFeatures->textureCompressionASTC_LDR				= VK_TRUE;
	pFeatures->textureCompressionBC						= VK_TRUE;
	pFeatures->occlusionQueryPrecise					= VK_TRUE;
	pFeatures->pipelineStatisticsQuery					= VK_TRUE;
	pFeatures->vertexPipelineStoresAndAtomics			= VK_TRUE;
	pFeatures->fragmentStoresAndAtomics					= VK_TRUE;
	pFeatures->shaderTessellationAndGeometryPointSize	= VK_TRUE;
	pFeatures->shaderImageGatherExtended				= VK_TRUE;
	pFeatures->shaderStorageImageExtendedFormats		= VK_TRUE;
	pFeatures->shaderStorageImageMultisample			= VK_TRUE;
	pFeatures->shaderStorageImageReadWithoutFormat		= VK_TRUE;
	pFeatures->shaderStorageImageWriteWithoutFormat		= VK_TRUE;
	pFeatures->shaderUniformBufferArrayDynamicIndexing	= VK_TRUE;
	pFeatures->shaderSampledImageArrayDynamicIndexing	= VK_TRUE;
	pFeatures->shaderStorageBufferArrayDynamicIndexing	= VK_TRUE;
	pFeatures->shaderStorageImageArrayDynamicIndexing	= VK_TRUE;
	pFeatures->shaderClipDistance						= VK_TRUE;
	pFeatures->shaderCullDistance						= VK_TRUE;
	pFeatures->shaderFloat64							= VK_TRUE;
	pFeatures->shaderInt64								= VK_TRUE;
	pFeatures->shaderInt16								= VK_TRUE;
	pFeatures->shaderResourceResidency					= VK_TRUE;
	pFeatures->shaderResourceMinLod						= VK_TRUE;
	pFeatures->sparseBinding							= VK_TRUE;
	pFeatures->sparseResidencyBuffer					= VK_TRUE;
	pFeatures->sparseResidencyImage2D					= VK_TRUE;
	pFeatures->sparseResidencyImage3D					= VK_TRUE;
	pFeatures->sparseResidency2Samples					= VK_TRUE;
	pFeatures->sparseResidency4Samples					= VK_TRUE;
	pFeatures->sparseResidency8Samples					= VK_TRUE;
	pFeatures->sparseResidency16Samples					= VK_TRUE;
	pFeatures->sparseResidencyAliased					= VK_TRUE;
	pFeatures->variableMultisampleRate					= VK_TRUE;
	pFeatures->inheritedQueries							= VK_TRUE;
}
965
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties (VkPhysicalDevice, VkPhysicalDeviceProperties* props)
{
	// Report a fixed set of properties. Limits are set to the Vulkan spec's
	// required minimum (or maximum, for min-limits) values so that any test
	// respecting the spec minimums can run against the null driver.
	deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));

	props->apiVersion		= VK_API_VERSION_1_1;
	props->driverVersion	= 1u;
	props->deviceType		= VK_PHYSICAL_DEVICE_TYPE_OTHER;

	// 5 bytes copies the terminating NUL as well.
	deMemcpy(props->deviceName, "null", 5);

	// Spec minmax
	props->limits.maxImageDimension1D					= 4096;
	props->limits.maxImageDimension2D					= 4096;
	props->limits.maxImageDimension3D					= 256;
	props->limits.maxImageDimensionCube					= 4096;
	props->limits.maxImageArrayLayers					= 256;
	props->limits.maxTexelBufferElements				= 65536;
	props->limits.maxUniformBufferRange					= 16384;
	props->limits.maxStorageBufferRange					= 1u<<27;
	props->limits.maxPushConstantsSize					= 128;
	props->limits.maxMemoryAllocationCount				= 4096;
	props->limits.maxSamplerAllocationCount				= 4000;
	props->limits.bufferImageGranularity				= 131072;
	props->limits.sparseAddressSpaceSize				= 1u<<31;
	props->limits.maxBoundDescriptorSets				= 4;
	props->limits.maxPerStageDescriptorSamplers			= 16;
	props->limits.maxPerStageDescriptorUniformBuffers	= 12;
	props->limits.maxPerStageDescriptorStorageBuffers	= 4;
	props->limits.maxPerStageDescriptorSampledImages	= 16;
	props->limits.maxPerStageDescriptorStorageImages	= 4;
	props->limits.maxPerStageDescriptorInputAttachments	= 4;
	props->limits.maxPerStageResources					= 128;
	props->limits.maxDescriptorSetSamplers				= 96;
	props->limits.maxDescriptorSetUniformBuffers		= 72;
	props->limits.maxDescriptorSetUniformBuffersDynamic	= 8;
	props->limits.maxDescriptorSetStorageBuffers		= 24;
	props->limits.maxDescriptorSetStorageBuffersDynamic	= 4;
	props->limits.maxDescriptorSetSampledImages			= 96;
	props->limits.maxDescriptorSetStorageImages			= 24;
	props->limits.maxDescriptorSetInputAttachments		= 4;
	props->limits.maxVertexInputAttributes				= 16;
	props->limits.maxVertexInputBindings				= 16;
	props->limits.maxVertexInputAttributeOffset			= 2047;
	props->limits.maxVertexInputBindingStride			= 2048;
	props->limits.maxVertexOutputComponents				= 64;
	props->limits.maxTessellationGenerationLevel		= 64;
	props->limits.maxTessellationPatchSize				= 32;
	props->limits.maxTessellationControlPerVertexInputComponents	= 64;
	props->limits.maxTessellationControlPerVertexOutputComponents	= 64;
	props->limits.maxTessellationControlPerPatchOutputComponents	= 120;
	props->limits.maxTessellationControlTotalOutputComponents		= 2048;
	props->limits.maxTessellationEvaluationInputComponents			= 64;
	props->limits.maxTessellationEvaluationOutputComponents			= 64;
	props->limits.maxGeometryShaderInvocations			= 32;
	props->limits.maxGeometryInputComponents			= 64;
	props->limits.maxGeometryOutputComponents			= 64;
	props->limits.maxGeometryOutputVertices				= 256;
	props->limits.maxGeometryTotalOutputComponents		= 1024;
	props->limits.maxFragmentInputComponents			= 64;
	props->limits.maxFragmentOutputAttachments			= 4;
	props->limits.maxFragmentDualSrcAttachments			= 1;
	props->limits.maxFragmentCombinedOutputResources	= 4;
	props->limits.maxComputeSharedMemorySize			= 16384;
	props->limits.maxComputeWorkGroupCount[0]			= 65535;
	props->limits.maxComputeWorkGroupCount[1]			= 65535;
	props->limits.maxComputeWorkGroupCount[2]			= 65535;
	props->limits.maxComputeWorkGroupInvocations		= 128;
	props->limits.maxComputeWorkGroupSize[0]			= 128;
	props->limits.maxComputeWorkGroupSize[1]			= 128;
	props->limits.maxComputeWorkGroupSize[2]			= 128;
	props->limits.subPixelPrecisionBits					= 4;
	props->limits.subTexelPrecisionBits					= 4;
	props->limits.mipmapPrecisionBits					= 4;
	props->limits.maxDrawIndexedIndexValue				= 0xffffffffu;
	props->limits.maxDrawIndirectCount					= (1u<<16) - 1u;
	props->limits.maxSamplerLodBias						= 2.0f;
	props->limits.maxSamplerAnisotropy					= 16.0f;
	props->limits.maxViewports							= 16;
	props->limits.maxViewportDimensions[0]				= 4096;
	props->limits.maxViewportDimensions[1]				= 4096;
	props->limits.viewportBoundsRange[0]				= -8192.f;
	props->limits.viewportBoundsRange[1]				= 8191.f;
	props->limits.viewportSubPixelBits					= 0;
	props->limits.minMemoryMapAlignment					= 64;
	props->limits.minTexelBufferOffsetAlignment			= 256;
	props->limits.minUniformBufferOffsetAlignment		= 256;
	props->limits.minStorageBufferOffsetAlignment		= 256;
	props->limits.minTexelOffset						= -8;
	props->limits.maxTexelOffset						= 7;
	props->limits.minTexelGatherOffset					= -8;
	props->limits.maxTexelGatherOffset					= 7;
	props->limits.minInterpolationOffset				= -0.5f;
	props->limits.maxInterpolationOffset				= 0.5f; // -1ulp
	props->limits.subPixelInterpolationOffsetBits		= 4;
	props->limits.maxFramebufferWidth					= 4096;
	props->limits.maxFramebufferHeight					= 4096;
	props->limits.maxFramebufferLayers					= 256;
	props->limits.framebufferColorSampleCounts			= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferDepthSampleCounts			= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferStencilSampleCounts		= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferNoAttachmentsSampleCounts	= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.maxColorAttachments					= 4;
	props->limits.sampledImageColorSampleCounts			= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.sampledImageIntegerSampleCounts		= VK_SAMPLE_COUNT_1_BIT;
	props->limits.sampledImageDepthSampleCounts			= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.sampledImageStencilSampleCounts		= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.storageImageSampleCounts				= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.maxSampleMaskWords					= 1;
	props->limits.timestampComputeAndGraphics			= VK_TRUE;
	props->limits.timestampPeriod						= 1.0f;
	props->limits.maxClipDistances						= 8;
	props->limits.maxCullDistances						= 8;
	props->limits.maxCombinedClipAndCullDistances		= 8;
	props->limits.discreteQueuePriorities				= 2;
	props->limits.pointSizeRange[0]						= 1.0f;
	props->limits.pointSizeRange[1]						= 64.0f; // -1ulp
	props->limits.lineWidthRange[0]						= 1.0f;
	props->limits.lineWidthRange[1]						= 8.0f; // -1ulp
	props->limits.pointSizeGranularity					= 1.0f;
	props->limits.lineWidthGranularity					= 1.0f;
	props->limits.strictLines							= 0;
	props->limits.standardSampleLocations				= VK_TRUE;
	props->limits.optimalBufferCopyOffsetAlignment		= 256;
	props->limits.optimalBufferCopyRowPitchAlignment	= 256;
	props->limits.nonCoherentAtomSize					= 128;
}
1092
getPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice,deUint32 * count,VkQueueFamilyProperties * props)1093 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice, deUint32* count, VkQueueFamilyProperties* props)
1094 {
1095 if (props && *count >= 1u)
1096 {
1097 deMemset(props, 0, sizeof(VkQueueFamilyProperties));
1098
1099 props->queueCount = 4u;
1100 props->queueFlags = VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT;
1101 props->timestampValidBits = 64;
1102 }
1103
1104 *count = 1u;
1105 }
1106
getPhysicalDeviceMemoryProperties(VkPhysicalDevice,VkPhysicalDeviceMemoryProperties * props)1107 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties (VkPhysicalDevice, VkPhysicalDeviceMemoryProperties* props)
1108 {
1109 deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));
1110
1111 props->memoryTypeCount = 1u;
1112 props->memoryTypes[0].heapIndex = 0u;
1113 props->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
1114 | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
1115 | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1116
1117 props->memoryHeapCount = 1u;
1118 props->memoryHeaps[0].size = 1ull << 31;
1119 props->memoryHeaps[0].flags = 0u;
1120 }
1121
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties (VkPhysicalDevice, VkFormat format, VkFormatProperties* pFormatProperties)
{
	// Advertise (nearly) every format feature for every format and every
	// tiling so feature checks in tests pass; the null driver never renders.
	const VkFormatFeatureFlags	allFeatures	= VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
											| VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT
											| VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT
											| VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT
											| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT
											| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT
											| VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT
											| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
											| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT
											| VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT
											| VK_FORMAT_FEATURE_BLIT_SRC_BIT
											| VK_FORMAT_FEATURE_BLIT_DST_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT
											| VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT
											| VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT;

	pFormatProperties->linearTilingFeatures		= allFeatures;
	pFormatProperties->optimalTilingFeatures	= allFeatures;
	pFormatProperties->bufferFeatures			= allFeatures;

	// Multi-planar YCbCr formats additionally support disjoint binding.
	if (isYCbCrFormat(format) && getPlaneCount(format) > 1)
		pFormatProperties->optimalTilingFeatures |= VK_FORMAT_FEATURE_DISJOINT_BIT;
}
1151
getPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkImageTiling tiling,VkImageUsageFlags usage,VkImageCreateFlags flags,VkImageFormatProperties * pImageFormatProperties)1152 VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
1153 {
1154 DE_UNREF(physicalDevice);
1155 DE_UNREF(format);
1156 DE_UNREF(type);
1157 DE_UNREF(tiling);
1158 DE_UNREF(usage);
1159 DE_UNREF(flags);
1160
1161 pImageFormatProperties->maxArrayLayers = 8;
1162 pImageFormatProperties->maxExtent.width = 4096;
1163 pImageFormatProperties->maxExtent.height = 4096;
1164 pImageFormatProperties->maxExtent.depth = 4096;
1165 pImageFormatProperties->maxMipLevels = deLog2Ceil32(4096) + 1;
1166 pImageFormatProperties->maxResourceSize = 64u * 1024u * 1024u;
1167 pImageFormatProperties->sampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1168
1169 return VK_SUCCESS;
1170 }
1171
getDeviceQueue(VkDevice device,deUint32 queueFamilyIndex,deUint32 queueIndex,VkQueue * pQueue)1172 VKAPI_ATTR void VKAPI_CALL getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue)
1173 {
1174 DE_UNREF(device);
1175 DE_UNREF(queueFamilyIndex);
1176
1177 if (pQueue)
1178 *pQueue = reinterpret_cast<VkQueue>((deUint64)queueIndex + 1);
1179 }
1180
getBufferMemoryRequirements(VkDevice,VkBuffer bufferHandle,VkMemoryRequirements * requirements)1181 VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements (VkDevice, VkBuffer bufferHandle, VkMemoryRequirements* requirements)
1182 {
1183 const Buffer* buffer = reinterpret_cast<const Buffer*>(bufferHandle.getInternal());
1184
1185 requirements->memoryTypeBits = 1u;
1186 requirements->size = buffer->getSize();
1187 requirements->alignment = (VkDeviceSize)1u;
1188 }
1189
getPackedImageDataSize(VkFormat format,VkExtent3D extent,VkSampleCountFlagBits samples)1190 VkDeviceSize getPackedImageDataSize (VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
1191 {
1192 return (VkDeviceSize)getPixelSize(mapVkFormat(format))
1193 * (VkDeviceSize)extent.width
1194 * (VkDeviceSize)extent.height
1195 * (VkDeviceSize)extent.depth
1196 * (VkDeviceSize)samples;
1197 }
1198
getCompressedImageDataSize(VkFormat format,VkExtent3D extent)1199 VkDeviceSize getCompressedImageDataSize (VkFormat format, VkExtent3D extent)
1200 {
1201 try
1202 {
1203 const tcu::CompressedTexFormat tcuFormat = mapVkCompressedFormat(format);
1204 const size_t blockSize = tcu::getBlockSize(tcuFormat);
1205 const tcu::IVec3 blockPixelSize = tcu::getBlockPixelSize(tcuFormat);
1206 const int numBlocksX = deDivRoundUp32((int)extent.width, blockPixelSize.x());
1207 const int numBlocksY = deDivRoundUp32((int)extent.height, blockPixelSize.y());
1208 const int numBlocksZ = deDivRoundUp32((int)extent.depth, blockPixelSize.z());
1209
1210 return blockSize*numBlocksX*numBlocksY*numBlocksZ;
1211 }
1212 catch (...)
1213 {
1214 return 0; // Unsupported compressed format
1215 }
1216 }
1217
// Total packed size of a (possibly multi-planar) YCbCr image: the sum of all
// plane sizes, with each plane's start offset aligned to that plane's element
// size.
VkDeviceSize getYCbCrImageDataSize (VkFormat format, VkExtent3D extent)
{
	const PlanarFormatDescription	desc		= getPlanarFormatDescription(format);
	VkDeviceSize					totalSize	= 0;

	// Only 2D (depth == 1) extents are expected for YCbCr formats here.
	DE_ASSERT(extent.depth == 1);

	for (deUint32 planeNdx = 0; planeNdx < desc.numPlanes; ++planeNdx)
	{
		const deUint32	elementSize	= desc.planes[planeNdx].elementSizeBytes;

		// Align the running offset before appending this plane's bytes.
		totalSize = (VkDeviceSize)deAlign64((deInt64)totalSize, elementSize);
		totalSize += getPlaneSizeInBytes(desc, extent, planeNdx, 0, BUFFER_IMAGE_COPY_OFFSET_GRANULARITY);
	}

	return totalSize;
}
1235
getImageMemoryRequirements(VkDevice,VkImage imageHandle,VkMemoryRequirements * requirements)1236 VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements (VkDevice, VkImage imageHandle, VkMemoryRequirements* requirements)
1237 {
1238 const Image* image = reinterpret_cast<const Image*>(imageHandle.getInternal());
1239
1240 requirements->memoryTypeBits = 1u;
1241 requirements->alignment = 16u;
1242
1243 if (isCompressedFormat(image->getFormat()))
1244 requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
1245 else if (isYCbCrFormat(image->getFormat()))
1246 requirements->size = getYCbCrImageDataSize(image->getFormat(), image->getExtent());
1247 else
1248 requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
1249 }
1250
VKAPI_ATTR VkResult VKAPI_CALL allocateMemory (VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory)
{
#ifndef CTS_USES_VULKANSC
	// Android hardware buffer export/import requests are routed to a special
	// AHB-backed memory object; everything else uses plain host memory.
	const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
	const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);

	if ((exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
		|| (importInfo && importInfo->buffer.internal))
	{
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
		VK_NULL_RETURN((*pMemory = allocateNonDispHandle<ExternalDeviceMemoryAndroid, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
#else
		// AHB requested but this build has no AHB support.
		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
#endif
	}
	else
	{
		VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
	}
#else // CTS_USES_VULKANSC
	// Vulkan SC: external memory is not supported, always use host memory.
	VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
#endif // CTS_USES_VULKANSC
}
1274
mapMemory(VkDevice,VkDeviceMemory memHandle,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)1275 VKAPI_ATTR VkResult VKAPI_CALL mapMemory (VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
1276 {
1277 DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
1278
1279 DE_UNREF(size);
1280 DE_UNREF(flags);
1281
1282 *ppData = (deUint8*)memory->map() + offset;
1283
1284 return VK_SUCCESS;
1285 }
1286
unmapMemory(VkDevice device,VkDeviceMemory memHandle)1287 VKAPI_ATTR void VKAPI_CALL unmapMemory (VkDevice device, VkDeviceMemory memHandle)
1288 {
1289 DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
1290
1291 DE_UNREF(device);
1292
1293 memory->unmap();
1294 }
1295
1296 #ifndef CTS_USES_VULKANSC
1297
VKAPI_ATTR VkResult VKAPI_CALL getMemoryAndroidHardwareBufferANDROID (VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, pt::AndroidHardwareBufferPtr* pBuffer)
{
	DE_UNREF(device);

#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
	// The memory object is known to be AHB-backed here (allocateMemory only
	// creates ExternalDeviceMemoryAndroid for AHB requests).
	DeviceMemory* const memory = reinterpret_cast<ExternalDeviceMemoryAndroid*>(pInfo->memory.getInternal());
	ExternalDeviceMemoryAndroid* const androidMemory = static_cast<ExternalDeviceMemoryAndroid*>(memory);

	// Per the AHB extension, the returned buffer carries an extra reference
	// that the caller is responsible for releasing.
	AHardwareBuffer* hwbuffer = androidMemory->getHwBuffer();
	AHardwareBuffer_acquire(hwbuffer);
	pBuffer->internal = hwbuffer;
#else
	// Non-Android builds: silently succeed without touching the output.
	DE_UNREF(pInfo);
	DE_UNREF(pBuffer);
#endif

	return VK_SUCCESS;
}
1316
1317 #endif // CTS_USES_VULKANSC
1318
allocateDescriptorSets(VkDevice,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)1319 VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets (VkDevice, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
1320 {
1321 DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)pAllocateInfo->descriptorPool.getInternal());
1322
1323 for (deUint32 ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
1324 {
1325 try
1326 {
1327 pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
1328 }
1329 catch (const std::bad_alloc&)
1330 {
1331 for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
1332 delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());
1333
1334 return VK_ERROR_OUT_OF_HOST_MEMORY;
1335 }
1336 catch (VkResult res)
1337 {
1338 for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
1339 delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());
1340
1341 return res;
1342 }
1343 }
1344
1345 return VK_SUCCESS;
1346 }
1347
freeDescriptorSets(VkDevice,VkDescriptorPool descriptorPool,deUint32 count,const VkDescriptorSet * pDescriptorSets)1348 VKAPI_ATTR void VKAPI_CALL freeDescriptorSets (VkDevice, VkDescriptorPool descriptorPool, deUint32 count, const VkDescriptorSet* pDescriptorSets)
1349 {
1350 DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
1351
1352 for (deUint32 ndx = 0; ndx < count; ++ndx)
1353 poolImpl->free(pDescriptorSets[ndx]);
1354 }
1355
resetDescriptorPool(VkDevice,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags)1356 VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool (VkDevice, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags)
1357 {
1358 DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
1359
1360 poolImpl->reset();
1361
1362 return VK_SUCCESS;
1363 }
1364
allocateCommandBuffers(VkDevice device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers)1365 VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
1366 {
1367 DE_UNREF(device);
1368
1369 if (pAllocateInfo && pCommandBuffers)
1370 {
1371 CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)pAllocateInfo->commandPool.getInternal());
1372
1373 for (deUint32 ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
1374 pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);
1375 }
1376
1377 return VK_SUCCESS;
1378 }
1379
freeCommandBuffers(VkDevice device,VkCommandPool commandPool,deUint32 commandBufferCount,const VkCommandBuffer * pCommandBuffers)1380 VKAPI_ATTR void VKAPI_CALL freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
1381 {
1382 CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)commandPool.getInternal());
1383
1384 DE_UNREF(device);
1385
1386 for (deUint32 ndx = 0; ndx < commandBufferCount; ++ndx)
1387 poolImpl->free(pCommandBuffers[ndx]);
1388 }
1389
1390
// Allocates a dummy display mode handle owned by the given display.
// NOTE(review): the DE_UNREF(pAllocator) looks stale — pAllocator is in fact
// forwarded to allocateNonDispHandle on the next line.
VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR (VkPhysicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode)
{
	DE_UNREF(pAllocator);
	VK_NULL_RETURN((*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
}
1396
createSharedSwapchainsKHR(VkDevice device,deUint32 swapchainCount,const VkSwapchainCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchains)1397 VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR (VkDevice device, deUint32 swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains)
1398 {
1399 for (deUint32 ndx = 0; ndx < swapchainCount; ++ndx)
1400 {
1401 pSwapchains[ndx] = allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos+ndx, pAllocator);
1402 }
1403
1404 return VK_SUCCESS;
1405 }
1406
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceExternalBufferPropertiesKHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties)
{
	DE_UNREF(physicalDevice);
	DE_UNREF(pExternalBufferInfo);

	// Default: no external memory support for any handle type.
	pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0;
	pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
	pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0;

#ifndef CTS_USES_VULKANSC
	// Only Android hardware buffers are advertised as import/exportable
	// (non-SC builds only).
	if (pExternalBufferInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
	{
		pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
		pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
		pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
	}
#endif // CTS_USES_VULKANSC
}
1425
VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties2KHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties)
{
#ifndef CTS_USES_VULKANSC
	// Wraps the core query and adds external-memory (Android hardware
	// buffer) support checks when the caller chains the corresponding
	// structures.
	const VkPhysicalDeviceExternalImageFormatInfo* const externalInfo = findStructure<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
	VkExternalImageFormatProperties* const externalProperties = findStructure<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
	VkResult result;

	result = getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
	if (result != VK_SUCCESS)
		return result;

	if (externalInfo && externalInfo->handleType != 0)
	{
		// Android hardware buffers are the only supported external handle type.
		if (externalInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
			return VK_ERROR_FORMAT_NOT_SUPPORTED;

		// Restrict to the formats AHB interop supports.
		if (!(pImageFormatInfo->format == VK_FORMAT_R8G8B8A8_UNORM
			|| pImageFormatInfo->format == VK_FORMAT_R8G8B8_UNORM
			|| pImageFormatInfo->format == VK_FORMAT_R5G6B5_UNORM_PACK16
			|| pImageFormatInfo->format == VK_FORMAT_R16G16B16A16_SFLOAT
			|| pImageFormatInfo->format == VK_FORMAT_A2R10G10B10_UNORM_PACK32))
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		// AHB-backed images must be 2D.
		if (pImageFormatInfo->type != VK_IMAGE_TYPE_2D)
			return VK_ERROR_FORMAT_NOT_SUPPORTED;

		// Reject any usage bits beyond transfer/sampled/color-attachment.
		if ((pImageFormatInfo->usage & ~(VK_IMAGE_USAGE_TRANSFER_SRC_BIT
										| VK_IMAGE_USAGE_TRANSFER_DST_BIT
										| VK_IMAGE_USAGE_SAMPLED_BIT
										| VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
			!= 0)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		// Only mutable-format images are allowed among the create flags.
		if ((pImageFormatInfo->flags & ~(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
										/*| VK_IMAGE_CREATE_PROTECTED_BIT*/
										/*| VK_IMAGE_CREATE_EXTENDED_USAGE_BIT*/))
			!= 0)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if (externalProperties)
		{
			externalProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT
																				| VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT
																				| VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
			externalProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
			externalProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
		}
	}

	return VK_SUCCESS;
#else // CTS_USES_VULKANSC
	// Vulkan SC: no external memory, just forward to the core query.
	return getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
#endif // CTS_USES_VULKANSC
}
1486
1487 // \note getInstanceProcAddr is a little bit special:
1488 // vkNullDriverImpl.inl needs it to define s_platformFunctions but
1489 // getInstanceProcAddr() implementation needs other entry points from
1490 // vkNullDriverImpl.inl.
1491 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName);
1492
1493 #include "vkNullDriverImpl.inl"
1494
getInstanceProcAddr(VkInstance instance,const char * pName)1495 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName)
1496 {
1497 if (instance)
1498 {
1499 return reinterpret_cast<Instance*>(instance)->getProcAddr(pName);
1500 }
1501 else
1502 {
1503 const std::string name = pName;
1504
1505 if (name == "vkCreateInstance")
1506 return (PFN_vkVoidFunction)createInstance;
1507 else if (name == "vkEnumerateInstanceExtensionProperties")
1508 return (PFN_vkVoidFunction)enumerateInstanceExtensionProperties;
1509 else if (name == "vkEnumerateInstanceLayerProperties")
1510 return (PFN_vkVoidFunction)enumerateInstanceLayerProperties;
1511 else
1512 return (PFN_vkVoidFunction)DE_NULL;
1513 }
1514 }
1515
1516 } // extern "C"
1517
Instance(const VkInstanceCreateInfo *)1518 Instance::Instance (const VkInstanceCreateInfo*)
1519 : m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
1520 {
1521 }
1522
Device(VkPhysicalDevice,const VkDeviceCreateInfo *)1523 Device::Device (VkPhysicalDevice, const VkDeviceCreateInfo*)
1524 : m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
1525 {
1526 }
1527
1528 class NullDriverLibrary : public Library
1529 {
1530 public:
NullDriverLibrary(void)1531 NullDriverLibrary (void)
1532 : m_library (s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
1533 , m_driver (m_library)
1534 {}
1535
getPlatformInterface(void) const1536 const PlatformInterface& getPlatformInterface (void) const { return m_driver; }
getFunctionLibrary(void) const1537 const tcu::FunctionLibrary& getFunctionLibrary (void) const { return m_library; }
1538 private:
1539 const tcu::StaticFunctionLibrary m_library;
1540 const PlatformDriver m_driver;
1541 };
1542
1543 } // anonymous
1544
createNullDriver(void)1545 Library* createNullDriver (void)
1546 {
1547 return new NullDriverLibrary();
1548 }
1549
1550 } // vk
1551