1 /*-------------------------------------------------------------------------
2 * Vulkan CTS Framework
3 * --------------------
4 *
5 * Copyright (c) 2015 Google Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Null (do-nothing) Vulkan implementation.
22 *//*--------------------------------------------------------------------*/
23
24 #include "vkNullDriver.hpp"
25 #include "vkPlatform.hpp"
26 #include "vkImageUtil.hpp"
27 #include "vkQueryUtil.hpp"
28 #include "tcuFunctionLibrary.hpp"
29 #include "deMemory.h"
30
31 #if (DE_OS == DE_OS_ANDROID) && defined(__ANDROID_API_O__) && (DE_ANDROID_API >= __ANDROID_API_O__)
32 # define USE_ANDROID_O_HARDWARE_BUFFER
33 #endif
34 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
35 # include <android/hardware_buffer.h>
36 #endif
37
38 #include <stdexcept>
39 #include <algorithm>
40
41 namespace vk
42 {
43
44 namespace
45 {
46
47 using std::vector;
48
49 // Memory management
50
51 template<typename T>
52 void* allocateSystemMem (const VkAllocationCallbacks* pAllocator, VkSystemAllocationScope scope)
53 {
54 void* ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void*), scope);
55 if (!ptr)
56 throw std::bad_alloc();
57 return ptr;
58 }
59
60 void freeSystemMem (const VkAllocationCallbacks* pAllocator, void* mem)
61 {
62 pAllocator->pfnFree(pAllocator->pUserData, mem);
63 }
64
65 template<typename Object, typename Handle, typename Parent, typename CreateInfo>
66 Handle allocateHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
67 {
68 Object* obj = DE_NULL;
69
70 if (pAllocator)
71 {
72 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
73 try
74 {
75 obj = new (mem) Object(parent, pCreateInfo);
76 DE_ASSERT(obj == mem);
77 }
78 catch (...)
79 {
80 pAllocator->pfnFree(pAllocator->pUserData, mem);
81 throw;
82 }
83 }
84 else
85 obj = new Object(parent, pCreateInfo);
86
87 return reinterpret_cast<Handle>(obj);
88 }
89
90 template<typename Object, typename Handle, typename CreateInfo>
91 Handle allocateHandle (const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
92 {
93 Object* obj = DE_NULL;
94
95 if (pAllocator)
96 {
97 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
98 try
99 {
100 obj = new (mem) Object(pCreateInfo);
101 DE_ASSERT(obj == mem);
102 }
103 catch (...)
104 {
105 pAllocator->pfnFree(pAllocator->pUserData, mem);
106 throw;
107 }
108 }
109 else
110 obj = new Object(pCreateInfo);
111
112 return reinterpret_cast<Handle>(obj);
113 }
114
115 template<typename Object, typename Handle, typename Parent>
116 Handle allocateHandle (Parent parent, const VkAllocationCallbacks* pAllocator)
117 {
118 Object* obj = DE_NULL;
119
120 if (pAllocator)
121 {
122 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
123 try
124 {
125 obj = new (mem) Object(parent);
126 DE_ASSERT(obj == mem);
127 }
128 catch (...)
129 {
130 pAllocator->pfnFree(pAllocator->pUserData, mem);
131 throw;
132 }
133 }
134 else
135 obj = new Object(parent);
136
137 return reinterpret_cast<Handle>(obj);
138 }
139
140 template<typename Object, typename Handle>
141 void freeHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
142 {
143 Object* obj = reinterpret_cast<Object*>(handle);
144
145 if (pAllocator)
146 {
147 obj->~Object();
148 freeSystemMem(pAllocator, reinterpret_cast<void*>(obj));
149 }
150 else
151 delete obj;
152 }
153
154 template<typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
155 Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
156 {
157 Object* const obj = allocateHandle<Object, Object*>(parent, pCreateInfo, pAllocator);
158 return Handle((deUint64)(deUintptr)static_cast<BaseObject*>(obj));
159 }
160
161 template<typename Object, typename Handle, typename Parent, typename CreateInfo>
162 Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
163 {
164 return allocateNonDispHandle<Object, Object, Handle, Parent, CreateInfo>(parent, pCreateInfo, pAllocator);
165 }
166
167 template<typename Object, typename Handle, typename Parent>
168 Handle allocateNonDispHandle (Parent parent, const VkAllocationCallbacks* pAllocator)
169 {
170 Object* const obj = allocateHandle<Object, Object*>(parent, pAllocator);
171 return Handle((deUint64)(deUintptr)obj);
172 }
173
174 template<typename Object, typename Handle>
175 void freeNonDispHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
176 {
177 freeHandle<Object>(reinterpret_cast<Object*>((deUintptr)handle.getInternal()), pAllocator);
178 }
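// Note on the handle model used above: dispatchable handles (VkInstance, VkDevice, VkQueue,
// VkCommandBuffer) are plain object pointers cast to the handle type, while non-dispatchable
// handles carry the object pointer value inside a 64-bit handle and are unpacked again via
// Handle::getInternal() in freeNonDispHandle() and the API entry points below.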
179
180 // Object definitions
181
182 #define VK_NULL_RETURN(STMT) \
183 do { \
184 try { \
185 STMT; \
186 return VK_SUCCESS; \
187 } catch (const std::bad_alloc&) { \
188 return VK_ERROR_OUT_OF_HOST_MEMORY; \
189 } catch (VkResult res) { \
190 return res; \
191 } \
192 } while (deGetFalse())
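// Typical usage wraps a handle allocation (see e.g. createDisplayModeKHR below):
//   VK_NULL_RETURN((*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
// mapping std::bad_alloc to VK_ERROR_OUT_OF_HOST_MEMORY and a thrown VkResult to that value.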
193
194 // \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
195 #define VK_NULL_FUNC_ENTRY(NAME, FUNC) { #NAME, (deFunctionPtr)FUNC } // NOLINT(FUNC)
196
197 #define VK_NULL_DEFINE_DEVICE_OBJ(NAME) \
198 struct NAME \
199 { \
200 NAME (VkDevice, const Vk##NAME##CreateInfo*) {} \
201 }
202
203 #define VK_NULL_DEFINE_OBJ_WITH_POSTFIX(DEVICE_OR_INSTANCE, NAME, POSTFIX) \
204 struct NAME##POSTFIX \
205 { \
206 NAME##POSTFIX (DEVICE_OR_INSTANCE, const Vk##NAME##CreateInfo##POSTFIX*) {} \
207 };
208
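// For example, VK_NULL_DEFINE_DEVICE_OBJ(Fence) expands to roughly:
//   struct Fence { Fence (VkDevice, const VkFenceCreateInfo*) {} };
// i.e. each object defined below is an empty placeholder that merely accepts its create info.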
209 VK_NULL_DEFINE_DEVICE_OBJ(Fence);
210 VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
211 VK_NULL_DEFINE_DEVICE_OBJ(Event);
212 VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
213 VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
214 VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
215 VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
216 VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
217 VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
218 VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
219 VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
220 VK_NULL_DEFINE_DEVICE_OBJ(SamplerYcbcrConversion);
221 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Swapchain, KHR)
222 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugUtilsMessenger, EXT)
223
224 #ifndef CTS_USES_VULKANSC
225 VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
226 VK_NULL_DEFINE_DEVICE_OBJ(DescriptorUpdateTemplate);
227 VK_NULL_DEFINE_DEVICE_OBJ(PrivateDataSlot);
228 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugReportCallback, EXT)
229 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuModule, NVX)
230 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuFunction, NVX)
231 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Micromap, EXT)
232 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, OpticalFlowSession, NV)
233 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, IndirectCommandsLayout, NV)
234 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, NV)
235 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, KHR)
236 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSession, KHR)
237 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSessionParameters, KHR)
238 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, ValidationCache, EXT)
239 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, BufferCollection, FUCHSIA)
240 #endif // CTS_USES_VULKANSC
241
242 class Instance
243 {
244 public:
245 Instance (const VkInstanceCreateInfo* instanceInfo);
246 ~Instance (void) {}
247
248 PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }
249
250 private:
251 const tcu::StaticFunctionLibrary m_functions;
252 };
253
254 class SurfaceKHR
255 {
256 public:
257 #ifndef CTS_USES_VULKANSC
258 SurfaceKHR (VkInstance, const VkXlibSurfaceCreateInfoKHR*) {}
259 SurfaceKHR (VkInstance, const VkXcbSurfaceCreateInfoKHR*) {}
260 SurfaceKHR (VkInstance, const VkWaylandSurfaceCreateInfoKHR*) {}
261 SurfaceKHR (VkInstance, const VkAndroidSurfaceCreateInfoKHR*) {}
262 SurfaceKHR (VkInstance, const VkWin32SurfaceCreateInfoKHR*) {}
263 SurfaceKHR (VkInstance, const VkViSurfaceCreateInfoNN*) {}
264 SurfaceKHR (VkInstance, const VkIOSSurfaceCreateInfoMVK*) {}
265 SurfaceKHR (VkInstance, const VkMacOSSurfaceCreateInfoMVK*) {}
266 SurfaceKHR (VkInstance, const VkImagePipeSurfaceCreateInfoFUCHSIA*) {}
267 SurfaceKHR (VkInstance, const VkStreamDescriptorSurfaceCreateInfoGGP*) {}
268 SurfaceKHR (VkInstance, const VkMetalSurfaceCreateInfoEXT*) {}
269 SurfaceKHR (VkInstance, const VkScreenSurfaceCreateInfoQNX*) {}
270 #endif // CTS_USES_VULKANSC
271 SurfaceKHR (VkInstance, const VkDisplaySurfaceCreateInfoKHR*) {}
272 SurfaceKHR (VkInstance, const VkHeadlessSurfaceCreateInfoEXT*) {}
273 ~SurfaceKHR (void) {}
274 };
275
276 class DisplayModeKHR
277 {
278 public:
279 DisplayModeKHR (VkDisplayKHR, const VkDisplayModeCreateInfoKHR*) {}
280 ~DisplayModeKHR (void) {}
281 };
282
283 class Device
284 {
285 public:
286 Device (VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* deviceInfo);
287 ~Device (void) {}
288
289 PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }
290
291 private:
292 const tcu::StaticFunctionLibrary m_functions;
293 };
294
295 class Pipeline
296 {
297 public:
298 Pipeline (VkDevice, const VkGraphicsPipelineCreateInfo*) {}
299 Pipeline (VkDevice, const VkComputePipelineCreateInfo*) {}
300 #ifndef CTS_USES_VULKANSC
301 Pipeline (VkDevice, const VkRayTracingPipelineCreateInfoNV*) {}
302 Pipeline (VkDevice, const VkRayTracingPipelineCreateInfoKHR*) {}
303 #endif // CTS_USES_VULKANSC
304 };
305
306 class RenderPass
307 {
308 public:
309 RenderPass (VkDevice, const VkRenderPassCreateInfo*) {}
310 RenderPass (VkDevice, const VkRenderPassCreateInfo2*) {}
311 };
312
313 class Buffer
314 {
315 public:
316 Buffer (VkDevice, const VkBufferCreateInfo* pCreateInfo)
317 : m_size (pCreateInfo->size)
318 {
319 }
320
321 VkDeviceSize getSize (void) const { return m_size; }
322
323 private:
324 const VkDeviceSize m_size;
325 };
326
327 VkExternalMemoryHandleTypeFlags getExternalTypesHandle (const VkImageCreateInfo* pCreateInfo)
328 {
329 const VkExternalMemoryImageCreateInfo* const externalInfo = findStructure<VkExternalMemoryImageCreateInfo> (pCreateInfo->pNext);
330
331 return externalInfo ? externalInfo->handleTypes : 0u;
332 }
333
334 class Image
335 {
336 public:
337 Image (VkDevice, const VkImageCreateInfo* pCreateInfo)
338 : m_imageType (pCreateInfo->imageType)
339 , m_format (pCreateInfo->format)
340 , m_extent (pCreateInfo->extent)
341 , m_arrayLayers (pCreateInfo->arrayLayers)
342 , m_samples (pCreateInfo->samples)
343 , m_usage (pCreateInfo->usage)
344 , m_flags (pCreateInfo->flags)
345 , m_externalHandleTypes (getExternalTypesHandle(pCreateInfo))
346 {
347 }
348
349 VkImageType getImageType (void) const { return m_imageType; }
350 VkFormat getFormat (void) const { return m_format; }
351 VkExtent3D getExtent (void) const { return m_extent; }
352 deUint32 getArrayLayers (void) const { return m_arrayLayers; }
353 VkSampleCountFlagBits getSamples (void) const { return m_samples; }
354 VkImageUsageFlags getUsage (void) const { return m_usage; }
355 VkImageCreateFlags getFlags (void) const { return m_flags; }
356 VkExternalMemoryHandleTypeFlags getExternalHandleTypes (void) const { return m_externalHandleTypes; }
357
358 private:
359 const VkImageType m_imageType;
360 const VkFormat m_format;
361 const VkExtent3D m_extent;
362 const deUint32 m_arrayLayers;
363 const VkSampleCountFlagBits m_samples;
364 const VkImageUsageFlags m_usage;
365 const VkImageCreateFlags m_flags;
366 const VkExternalMemoryHandleTypeFlags m_externalHandleTypes;
367 };
368
369 void* allocateHeap (const VkMemoryAllocateInfo* pAllocInfo)
370 {
371 // \todo [2015-12-03 pyry] Alignment requirements?
372 // \todo [2015-12-03 pyry] Empty allocations okay?
373 if (pAllocInfo->allocationSize > 0)
374 {
375 void* const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
376 if (!heapPtr)
377 throw std::bad_alloc();
378 return heapPtr;
379 }
380 else
381 return DE_NULL;
382 }
383
384 void freeHeap (void* ptr)
385 {
386 deFree(ptr);
387 }
388
389 class DeviceMemory
390 {
391 public:
392 virtual ~DeviceMemory (void) {}
393 virtual void* map (void) = 0;
394 virtual void unmap (void) = 0;
395 };
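// Two implementations follow: PrivateDeviceMemory backs the allocation with plain host heap
// memory, and (in Android O+ builds) ExternalDeviceMemoryAndroid backs it with an
// AHardwareBuffer so that external-memory import/export paths can be exercised.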
396
397 class PrivateDeviceMemory : public DeviceMemory
398 {
399 public:
400 PrivateDeviceMemory (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
401 : m_memory(allocateHeap(pAllocInfo))
402 {
403 // \todo [2016-08-03 pyry] In some cases leaving data uninitialized would help valgrind analysis,
404 // but currently it mostly hinders it.
405 if (m_memory)
406 deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
407 }
408 virtual ~PrivateDeviceMemory (void)
409 {
410 freeHeap(m_memory);
411 }
412
413 virtual void* map (void) /*override*/ { return m_memory; }
414 virtual void unmap (void) /*override*/ {}
415
416 private:
417 void* const m_memory;
418 };
419
420 #ifndef CTS_USES_VULKANSC
421
422 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
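// Resolves the AHardwareBuffer backing an allocation: an imported buffer is acquired and
// reused; otherwise, for exportable allocations, a new buffer is allocated, described either
// from the dedicated image or as a BLOB of allocationSize bytes.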
423 AHardwareBuffer* findOrCreateHwBuffer (const VkMemoryAllocateInfo* pAllocInfo)
424 {
425 const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocInfo->pNext);
426 const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocInfo->pNext);
427 const VkMemoryDedicatedAllocateInfo* const dedicatedInfo = findStructure<VkMemoryDedicatedAllocateInfo>(pAllocInfo->pNext);
428 const Image* const image = dedicatedInfo && !!dedicatedInfo->image ? reinterpret_cast<const Image*>(dedicatedInfo->image.getInternal()) : DE_NULL;
429 AHardwareBuffer* hwbuffer = DE_NULL;
430
431 // Import and export aren't mutually exclusive; we can have both simultaneously.
432 DE_ASSERT((importInfo && importInfo->buffer.internal) ||
433 (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0));
434
435 if (importInfo && importInfo->buffer.internal)
436 {
437 hwbuffer = (AHardwareBuffer*)importInfo->buffer.internal;
438 AHardwareBuffer_acquire(hwbuffer);
439 }
440 else if (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
441 {
442 AHardwareBuffer_Desc hwbufferDesc;
443 deMemset(&hwbufferDesc, 0, sizeof(hwbufferDesc));
444
445 if (image)
446 {
447 hwbufferDesc.width = image->getExtent().width;
448 hwbufferDesc.height = image->getExtent().height;
449 hwbufferDesc.layers = image->getArrayLayers();
450 switch (image->getFormat())
451 {
452 case VK_FORMAT_R8G8B8A8_UNORM:
453 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
454 break;
455 case VK_FORMAT_R8G8B8_UNORM:
456 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
457 break;
458 case VK_FORMAT_R5G6B5_UNORM_PACK16:
459 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
460 break;
461 case VK_FORMAT_R16G16B16A16_SFLOAT:
462 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
463 break;
464 case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
465 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
466 break;
467 default:
468 DE_FATAL("Unsupported image format for Android hardware buffer export");
469 break;
470 }
471 if ((image->getUsage() & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
472 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
473 if ((image->getUsage() & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0)
474 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
475 // if ((image->getFlags() & VK_IMAGE_CREATE_PROTECTED_BIT) != 0)
476 // hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
477
478 // Make sure we have at least one AHB GPU usage, even if the image doesn't have any
479 // Vulkan usages with corresponding AHB GPU usages.
480 if ((image->getUsage() & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) == 0)
481 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
482 }
483 else
484 {
485 hwbufferDesc.width = static_cast<deUint32>(pAllocInfo->allocationSize);
486 hwbufferDesc.height = 1;
487 hwbufferDesc.layers = 1;
488 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_BLOB;
489 hwbufferDesc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
490 }
491
492 AHardwareBuffer_allocate(&hwbufferDesc, &hwbuffer);
493 }
494
495 return hwbuffer;
496 }
497
498 class ExternalDeviceMemoryAndroid : public DeviceMemory
499 {
500 public:
501 ExternalDeviceMemoryAndroid (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
502 : m_hwbuffer(findOrCreateHwBuffer(pAllocInfo))
503 {}
504 virtual ~ExternalDeviceMemoryAndroid (void)
505 {
506 if (m_hwbuffer)
507 AHardwareBuffer_release(m_hwbuffer);
508 }
509
510 virtual void* map (void) /*override*/
511 {
512 void* p;
513 AHardwareBuffer_lock(m_hwbuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, NULL, &p);
514 return p;
515 }
516
517 virtual void unmap (void) /*override*/ { AHardwareBuffer_unlock(m_hwbuffer, NULL); }
518
519 AHardwareBuffer* getHwBuffer (void) { return m_hwbuffer; }
520
521 private:
522 AHardwareBuffer* const m_hwbuffer;
523 };
524 #endif // defined(USE_ANDROID_O_HARDWARE_BUFFER)
525
526 #endif // CTS_USES_VULKANSC
527
528 class DeferredOperationKHR
529 {
530 public:
531 DeferredOperationKHR (VkDevice)
532 {}
533 };
534
535 class CommandBuffer
536 {
537 public:
538 CommandBuffer (VkDevice, VkCommandPool, VkCommandBufferLevel)
539 {}
540 };
541
542 class CommandPool
543 {
544 public:
545 CommandPool (VkDevice device, const VkCommandPoolCreateInfo*)
546 : m_device(device)
547 {}
548 #ifndef CTS_USES_VULKANSC
549 ~CommandPool (void);
550 #endif // CTS_USES_VULKANSC
551
552 VkCommandBuffer allocate (VkCommandBufferLevel level);
553 void free (VkCommandBuffer buffer);
554
555 private:
556 const VkDevice m_device;
557
558 vector<CommandBuffer*> m_buffers;
559 };
560
561 #ifndef CTS_USES_VULKANSC
562
563 CommandPool::~CommandPool (void)
564 {
565 for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
566 delete m_buffers[ndx];
567 }
568
569 #endif // CTS_USES_VULKANSC
570
571 VkCommandBuffer CommandPool::allocate (VkCommandBufferLevel level)
572 {
573 CommandBuffer* const impl = new CommandBuffer(m_device, VkCommandPool(reinterpret_cast<deUintptr>(this)), level);
574
575 try
576 {
577 m_buffers.push_back(impl);
578 }
579 catch (...)
580 {
581 delete impl;
582 throw;
583 }
584
585 return reinterpret_cast<VkCommandBuffer>(impl);
586 }
587
588 void CommandPool::free (VkCommandBuffer buffer)
589 {
590 CommandBuffer* const impl = reinterpret_cast<CommandBuffer*>(buffer);
591
592 for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
593 {
594 if (m_buffers[ndx] == impl)
595 {
596 std::swap(m_buffers[ndx], m_buffers.back());
597 m_buffers.pop_back();
598 delete impl;
599 return;
600 }
601 }
602
603 DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
604 }
605
606 class DescriptorSet
607 {
608 public:
609 DescriptorSet (VkDevice, VkDescriptorPool, VkDescriptorSetLayout) {}
610 };
611
612 class DescriptorPool
613 {
614 public:
615 DescriptorPool (VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo)
616 : m_device (device)
617 , m_flags (pCreateInfo->flags)
618 {}
619 ~DescriptorPool (void)
620 {
621 reset();
622 }
623
624 VkDescriptorSet allocate (VkDescriptorSetLayout setLayout);
625 void free (VkDescriptorSet set);
626
627 void reset (void);
628
629 private:
630 const VkDevice m_device;
631 const VkDescriptorPoolCreateFlags m_flags;
632
633 vector<DescriptorSet*> m_managedSets;
634 };
635
636 VkDescriptorSet DescriptorPool::allocate (VkDescriptorSetLayout setLayout)
637 {
638 DescriptorSet* const impl = new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<deUintptr>(this)), setLayout);
639
640 try
641 {
642 m_managedSets.push_back(impl);
643 }
644 catch (...)
645 {
646 delete impl;
647 throw;
648 }
649
650 return VkDescriptorSet(reinterpret_cast<deUintptr>(impl));
651 }
652
653 void DescriptorPool::free (VkDescriptorSet set)
654 {
655 DescriptorSet* const impl = reinterpret_cast<DescriptorSet*>((deUintptr)set.getInternal());
656
657 DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
658 DE_UNREF(m_flags);
659
660 for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
661 {
662 if (m_managedSets[ndx] == impl)
663 {
664 std::swap(m_managedSets[ndx], m_managedSets.back());
665 m_managedSets.pop_back();
666 delete impl;
667 return;
668 }
669 }
670
671 DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
672 }
673
674 void DescriptorPool::reset (void)
675 {
676 for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
677 delete m_managedSets[ndx];
678 m_managedSets.clear();
679 }
680
681 // API implementation
682
683 extern "C"
684 {
685
686 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr (VkDevice device, const char* pName)
687 {
688 return reinterpret_cast<Device*>(device)->getProcAddr(pName);
689 }
690
691 VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
692 {
693 deUint32 allocNdx;
694 try
695 {
696 for (allocNdx = 0; allocNdx < count; allocNdx++)
697 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
698
699 return VK_SUCCESS;
700 }
701 catch (const std::bad_alloc&)
702 {
703 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
704 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
705
706 return VK_ERROR_OUT_OF_HOST_MEMORY;
707 }
708 catch (VkResult err)
709 {
710 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
711 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
712
713 return err;
714 }
715 }
716
717 VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
718 {
719 deUint32 allocNdx;
720 try
721 {
722 for (allocNdx = 0; allocNdx < count; allocNdx++)
723 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
724
725 return VK_SUCCESS;
726 }
727 catch (const std::bad_alloc&)
728 {
729 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
730 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
731
732 return VK_ERROR_OUT_OF_HOST_MEMORY;
733 }
734 catch (VkResult err)
735 {
736 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
737 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
738
739 return err;
740 }
741 }
742
743 #ifndef CTS_USES_VULKANSC
744
745 VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesNV (VkDevice device, VkPipelineCache, deUint32 count, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
746 {
747 deUint32 allocNdx;
748 try
749 {
750 for (allocNdx = 0; allocNdx < count; allocNdx++)
751 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
752
753 return VK_SUCCESS;
754 }
755 catch (const std::bad_alloc&)
756 {
757 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
758 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
759
760 return VK_ERROR_OUT_OF_HOST_MEMORY;
761 }
762 catch (VkResult err)
763 {
764 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
765 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
766
767 return err;
768 }
769 }
770
771 VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesKHR (VkDevice device, VkPipelineCache, deUint32 count, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
772 {
773 deUint32 allocNdx;
774 try
775 {
776 for (allocNdx = 0; allocNdx < count; allocNdx++)
777 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
778
779 return VK_SUCCESS;
780 }
781 catch (const std::bad_alloc&)
782 {
783 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
784 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
785
786 return VK_ERROR_OUT_OF_HOST_MEMORY;
787 }
788 catch (VkResult err)
789 {
790 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
791 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
792
793 return err;
794 }
795 }
796
797 #endif // CTS_USES_VULKANSC
798
799 VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices (VkInstance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pDevices)
800 {
801 if (pDevices && *pPhysicalDeviceCount >= 1u)
802 *pDevices = reinterpret_cast<VkPhysicalDevice>((void*)(deUintptr)1u);
803
804 *pPhysicalDeviceCount = 1;
805
806 return VK_SUCCESS;
807 }
808
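// Shared helper implementing the usual Vulkan two-call enumeration pattern: with a null
// pProperties only the count is returned; otherwise at most *pPropertyCount entries are
// copied and VK_INCOMPLETE is returned if the caller-provided array was too small.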
809 VkResult enumerateExtensions (deUint32 numExtensions, const VkExtensionProperties* extensions, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
810 {
811 const deUint32 dstSize = pPropertyCount ? *pPropertyCount : 0;
812
813 if (pPropertyCount)
814 *pPropertyCount = numExtensions;
815
816 if (pProperties)
817 {
818 for (deUint32 ndx = 0; ndx < de::min(numExtensions, dstSize); ++ndx)
819 pProperties[ndx] = extensions[ndx];
820
821 if (dstSize < numExtensions)
822 return VK_INCOMPLETE;
823 }
824
825 return VK_SUCCESS;
826 }
827
828 VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties (const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
829 {
830 static const VkExtensionProperties s_extensions[] =
831 {
832 { "VK_KHR_get_physical_device_properties2", 1u },
833 { "VK_KHR_external_memory_capabilities", 1u },
834 };
835
836 if (!pLayerName)
837 return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
838 else
839 return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
840 }
841
842 VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties (VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
843 {
844 DE_UNREF(physicalDevice);
845
846 static const VkExtensionProperties s_extensions[] =
847 {
848 { "VK_KHR_bind_memory2", 1u },
849 { "VK_KHR_external_memory", 1u },
850 { "VK_KHR_get_memory_requirements2", 1u },
851 { "VK_KHR_maintenance1", 1u },
852 { "VK_KHR_sampler_ycbcr_conversion", 1u },
853 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
854 { "VK_ANDROID_external_memory_android_hardware_buffer", 1u },
855 #endif
856 };
857
858 if (!pLayerName)
859 return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
860 else
861 return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
862 }
863
864 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
865 {
866 DE_UNREF(physicalDevice);
867
868 // Enable all features to allow as many tests to run as possible
869 pFeatures->robustBufferAccess = VK_TRUE;
870 pFeatures->fullDrawIndexUint32 = VK_TRUE;
871 pFeatures->imageCubeArray = VK_TRUE;
872 pFeatures->independentBlend = VK_TRUE;
873 pFeatures->geometryShader = VK_TRUE;
874 pFeatures->tessellationShader = VK_TRUE;
875 pFeatures->sampleRateShading = VK_TRUE;
876 pFeatures->dualSrcBlend = VK_TRUE;
877 pFeatures->logicOp = VK_TRUE;
878 pFeatures->multiDrawIndirect = VK_TRUE;
879 pFeatures->drawIndirectFirstInstance = VK_TRUE;
880 pFeatures->depthClamp = VK_TRUE;
881 pFeatures->depthBiasClamp = VK_TRUE;
882 pFeatures->fillModeNonSolid = VK_TRUE;
883 pFeatures->depthBounds = VK_TRUE;
884 pFeatures->wideLines = VK_TRUE;
885 pFeatures->largePoints = VK_TRUE;
886 pFeatures->alphaToOne = VK_TRUE;
887 pFeatures->multiViewport = VK_TRUE;
888 pFeatures->samplerAnisotropy = VK_TRUE;
889 pFeatures->textureCompressionETC2 = VK_TRUE;
890 pFeatures->textureCompressionASTC_LDR = VK_TRUE;
891 pFeatures->textureCompressionBC = VK_TRUE;
892 pFeatures->occlusionQueryPrecise = VK_TRUE;
893 pFeatures->pipelineStatisticsQuery = VK_TRUE;
894 pFeatures->vertexPipelineStoresAndAtomics = VK_TRUE;
895 pFeatures->fragmentStoresAndAtomics = VK_TRUE;
896 pFeatures->shaderTessellationAndGeometryPointSize = VK_TRUE;
897 pFeatures->shaderImageGatherExtended = VK_TRUE;
898 pFeatures->shaderStorageImageExtendedFormats = VK_TRUE;
899 pFeatures->shaderStorageImageMultisample = VK_TRUE;
900 pFeatures->shaderStorageImageReadWithoutFormat = VK_TRUE;
901 pFeatures->shaderStorageImageWriteWithoutFormat = VK_TRUE;
902 pFeatures->shaderUniformBufferArrayDynamicIndexing = VK_TRUE;
903 pFeatures->shaderSampledImageArrayDynamicIndexing = VK_TRUE;
904 pFeatures->shaderStorageBufferArrayDynamicIndexing = VK_TRUE;
905 pFeatures->shaderStorageImageArrayDynamicIndexing = VK_TRUE;
906 pFeatures->shaderClipDistance = VK_TRUE;
907 pFeatures->shaderCullDistance = VK_TRUE;
908 pFeatures->shaderFloat64 = VK_TRUE;
909 pFeatures->shaderInt64 = VK_TRUE;
910 pFeatures->shaderInt16 = VK_TRUE;
911 pFeatures->shaderResourceResidency = VK_TRUE;
912 pFeatures->shaderResourceMinLod = VK_TRUE;
913 pFeatures->sparseBinding = VK_TRUE;
914 pFeatures->sparseResidencyBuffer = VK_TRUE;
915 pFeatures->sparseResidencyImage2D = VK_TRUE;
916 pFeatures->sparseResidencyImage3D = VK_TRUE;
917 pFeatures->sparseResidency2Samples = VK_TRUE;
918 pFeatures->sparseResidency4Samples = VK_TRUE;
919 pFeatures->sparseResidency8Samples = VK_TRUE;
920 pFeatures->sparseResidency16Samples = VK_TRUE;
921 pFeatures->sparseResidencyAliased = VK_TRUE;
922 pFeatures->variableMultisampleRate = VK_TRUE;
923 pFeatures->inheritedQueries = VK_TRUE;
924 }
925
926 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties (VkPhysicalDevice, VkPhysicalDeviceProperties* props)
927 {
928 deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));
929
930 props->apiVersion = VK_API_VERSION_1_1;
931 props->driverVersion = 1u;
932 props->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
933
934 deMemcpy(props->deviceName, "null", 5);
935
936 // Spec minmax
937 props->limits.maxImageDimension1D = 4096;
938 props->limits.maxImageDimension2D = 4096;
939 props->limits.maxImageDimension3D = 256;
940 props->limits.maxImageDimensionCube = 4096;
941 props->limits.maxImageArrayLayers = 256;
942 props->limits.maxTexelBufferElements = 65536;
943 props->limits.maxUniformBufferRange = 16384;
944 props->limits.maxStorageBufferRange = 1u<<27;
945 props->limits.maxPushConstantsSize = 128;
946 props->limits.maxMemoryAllocationCount = 4096;
947 props->limits.maxSamplerAllocationCount = 4000;
948 props->limits.bufferImageGranularity = 131072;
949 props->limits.sparseAddressSpaceSize = 1u<<31;
950 props->limits.maxBoundDescriptorSets = 4;
951 props->limits.maxPerStageDescriptorSamplers = 16;
952 props->limits.maxPerStageDescriptorUniformBuffers = 12;
953 props->limits.maxPerStageDescriptorStorageBuffers = 4;
954 props->limits.maxPerStageDescriptorSampledImages = 16;
955 props->limits.maxPerStageDescriptorStorageImages = 4;
956 props->limits.maxPerStageDescriptorInputAttachments = 4;
957 props->limits.maxPerStageResources = 128;
958 props->limits.maxDescriptorSetSamplers = 96;
959 props->limits.maxDescriptorSetUniformBuffers = 72;
960 props->limits.maxDescriptorSetUniformBuffersDynamic = 8;
961 props->limits.maxDescriptorSetStorageBuffers = 24;
962 props->limits.maxDescriptorSetStorageBuffersDynamic = 4;
963 props->limits.maxDescriptorSetSampledImages = 96;
964 props->limits.maxDescriptorSetStorageImages = 24;
965 props->limits.maxDescriptorSetInputAttachments = 4;
966 props->limits.maxVertexInputAttributes = 16;
967 props->limits.maxVertexInputBindings = 16;
968 props->limits.maxVertexInputAttributeOffset = 2047;
969 props->limits.maxVertexInputBindingStride = 2048;
970 props->limits.maxVertexOutputComponents = 64;
971 props->limits.maxTessellationGenerationLevel = 64;
972 props->limits.maxTessellationPatchSize = 32;
973 props->limits.maxTessellationControlPerVertexInputComponents = 64;
974 props->limits.maxTessellationControlPerVertexOutputComponents = 64;
975 props->limits.maxTessellationControlPerPatchOutputComponents = 120;
976 props->limits.maxTessellationControlTotalOutputComponents = 2048;
977 props->limits.maxTessellationEvaluationInputComponents = 64;
978 props->limits.maxTessellationEvaluationOutputComponents = 64;
979 props->limits.maxGeometryShaderInvocations = 32;
980 props->limits.maxGeometryInputComponents = 64;
981 props->limits.maxGeometryOutputComponents = 64;
982 props->limits.maxGeometryOutputVertices = 256;
983 props->limits.maxGeometryTotalOutputComponents = 1024;
984 props->limits.maxFragmentInputComponents = 64;
985 props->limits.maxFragmentOutputAttachments = 4;
986 props->limits.maxFragmentDualSrcAttachments = 1;
987 props->limits.maxFragmentCombinedOutputResources = 4;
988 props->limits.maxComputeSharedMemorySize = 16384;
989 props->limits.maxComputeWorkGroupCount[0] = 65535;
990 props->limits.maxComputeWorkGroupCount[1] = 65535;
991 props->limits.maxComputeWorkGroupCount[2] = 65535;
992 props->limits.maxComputeWorkGroupInvocations = 128;
993 props->limits.maxComputeWorkGroupSize[0] = 128;
994 props->limits.maxComputeWorkGroupSize[1] = 128;
995 props->limits.maxComputeWorkGroupSize[2] = 128;
996 props->limits.subPixelPrecisionBits = 4;
997 props->limits.subTexelPrecisionBits = 4;
998 props->limits.mipmapPrecisionBits = 4;
999 props->limits.maxDrawIndexedIndexValue = 0xffffffffu;
1000 props->limits.maxDrawIndirectCount = (1u<<16) - 1u;
1001 props->limits.maxSamplerLodBias = 2.0f;
1002 props->limits.maxSamplerAnisotropy = 16.0f;
1003 props->limits.maxViewports = 16;
1004 props->limits.maxViewportDimensions[0] = 4096;
1005 props->limits.maxViewportDimensions[1] = 4096;
1006 props->limits.viewportBoundsRange[0] = -8192.f;
1007 props->limits.viewportBoundsRange[1] = 8191.f;
1008 props->limits.viewportSubPixelBits = 0;
1009 props->limits.minMemoryMapAlignment = 64;
1010 props->limits.minTexelBufferOffsetAlignment = 256;
1011 props->limits.minUniformBufferOffsetAlignment = 256;
1012 props->limits.minStorageBufferOffsetAlignment = 256;
1013 props->limits.minTexelOffset = -8;
1014 props->limits.maxTexelOffset = 7;
1015 props->limits.minTexelGatherOffset = -8;
1016 props->limits.maxTexelGatherOffset = 7;
1017 props->limits.minInterpolationOffset = -0.5f;
1018 props->limits.maxInterpolationOffset = 0.5f; // -1ulp
1019 props->limits.subPixelInterpolationOffsetBits = 4;
1020 props->limits.maxFramebufferWidth = 4096;
1021 props->limits.maxFramebufferHeight = 4096;
1022 props->limits.maxFramebufferLayers = 256;
1023 props->limits.framebufferColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1024 props->limits.framebufferDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1025 props->limits.framebufferStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1026 props->limits.framebufferNoAttachmentsSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1027 props->limits.maxColorAttachments = 4;
1028 props->limits.sampledImageColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1029 props->limits.sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT;
1030 props->limits.sampledImageDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1031 props->limits.sampledImageStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1032 props->limits.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1033 props->limits.maxSampleMaskWords = 1;
1034 props->limits.timestampComputeAndGraphics = VK_TRUE;
1035 props->limits.timestampPeriod = 1.0f;
1036 props->limits.maxClipDistances = 8;
1037 props->limits.maxCullDistances = 8;
1038 props->limits.maxCombinedClipAndCullDistances = 8;
1039 props->limits.discreteQueuePriorities = 2;
1040 props->limits.pointSizeRange[0] = 1.0f;
1041 props->limits.pointSizeRange[1] = 64.0f; // -1ulp
1042 props->limits.lineWidthRange[0] = 1.0f;
1043 props->limits.lineWidthRange[1] = 8.0f; // -1ulp
1044 props->limits.pointSizeGranularity = 1.0f;
1045 props->limits.lineWidthGranularity = 1.0f;
1046 props->limits.strictLines = 0;
1047 props->limits.standardSampleLocations = VK_TRUE;
1048 props->limits.optimalBufferCopyOffsetAlignment = 256;
1049 props->limits.optimalBufferCopyRowPitchAlignment = 256;
1050 props->limits.nonCoherentAtomSize = 128;
1051 }
1052
1053 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice, deUint32* count, VkQueueFamilyProperties* props)
1054 {
1055 if (props && *count >= 1u)
1056 {
1057 deMemset(props, 0, sizeof(VkQueueFamilyProperties));
1058
1059 props->queueCount = 4u;
1060 props->queueFlags = VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT;
1061 props->timestampValidBits = 64;
1062 }
1063
1064 *count = 1u;
1065 }
1066
1067 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties (VkPhysicalDevice, VkPhysicalDeviceMemoryProperties* props)
1068 {
1069 deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));
1070
1071 props->memoryTypeCount = 1u;
1072 props->memoryTypes[0].heapIndex = 0u;
1073 props->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
1074 | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
1075 | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1076
1077 props->memoryHeapCount = 1u;
1078 props->memoryHeaps[0].size = 1ull << 31;
1079 props->memoryHeaps[0].flags = 0u;
1080 }
1081
1082 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties (VkPhysicalDevice, VkFormat format, VkFormatProperties* pFormatProperties)
1083 {
1084 const VkFormatFeatureFlags allFeatures = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
1085 | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT
1086 | VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT
1087 | VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT
1088 | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT
1089 | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT
1090 | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT
1091 | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
1092 | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT
1093 | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT
1094 | VK_FORMAT_FEATURE_BLIT_SRC_BIT
1095 | VK_FORMAT_FEATURE_BLIT_DST_BIT
1096 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT
1097 | VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
1098 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT
1099 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT
1100 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT
1101 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT
1102 | VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT;
1103
1104 pFormatProperties->linearTilingFeatures = allFeatures;
1105 pFormatProperties->optimalTilingFeatures = allFeatures;
1106 pFormatProperties->bufferFeatures = allFeatures;
1107
1108 if (isYCbCrFormat(format) && getPlaneCount(format) > 1)
1109 pFormatProperties->optimalTilingFeatures |= VK_FORMAT_FEATURE_DISJOINT_BIT;
1110 }
1111
1112 VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
1113 {
1114 DE_UNREF(physicalDevice);
1115 DE_UNREF(format);
1116 DE_UNREF(type);
1117 DE_UNREF(tiling);
1118 DE_UNREF(usage);
1119 DE_UNREF(flags);
1120
1121 pImageFormatProperties->maxArrayLayers = 8;
1122 pImageFormatProperties->maxExtent.width = 4096;
1123 pImageFormatProperties->maxExtent.height = 4096;
1124 pImageFormatProperties->maxExtent.depth = 4096;
1125 pImageFormatProperties->maxMipLevels = deLog2Ceil32(4096) + 1;
1126 pImageFormatProperties->maxResourceSize = 64u * 1024u * 1024u;
1127 pImageFormatProperties->sampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1128
1129 return VK_SUCCESS;
1130 }
1131
1132 VKAPI_ATTR void VKAPI_CALL getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue)
1133 {
1134 DE_UNREF(device);
1135 DE_UNREF(queueFamilyIndex);
1136
1137 if (pQueue)
1138 *pQueue = reinterpret_cast<VkQueue>((deUint64)queueIndex + 1);
1139 }
1140
1141 VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements (VkDevice, VkBuffer bufferHandle, VkMemoryRequirements* requirements)
1142 {
1143 const Buffer* buffer = reinterpret_cast<const Buffer*>(bufferHandle.getInternal());
1144
1145 requirements->memoryTypeBits = 1u;
1146 requirements->size = buffer->getSize();
1147 requirements->alignment = (VkDeviceSize)1u;
1148 }
1149
1150 VkDeviceSize getPackedImageDataSize (VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
1151 {
1152 return (VkDeviceSize)getPixelSize(mapVkFormat(format))
1153 * (VkDeviceSize)extent.width
1154 * (VkDeviceSize)extent.height
1155 * (VkDeviceSize)extent.depth
1156 * (VkDeviceSize)samples;
1157 }
1158
1159 VkDeviceSize getCompressedImageDataSize (VkFormat format, VkExtent3D extent)
1160 {
1161 try
1162 {
1163 const tcu::CompressedTexFormat tcuFormat = mapVkCompressedFormat(format);
1164 const size_t blockSize = tcu::getBlockSize(tcuFormat);
1165 const tcu::IVec3 blockPixelSize = tcu::getBlockPixelSize(tcuFormat);
1166 const int numBlocksX = deDivRoundUp32((int)extent.width, blockPixelSize.x());
1167 const int numBlocksY = deDivRoundUp32((int)extent.height, blockPixelSize.y());
1168 const int numBlocksZ = deDivRoundUp32((int)extent.depth, blockPixelSize.z());
1169
1170 return blockSize*numBlocksX*numBlocksY*numBlocksZ;
1171 }
1172 catch (...)
1173 {
1174 return 0; // Unsupported compressed format
1175 }
1176 }
1177
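// Total size of a YCbCr image is computed as the sum of its plane sizes, aligning each
// plane's start offset to that plane's element size; only extent.depth == 1 is handled.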
1178 VkDeviceSize getYCbCrImageDataSize (VkFormat format, VkExtent3D extent)
1179 {
1180 const PlanarFormatDescription desc = getPlanarFormatDescription(format);
1181 VkDeviceSize totalSize = 0;
1182
1183 DE_ASSERT(extent.depth == 1);
1184
1185 for (deUint32 planeNdx = 0; planeNdx < desc.numPlanes; ++planeNdx)
1186 {
1187 const deUint32 elementSize = desc.planes[planeNdx].elementSizeBytes;
1188
1189 totalSize = (VkDeviceSize)deAlign64((deInt64)totalSize, elementSize);
1190 totalSize += getPlaneSizeInBytes(desc, extent, planeNdx, 0, BUFFER_IMAGE_COPY_OFFSET_GRANULARITY);
1191 }
1192
1193 return totalSize;
1194 }
1195
1196 VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements (VkDevice, VkImage imageHandle, VkMemoryRequirements* requirements)
1197 {
1198 const Image* image = reinterpret_cast<const Image*>(imageHandle.getInternal());
1199
1200 requirements->memoryTypeBits = 1u;
1201 requirements->alignment = 16u;
1202
1203 if (isCompressedFormat(image->getFormat()))
1204 requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
1205 else if (isYCbCrFormat(image->getFormat()))
1206 requirements->size = getYCbCrImageDataSize(image->getFormat(), image->getExtent());
1207 else
1208 requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
1209 }
1210
1211 VKAPI_ATTR VkResult VKAPI_CALL allocateMemory (VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory)
1212 {
1213 #ifndef CTS_USES_VULKANSC
1214 const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
1215 const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
1216
1217 if ((exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
1218 || (importInfo && importInfo->buffer.internal))
1219 {
1220 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
1221 VK_NULL_RETURN((*pMemory = allocateNonDispHandle<ExternalDeviceMemoryAndroid, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
1222 #else
1223 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1224 #endif
1225 }
1226 else
1227 {
1228 VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
1229 }
1230 #else // CTS_USES_VULKANSC
1231 VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
1232 #endif // CTS_USES_VULKANSC
1233 }
1234
1235 VKAPI_ATTR VkResult VKAPI_CALL mapMemory (VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
1236 {
1237 DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
1238
1239 DE_UNREF(size);
1240 DE_UNREF(flags);
1241
1242 *ppData = (deUint8*)memory->map() + offset;
1243
1244 return VK_SUCCESS;
1245 }
1246
1247 VKAPI_ATTR void VKAPI_CALL unmapMemory (VkDevice device, VkDeviceMemory memHandle)
1248 {
1249 DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
1250
1251 DE_UNREF(device);
1252
1253 memory->unmap();
1254 }
1255
1256 #ifndef CTS_USES_VULKANSC
1257
1258 VKAPI_ATTR VkResult VKAPI_CALL getMemoryAndroidHardwareBufferANDROID (VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, pt::AndroidHardwareBufferPtr* pBuffer)
1259 {
1260 DE_UNREF(device);
1261
1262 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
1263 DeviceMemory* const memory = reinterpret_cast<ExternalDeviceMemoryAndroid*>(pInfo->memory.getInternal());
1264 ExternalDeviceMemoryAndroid* const androidMemory = static_cast<ExternalDeviceMemoryAndroid*>(memory);
1265
1266 AHardwareBuffer* hwbuffer = androidMemory->getHwBuffer();
1267 AHardwareBuffer_acquire(hwbuffer);
1268 pBuffer->internal = hwbuffer;
1269 #else
1270 DE_UNREF(pInfo);
1271 DE_UNREF(pBuffer);
1272 #endif
1273
1274 return VK_SUCCESS;
1275 }
1276
1277 #endif // CTS_USES_VULKANSC
1278
1279 VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets (VkDevice, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
1280 {
1281 DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)pAllocateInfo->descriptorPool.getInternal());
1282
1283 for (deUint32 ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
1284 {
1285 try
1286 {
1287 pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
1288 }
1289 catch (const std::bad_alloc&)
1290 {
1291 for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
1292 delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());
1293
1294 return VK_ERROR_OUT_OF_HOST_MEMORY;
1295 }
1296 catch (VkResult res)
1297 {
1298 for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
1299 delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());
1300
1301 return res;
1302 }
1303 }
1304
1305 return VK_SUCCESS;
1306 }
1307
1308 VKAPI_ATTR void VKAPI_CALL freeDescriptorSets (VkDevice, VkDescriptorPool descriptorPool, deUint32 count, const VkDescriptorSet* pDescriptorSets)
1309 {
1310 DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
1311
1312 for (deUint32 ndx = 0; ndx < count; ++ndx)
1313 poolImpl->free(pDescriptorSets[ndx]);
1314 }
1315
1316 VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool (VkDevice, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags)
1317 {
1318 DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
1319
1320 poolImpl->reset();
1321
1322 return VK_SUCCESS;
1323 }
1324
1325 VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
1326 {
1327 DE_UNREF(device);
1328
1329 if (pAllocateInfo && pCommandBuffers)
1330 {
1331 CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)pAllocateInfo->commandPool.getInternal());
1332
1333 for (deUint32 ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
1334 pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);
1335 }
1336
1337 return VK_SUCCESS;
1338 }
1339
1340 VKAPI_ATTR void VKAPI_CALL freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
1341 {
1342 CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)commandPool.getInternal());
1343
1344 DE_UNREF(device);
1345
1346 for (deUint32 ndx = 0; ndx < commandBufferCount; ++ndx)
1347 poolImpl->free(pCommandBuffers[ndx]);
1348 }
1349
1350
1351 VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR (VkPhysicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode)
1352 {
1353 DE_UNREF(pAllocator);
1354 VK_NULL_RETURN((*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
1355 }
1356
1357 VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR (VkDevice device, deUint32 swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains)
1358 {
1359 for (deUint32 ndx = 0; ndx < swapchainCount; ++ndx)
1360 {
1361 pSwapchains[ndx] = allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos+ndx, pAllocator);
1362 }
1363
1364 return VK_SUCCESS;
1365 }
1366
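// The null driver advertises no external buffer support, except for Android hardware buffers in non-SC builds.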
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceExternalBufferPropertiesKHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties)
{
	DE_UNREF(physicalDevice);
	DE_UNREF(pExternalBufferInfo);

	pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures			= 0;
	pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes	= 0;
	pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes			= 0;

#ifndef CTS_USES_VULKANSC
	if (pExternalBufferInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
	{
		pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures			= VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
		pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes	= VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
		pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes			= VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
	}
#endif // CTS_USES_VULKANSC
}

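// Delegates to the basic format query and, in non-SC builds, applies the Android hardware
// buffer restrictions for external handle type queries.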
VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties2KHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties)
{
#ifndef CTS_USES_VULKANSC
	const VkPhysicalDeviceExternalImageFormatInfo* const	externalInfo		= findStructure<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
	VkExternalImageFormatProperties* const					externalProperties	= findStructure<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
	VkResult												result;

	result = getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
	if (result != VK_SUCCESS)
		return result;

	if (externalInfo && externalInfo->handleType != 0)
	{
		if (externalInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
			return VK_ERROR_FORMAT_NOT_SUPPORTED;

		if (!(pImageFormatInfo->format == VK_FORMAT_R8G8B8A8_UNORM
			  || pImageFormatInfo->format == VK_FORMAT_R8G8B8_UNORM
			  || pImageFormatInfo->format == VK_FORMAT_R5G6B5_UNORM_PACK16
			  || pImageFormatInfo->format == VK_FORMAT_R16G16B16A16_SFLOAT
			  || pImageFormatInfo->format == VK_FORMAT_A2R10G10B10_UNORM_PACK32))
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if (pImageFormatInfo->type != VK_IMAGE_TYPE_2D)
			return VK_ERROR_FORMAT_NOT_SUPPORTED;

		if ((pImageFormatInfo->usage & ~(VK_IMAGE_USAGE_TRANSFER_SRC_BIT
										 | VK_IMAGE_USAGE_TRANSFER_DST_BIT
										 | VK_IMAGE_USAGE_SAMPLED_BIT
										 | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
			!= 0)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if ((pImageFormatInfo->flags & ~(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
										 /*| VK_IMAGE_CREATE_PROTECTED_BIT*/
										 /*| VK_IMAGE_CREATE_EXTENDED_USAGE_BIT*/))
			!= 0)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if (externalProperties)
		{
			externalProperties->externalMemoryProperties.externalMemoryFeatures		= VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT
																						| VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT
																						| VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
			externalProperties->externalMemoryProperties.exportFromImportedHandleTypes	= VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
			externalProperties->externalMemoryProperties.compatibleHandleTypes			= VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
		}
	}

	return VK_SUCCESS;
#else // CTS_USES_VULKANSC
	return getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
#endif // CTS_USES_VULKANSC
}

// \note getInstanceProcAddr is a little bit special:
// vkNullDriverImpl.inl needs it to define s_platformFunctions, but the
// getInstanceProcAddr() implementation needs other entry points from
// vkNullDriverImpl.inl, hence the forward declaration before the include.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName);

#include "vkNullDriverImpl.inl"

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName)
{
	if (instance)
	{
		return reinterpret_cast<Instance*>(instance)->getProcAddr(pName);
	}
	else
	{
		const std::string name = pName;

		if (name == "vkCreateInstance")
			return (PFN_vkVoidFunction)createInstance;
		else if (name == "vkEnumerateInstanceExtensionProperties")
			return (PFN_vkVoidFunction)enumerateInstanceExtensionProperties;
		else if (name == "vkEnumerateInstanceLayerProperties")
			return (PFN_vkVoidFunction)enumerateInstanceLayerProperties;
		else
			return (PFN_vkVoidFunction)DE_NULL;
	}
}

} // extern "C"

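// Instance and device objects only expose the statically generated entry point tables.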
Instance::Instance (const VkInstanceCreateInfo*)
	: m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
{
}

Device::Device (VkPhysicalDevice, const VkDeviceCreateInfo*)
	: m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
{
}

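// Wraps the null driver's entry points in the standard Library interface.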
class NullDriverLibrary : public Library
{
public:
	NullDriverLibrary (void)
		: m_library	(s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
		, m_driver	(m_library)
	{}

	const PlatformInterface&	getPlatformInterface	(void) const	{ return m_driver; }
	const tcu::FunctionLibrary&	getFunctionLibrary		(void) const	{ return m_library; }
private:
	const tcu::StaticFunctionLibrary	m_library;
	const PlatformDriver				m_driver;
};

} // anonymous

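// Entry point used to instantiate the null driver.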
Library* createNullDriver (void)
{
	return new NullDriverLibrary();
}

} // vk