/*-------------------------------------------------------------------------
 * Vulkan CTS Framework
 * --------------------
 *
 * Copyright (c) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Null (do-nothing) Vulkan implementation.
 *//*--------------------------------------------------------------------*/

#include "vkNullDriver.hpp"
#include "vkPlatform.hpp"
#include "vkImageUtil.hpp"
#include "vkQueryUtil.hpp"
#include "tcuFunctionLibrary.hpp"
#include "deMemory.h"

#if (DE_OS == DE_OS_ANDROID) && defined(__ANDROID_API_O__) && (DE_ANDROID_API >= __ANDROID_API_O__ /* __ANDROID_API_O__ */)
#	define USE_ANDROID_O_HARDWARE_BUFFER
#endif
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
#	include <android/hardware_buffer.h>
#endif

#include <stdexcept>
#include <algorithm>

namespace vk
{

namespace
{

using std::vector;

// Memory management

template<typename T>
void* allocateSystemMem (const VkAllocationCallbacks* pAllocator, VkSystemAllocationScope scope)
{
	void* ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void*), scope);
	if (!ptr)
		throw std::bad_alloc();
	return ptr;
}

void freeSystemMem (const VkAllocationCallbacks* pAllocator, void* mem)
{
	pAllocator->pfnFree(pAllocator->pUserData, mem);
}

template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	Object* obj = DE_NULL;

	if (pAllocator)
	{
		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
		try
		{
			obj = new (mem) Object(parent, pCreateInfo);
			DE_ASSERT(obj == mem);
		}
		catch (...)
		{
			pAllocator->pfnFree(pAllocator->pUserData, mem);
			throw;
		}
	}
	else
		obj = new Object(parent, pCreateInfo);

	return reinterpret_cast<Handle>(obj);
}

template<typename Object, typename Handle, typename CreateInfo>
Handle allocateHandle (const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	Object* obj = DE_NULL;

	if (pAllocator)
	{
		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
		try
		{
			obj = new (mem) Object(pCreateInfo);
			DE_ASSERT(obj == mem);
		}
		catch (...)
		{
			pAllocator->pfnFree(pAllocator->pUserData, mem);
			throw;
		}
	}
	else
		obj = new Object(pCreateInfo);

	return reinterpret_cast<Handle>(obj);
}

template<typename Object, typename Handle, typename Parent>
Handle allocateHandle (Parent parent, const VkAllocationCallbacks* pAllocator)
{
	Object* obj = DE_NULL;

	if (pAllocator)
	{
		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
		try
		{
			obj = new (mem) Object(parent);
			DE_ASSERT(obj == mem);
		}
		catch (...)
		{
			pAllocator->pfnFree(pAllocator->pUserData, mem);
			throw;
		}
	}
	else
		obj = new Object(parent);

	return reinterpret_cast<Handle>(obj);
}

template<typename Object, typename Handle>
void freeHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
{
	Object* obj = reinterpret_cast<Object*>(handle);

	if (pAllocator)
	{
		obj->~Object();
		freeSystemMem(pAllocator, reinterpret_cast<void*>(obj));
	}
	else
		delete obj;
}

template<typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	Object* const	obj		= allocateHandle<Object, Object*>(parent, pCreateInfo, pAllocator);
	return Handle((deUint64)(deUintptr)static_cast<BaseObject*>(obj));
}

template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	return allocateNonDispHandle<Object, Object, Handle, Parent, CreateInfo>(parent, pCreateInfo, pAllocator);
}

template<typename Object, typename Handle, typename Parent>
Handle allocateNonDispHandle (Parent parent, const VkAllocationCallbacks* pAllocator)
{
	Object* const	obj		= allocateHandle<Object, Object*>(parent, pAllocator);
	return Handle((deUint64)(deUintptr)obj);
}

template<typename Object, typename Handle>
void freeNonDispHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
{
	freeHandle<Object>(reinterpret_cast<Object*>((deUintptr)handle.getInternal()), pAllocator);
}

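/* Illustrative sketch (not part of the driver): the create/destroy entry points
 * below pair these helpers. For a device object such as Fence this looks roughly
 * like
 *
 *     *pFence = allocateNonDispHandle<Fence, VkFence>(device, pCreateInfo, pAllocator);
 *     ...
 *     freeNonDispHandle<Fence, VkFence>(fence, pAllocator);
 *
 * When allocation callbacks are provided the object is placement-new'd into
 * pAllocator-supplied memory, otherwise it lives on the heap; either way the
 * returned handle is simply the object pointer packed into a 64-bit
 * non-dispatchable handle value.
 */
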
// Object definitions

#define VK_NULL_RETURN(STMT)					\
	do {										\
		try {									\
			STMT;								\
			return VK_SUCCESS;					\
		} catch (const std::bad_alloc&) {		\
			return VK_ERROR_OUT_OF_HOST_MEMORY;	\
		} catch (VkResult res) {				\
			return res;							\
		}										\
	} while (deGetFalse())

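/* Illustrative sketch (not part of the driver): VK_NULL_RETURN wraps the handle
 * allocation inside a create call and translates exceptions into VkResult codes,
 * e.g. (hypothetical image-view case)
 *
 *     VK_NULL_RETURN((*pView = allocateNonDispHandle<ImageView, VkImageView>(device, pCreateInfo, pAllocator)));
 *
 * std::bad_alloc becomes VK_ERROR_OUT_OF_HOST_MEMORY, a thrown VkResult is
 * returned as-is, and the success path returns VK_SUCCESS.
 */
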
// \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
#define VK_NULL_FUNC_ENTRY(NAME, FUNC)	{ #NAME, (deFunctionPtr)FUNC }  // NOLINT(FUNC)

#define VK_NULL_DEFINE_DEVICE_OBJ(NAME)				\
struct NAME											\
{													\
	NAME (VkDevice, const Vk##NAME##CreateInfo*) {}	\
}

#define VK_NULL_DEFINE_OBJ_WITH_POSTFIX(DEVICE_OR_INSTANCE, NAME, POSTFIX)			\
struct NAME##POSTFIX																\
{																					\
	NAME##POSTFIX (DEVICE_OR_INSTANCE, const Vk##NAME##CreateInfo##POSTFIX*) {}		\
};

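/* For reference, VK_NULL_DEFINE_DEVICE_OBJ(Fence) below expands to
 *
 *     struct Fence
 *     {
 *         Fence (VkDevice, const VkFenceCreateInfo*) {}
 *     };
 *
 * i.e. a stateless placeholder whose constructor signature matches what the
 * allocateNonDispHandle helpers above expect.
 */
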
VK_NULL_DEFINE_DEVICE_OBJ(Fence);
VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
VK_NULL_DEFINE_DEVICE_OBJ(Event);
VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
VK_NULL_DEFINE_DEVICE_OBJ(SamplerYcbcrConversion);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Swapchain, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugUtilsMessenger, EXT)

#ifndef CTS_USES_VULKANSC
VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorUpdateTemplate);
VK_NULL_DEFINE_DEVICE_OBJ(PrivateDataSlot);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugReportCallback, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuModule, NVX)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuFunction, NVX)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Micromap, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, OpticalFlowSession, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, IndirectCommandsLayout, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSession, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSessionParameters, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, ValidationCache, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, BufferCollection, FUCHSIA)
#endif // CTS_USES_VULKANSC

class Instance
{
public:
										Instance		(const VkInstanceCreateInfo* instanceInfo);
										~Instance		(void) {}

	PFN_vkVoidFunction					getProcAddr		(const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }

private:
	const tcu::StaticFunctionLibrary	m_functions;
};

class SurfaceKHR
{
public:
#ifndef CTS_USES_VULKANSC
										SurfaceKHR		(VkInstance, const VkXlibSurfaceCreateInfoKHR*)		{}
										SurfaceKHR		(VkInstance, const VkXcbSurfaceCreateInfoKHR*)		{}
										SurfaceKHR		(VkInstance, const VkWaylandSurfaceCreateInfoKHR*)	{}
										SurfaceKHR		(VkInstance, const VkAndroidSurfaceCreateInfoKHR*)	{}
										SurfaceKHR		(VkInstance, const VkWin32SurfaceCreateInfoKHR*)	{}
										SurfaceKHR		(VkInstance, const VkViSurfaceCreateInfoNN*)		{}
										SurfaceKHR		(VkInstance, const VkIOSSurfaceCreateInfoMVK*)		{}
										SurfaceKHR		(VkInstance, const VkMacOSSurfaceCreateInfoMVK*)	{}
										SurfaceKHR		(VkInstance, const VkImagePipeSurfaceCreateInfoFUCHSIA*)	{}
										SurfaceKHR		(VkInstance, const VkStreamDescriptorSurfaceCreateInfoGGP*)	{}
										SurfaceKHR		(VkInstance, const VkMetalSurfaceCreateInfoEXT*)	{}
										SurfaceKHR		(VkInstance, const VkOHOSSurfaceCreateInfoOpenHarmony*)	{}
										SurfaceKHR		(VkInstance, const VkScreenSurfaceCreateInfoQNX*)	{}
#endif // CTS_USES_VULKANSC
										SurfaceKHR		(VkInstance, const VkDisplaySurfaceCreateInfoKHR*)	{}
										SurfaceKHR		(VkInstance, const VkHeadlessSurfaceCreateInfoEXT*)	{}
										~SurfaceKHR		(void)												{}
};

class DisplayModeKHR
{
public:
										DisplayModeKHR	(VkDisplayKHR, const VkDisplayModeCreateInfoKHR*) {}
										~DisplayModeKHR	(void) {}
};

class Device
{
public:
										Device			(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* deviceInfo);
										~Device			(void) {}

	PFN_vkVoidFunction					getProcAddr		(const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }

private:
	const tcu::StaticFunctionLibrary	m_functions;
};

class Pipeline
{
public:
	Pipeline (VkDevice, const VkGraphicsPipelineCreateInfo*) {}
	Pipeline (VkDevice, const VkComputePipelineCreateInfo*) {}
#ifndef CTS_USES_VULKANSC
	Pipeline (VkDevice, const VkRayTracingPipelineCreateInfoNV*) {}
	Pipeline (VkDevice, const VkRayTracingPipelineCreateInfoKHR*) {}
#endif // CTS_USES_VULKANSC
};

class RenderPass
{
public:
	RenderPass (VkDevice, const VkRenderPassCreateInfo*)		{}
	RenderPass (VkDevice, const VkRenderPassCreateInfo2*)		{}
};

class Buffer
{
public:
						Buffer		(VkDevice, const VkBufferCreateInfo* pCreateInfo)
		: m_size (pCreateInfo->size)
	{
	}

	VkDeviceSize		getSize		(void) const { return m_size;	}

private:
	const VkDeviceSize	m_size;
};

VkExternalMemoryHandleTypeFlags getExternalTypesHandle (const VkImageCreateInfo* pCreateInfo)
{
	const VkExternalMemoryImageCreateInfo* const	externalInfo	= findStructure<VkExternalMemoryImageCreateInfo>(pCreateInfo->pNext);

	return externalInfo ? externalInfo->handleTypes : 0u;
}

class Image
{
public:
												Image					(VkDevice, const VkImageCreateInfo* pCreateInfo)
		: m_imageType			(pCreateInfo->imageType)
		, m_format				(pCreateInfo->format)
		, m_extent				(pCreateInfo->extent)
		, m_arrayLayers			(pCreateInfo->arrayLayers)
		, m_samples				(pCreateInfo->samples)
		, m_usage				(pCreateInfo->usage)
		, m_flags				(pCreateInfo->flags)
		, m_externalHandleTypes	(getExternalTypesHandle(pCreateInfo))
	{
	}

	VkImageType									getImageType			(void) const { return m_imageType;				}
	VkFormat									getFormat				(void) const { return m_format;					}
	VkExtent3D									getExtent				(void) const { return m_extent;					}
	deUint32									getArrayLayers			(void) const { return m_arrayLayers;			}
	VkSampleCountFlagBits						getSamples				(void) const { return m_samples;				}
	VkImageUsageFlags							getUsage				(void) const { return m_usage;					}
	VkImageCreateFlags							getFlags				(void) const { return m_flags;					}
	VkExternalMemoryHandleTypeFlags				getExternalHandleTypes	(void) const { return m_externalHandleTypes;	}

private:
	const VkImageType							m_imageType;
	const VkFormat								m_format;
	const VkExtent3D							m_extent;
	const deUint32								m_arrayLayers;
	const VkSampleCountFlagBits					m_samples;
	const VkImageUsageFlags						m_usage;
	const VkImageCreateFlags					m_flags;
	const VkExternalMemoryHandleTypeFlags		m_externalHandleTypes;
};

void* allocateHeap (const VkMemoryAllocateInfo* pAllocInfo)
{
	// \todo [2015-12-03 pyry] Alignment requirements?
	// \todo [2015-12-03 pyry] Empty allocations okay?
	if (pAllocInfo->allocationSize > 0)
	{
		void* const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
		if (!heapPtr)
			throw std::bad_alloc();
		return heapPtr;
	}
	else
		return DE_NULL;
}

void freeHeap (void* ptr)
{
	deFree(ptr);
}

class DeviceMemory
{
public:
	virtual			~DeviceMemory	(void) {}
	virtual void*	map				(void) = 0;
	virtual void	unmap			(void) = 0;
};

class PrivateDeviceMemory : public DeviceMemory
{
public:
						PrivateDeviceMemory		(VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
		: m_memory(allocateHeap(pAllocInfo))
	{
		// \todo [2016-08-03 pyry] In some cases leaving data uninitialized would help valgrind analysis,
		//						   but currently it mostly hinders it.
		if (m_memory)
			deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
	}
	virtual				~PrivateDeviceMemory	(void)
	{
		freeHeap(m_memory);
	}

	virtual void*		map						(void) /*override*/ { return m_memory; }
	virtual void		unmap					(void) /*override*/ {}

private:
	void* const			m_memory;
};

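/* Note (not part of the driver): map() simply returns the deMalloc'd block, so a
 * mapping obtained through the mapMemory() entry point below behaves like
 * host-visible, host-coherent memory, consistent with the single memory type
 * advertised by getPhysicalDeviceMemoryProperties(). A minimal usage sketch,
 * assuming hypothetical handles mem/allocSize:
 *
 *     void* ptr = DE_NULL;
 *     mapMemory(device, mem, 0u, allocSize, 0u, &ptr);	// returns m_memory + 0
 *     deMemset(ptr, 0, (size_t)allocSize);
 *     unmapMemory(device, mem);							// no-op for private memory
 */
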
#ifndef CTS_USES_VULKANSC

#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
AHardwareBuffer* findOrCreateHwBuffer (const VkMemoryAllocateInfo* pAllocInfo)
{
	const VkExportMemoryAllocateInfo* const					exportInfo		= findStructure<VkExportMemoryAllocateInfo>(pAllocInfo->pNext);
	const VkImportAndroidHardwareBufferInfoANDROID* const	importInfo		= findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocInfo->pNext);
	const VkMemoryDedicatedAllocateInfo* const				dedicatedInfo	= findStructure<VkMemoryDedicatedAllocateInfo>(pAllocInfo->pNext);
	const Image* const										image			= dedicatedInfo && !!dedicatedInfo->image ? reinterpret_cast<const Image*>(dedicatedInfo->image.getInternal()) : DE_NULL;
	AHardwareBuffer*										hwbuffer		= DE_NULL;

	// Import and export aren't mutually exclusive; we can have both simultaneously.
	DE_ASSERT((importInfo && importInfo->buffer.internal) ||
		(exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0));

	if (importInfo && importInfo->buffer.internal)
	{
		hwbuffer = (AHardwareBuffer*)importInfo->buffer.internal;
		AHardwareBuffer_acquire(hwbuffer);
	}
	else if (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
	{
		AHardwareBuffer_Desc hwbufferDesc;
		deMemset(&hwbufferDesc, 0, sizeof(hwbufferDesc));

		if (image)
		{
			hwbufferDesc.width	= image->getExtent().width;
			hwbufferDesc.height	= image->getExtent().height;
			hwbufferDesc.layers	= image->getArrayLayers();
			switch (image->getFormat())
			{
				case VK_FORMAT_R8G8B8A8_UNORM:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
					break;
				case VK_FORMAT_R8G8B8_UNORM:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
					break;
				case VK_FORMAT_R5G6B5_UNORM_PACK16:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
					break;
				case VK_FORMAT_R16G16B16A16_SFLOAT:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
					break;
				case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
					break;
				default:
					DE_FATAL("Unsupported image format for Android hardware buffer export");
					break;
			}
			if ((image->getUsage() & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
				hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
			if ((image->getUsage() & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0)
				hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
			// if ((image->getFlags() & VK_IMAGE_CREATE_PROTECTED_BIT) != 0)
			//	hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

			// Make sure we have at least one AHB GPU usage, even if the image doesn't have
			// any Vulkan usages corresponding to AHB GPU usages.
			if ((image->getUsage() & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) == 0)
				hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
		}
		else
		{
			hwbufferDesc.width	= static_cast<deUint32>(pAllocInfo->allocationSize);
			hwbufferDesc.height	= 1;
			hwbufferDesc.layers	= 1;
			hwbufferDesc.format	= AHARDWAREBUFFER_FORMAT_BLOB;
			hwbufferDesc.usage	= AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
		}

		AHardwareBuffer_allocate(&hwbufferDesc, &hwbuffer);
	}

	return hwbuffer;
}

class ExternalDeviceMemoryAndroid : public DeviceMemory
{
public:
						ExternalDeviceMemoryAndroid		(VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
		: m_hwbuffer(findOrCreateHwBuffer(pAllocInfo))
	{}
	virtual				~ExternalDeviceMemoryAndroid	(void)
	{
		if (m_hwbuffer)
			AHardwareBuffer_release(m_hwbuffer);
	}

	virtual void*		map								(void) /*override*/
	{
		void* p;
		AHardwareBuffer_lock(m_hwbuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, NULL, &p);
		return p;
	}

	virtual void		unmap							(void) /*override*/ { AHardwareBuffer_unlock(m_hwbuffer, NULL); }

	AHardwareBuffer*	getHwBuffer						(void)				{ return m_hwbuffer;						}

private:
	AHardwareBuffer* const	m_hwbuffer;
};
#endif // defined(USE_ANDROID_O_HARDWARE_BUFFER)

#endif // CTS_USES_VULKANSC

class DeferredOperationKHR
{
public:
						DeferredOperationKHR		(VkDevice)
						{}
};

class CommandBuffer
{
public:
						CommandBuffer				(VkDevice, VkCommandPool, VkCommandBufferLevel)
						{}
};

class CommandPool
{
public:
										CommandPool		(VkDevice device, const VkCommandPoolCreateInfo*)
											: m_device(device)
										{}
#ifndef CTS_USES_VULKANSC
										~CommandPool	(void);
#endif // CTS_USES_VULKANSC

	VkCommandBuffer						allocate		(VkCommandBufferLevel level);
	void								free			(VkCommandBuffer buffer);

private:
	const VkDevice						m_device;

	vector<CommandBuffer*>				m_buffers;
};

#ifndef CTS_USES_VULKANSC

CommandPool::~CommandPool (void)
{
	for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
		delete m_buffers[ndx];
}

#endif // CTS_USES_VULKANSC

VkCommandBuffer CommandPool::allocate (VkCommandBufferLevel level)
{
	CommandBuffer* const	impl	= new CommandBuffer(m_device, VkCommandPool(reinterpret_cast<deUintptr>(this)), level);

	try
	{
		m_buffers.push_back(impl);
	}
	catch (...)
	{
		delete impl;
		throw;
	}

	return reinterpret_cast<VkCommandBuffer>(impl);
}

void CommandPool::free (VkCommandBuffer buffer)
{
	CommandBuffer* const	impl	= reinterpret_cast<CommandBuffer*>(buffer);

	for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
	{
		if (m_buffers[ndx] == impl)
		{
			std::swap(m_buffers[ndx], m_buffers.back());
			m_buffers.pop_back();
			delete impl;
			return;
		}
	}

	DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
}

class DescriptorSet
{
public:
	DescriptorSet (VkDevice, VkDescriptorPool, VkDescriptorSetLayout) {}
};

class DescriptorPool
{
public:
										DescriptorPool	(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo)
											: m_device	(device)
											, m_flags	(pCreateInfo->flags)
										{}
										~DescriptorPool	(void)
										{
											reset();
										}

	VkDescriptorSet						allocate		(VkDescriptorSetLayout setLayout);
	void								free			(VkDescriptorSet set);

	void								reset			(void);

private:
	const VkDevice						m_device;
	const VkDescriptorPoolCreateFlags	m_flags;

	vector<DescriptorSet*>				m_managedSets;
};

VkDescriptorSet DescriptorPool::allocate (VkDescriptorSetLayout setLayout)
{
	DescriptorSet* const	impl	= new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<deUintptr>(this)), setLayout);

	try
	{
		m_managedSets.push_back(impl);
	}
	catch (...)
	{
		delete impl;
		throw;
	}

	return VkDescriptorSet(reinterpret_cast<deUintptr>(impl));
}

void DescriptorPool::free (VkDescriptorSet set)
{
	DescriptorSet* const	impl	= reinterpret_cast<DescriptorSet*>((deUintptr)set.getInternal());

	DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
	DE_UNREF(m_flags);

	for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
	{
		if (m_managedSets[ndx] == impl)
		{
			std::swap(m_managedSets[ndx], m_managedSets.back());
			m_managedSets.pop_back();
			delete impl;
			return;
		}
	}

	DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
}

void DescriptorPool::reset (void)
{
	for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
		delete m_managedSets[ndx];
	m_managedSets.clear();
}

// API implementation

extern "C"
{

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr (VkDevice device, const char* pName)
{
	return reinterpret_cast<Device*>(device)->getProcAddr(pName);
}

VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
	deUint32 allocNdx;
	try
	{
		for (allocNdx = 0; allocNdx < count; allocNdx++)
			pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);

		return VK_SUCCESS;
	}
	catch (const std::bad_alloc&)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}
	catch (VkResult err)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return err;
	}
}

VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
	deUint32 allocNdx;
	try
	{
		for (allocNdx = 0; allocNdx < count; allocNdx++)
			pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);

		return VK_SUCCESS;
	}
	catch (const std::bad_alloc&)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}
	catch (VkResult err)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return err;
	}
}

#ifndef CTS_USES_VULKANSC

VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesNV (VkDevice device, VkPipelineCache, deUint32 count, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
	deUint32 allocNdx;
	try
	{
		for (allocNdx = 0; allocNdx < count; allocNdx++)
			pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);

		return VK_SUCCESS;
	}
	catch (const std::bad_alloc&)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}
	catch (VkResult err)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return err;
	}
}

VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesKHR (VkDevice device, VkPipelineCache, deUint32 count, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
	deUint32 allocNdx;
	try
	{
		for (allocNdx = 0; allocNdx < count; allocNdx++)
			pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);

		return VK_SUCCESS;
	}
	catch (const std::bad_alloc&)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}
	catch (VkResult err)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return err;
	}
}

#endif // CTS_USES_VULKANSC

VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices (VkInstance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pDevices)
{
	if (pDevices && *pPhysicalDeviceCount >= 1u)
		*pDevices = reinterpret_cast<VkPhysicalDevice>((void*)(deUintptr)1u);

	*pPhysicalDeviceCount = 1;

	return VK_SUCCESS;
}

VkResult enumerateExtensions (deUint32 numExtensions, const VkExtensionProperties* extensions, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
	const deUint32	dstSize		= pPropertyCount ? *pPropertyCount : 0;

	if (pPropertyCount)
		*pPropertyCount = numExtensions;

	if (pProperties)
	{
		for (deUint32 ndx = 0; ndx < de::min(numExtensions, dstSize); ++ndx)
			pProperties[ndx] = extensions[ndx];

		if (dstSize < numExtensions)
			return VK_INCOMPLETE;
	}

	return VK_SUCCESS;
}

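/* Sketch (not part of the driver): callers use the standard Vulkan two-call
 * idiom against the enumerate* entry points below -- first query the count,
 * then fetch the data -- e.g.
 *
 *     deUint32 numExts = 0u;
 *     enumerateInstanceExtensionProperties(DE_NULL, &numExts, DE_NULL);
 *     std::vector<VkExtensionProperties> exts(numExts);
 *     enumerateInstanceExtensionProperties(DE_NULL, &numExts, exts.data());
 *
 * enumerateExtensions() above returns VK_INCOMPLETE when the caller-provided
 * array is smaller than the number of available extensions.
 */
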
VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties (const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
	static const VkExtensionProperties	s_extensions[]	=
	{
		{ "VK_KHR_get_physical_device_properties2",	1u },
		{ "VK_KHR_external_memory_capabilities",	1u },
	};

	if (!pLayerName)
		return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
	else
		return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties (VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
	DE_UNREF(physicalDevice);

	static const VkExtensionProperties	s_extensions[]	=
	{
		{ "VK_KHR_bind_memory2",								1u },
		{ "VK_KHR_external_memory",								1u },
		{ "VK_KHR_get_memory_requirements2",					1u },
		{ "VK_KHR_maintenance1",								1u },
		{ "VK_KHR_sampler_ycbcr_conversion",					1u },
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
		{ "VK_ANDROID_external_memory_android_hardware_buffer",	1u },
#endif
	};

	if (!pLayerName)
		return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
	else
		return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
{
	DE_UNREF(physicalDevice);

	// Enable all features to allow as many tests as possible to run.
	pFeatures->robustBufferAccess							= VK_TRUE;
	pFeatures->fullDrawIndexUint32							= VK_TRUE;
	pFeatures->imageCubeArray								= VK_TRUE;
	pFeatures->independentBlend								= VK_TRUE;
	pFeatures->geometryShader								= VK_TRUE;
	pFeatures->tessellationShader							= VK_TRUE;
	pFeatures->sampleRateShading							= VK_TRUE;
	pFeatures->dualSrcBlend									= VK_TRUE;
	pFeatures->logicOp										= VK_TRUE;
	pFeatures->multiDrawIndirect							= VK_TRUE;
	pFeatures->drawIndirectFirstInstance					= VK_TRUE;
	pFeatures->depthClamp									= VK_TRUE;
	pFeatures->depthBiasClamp								= VK_TRUE;
	pFeatures->fillModeNonSolid								= VK_TRUE;
	pFeatures->depthBounds									= VK_TRUE;
	pFeatures->wideLines									= VK_TRUE;
	pFeatures->largePoints									= VK_TRUE;
	pFeatures->alphaToOne									= VK_TRUE;
	pFeatures->multiViewport								= VK_TRUE;
	pFeatures->samplerAnisotropy							= VK_TRUE;
	pFeatures->textureCompressionETC2						= VK_TRUE;
	pFeatures->textureCompressionASTC_LDR					= VK_TRUE;
	pFeatures->textureCompressionBC							= VK_TRUE;
	pFeatures->occlusionQueryPrecise						= VK_TRUE;
	pFeatures->pipelineStatisticsQuery						= VK_TRUE;
	pFeatures->vertexPipelineStoresAndAtomics				= VK_TRUE;
	pFeatures->fragmentStoresAndAtomics						= VK_TRUE;
	pFeatures->shaderTessellationAndGeometryPointSize		= VK_TRUE;
	pFeatures->shaderImageGatherExtended					= VK_TRUE;
	pFeatures->shaderStorageImageExtendedFormats			= VK_TRUE;
	pFeatures->shaderStorageImageMultisample				= VK_TRUE;
	pFeatures->shaderStorageImageReadWithoutFormat			= VK_TRUE;
	pFeatures->shaderStorageImageWriteWithoutFormat			= VK_TRUE;
	pFeatures->shaderUniformBufferArrayDynamicIndexing		= VK_TRUE;
	pFeatures->shaderSampledImageArrayDynamicIndexing		= VK_TRUE;
	pFeatures->shaderStorageBufferArrayDynamicIndexing		= VK_TRUE;
	pFeatures->shaderStorageImageArrayDynamicIndexing		= VK_TRUE;
	pFeatures->shaderClipDistance							= VK_TRUE;
	pFeatures->shaderCullDistance							= VK_TRUE;
	pFeatures->shaderFloat64								= VK_TRUE;
	pFeatures->shaderInt64									= VK_TRUE;
	pFeatures->shaderInt16									= VK_TRUE;
	pFeatures->shaderResourceResidency						= VK_TRUE;
	pFeatures->shaderResourceMinLod							= VK_TRUE;
	pFeatures->sparseBinding								= VK_TRUE;
	pFeatures->sparseResidencyBuffer						= VK_TRUE;
	pFeatures->sparseResidencyImage2D						= VK_TRUE;
	pFeatures->sparseResidencyImage3D						= VK_TRUE;
	pFeatures->sparseResidency2Samples						= VK_TRUE;
	pFeatures->sparseResidency4Samples						= VK_TRUE;
	pFeatures->sparseResidency8Samples						= VK_TRUE;
	pFeatures->sparseResidency16Samples						= VK_TRUE;
	pFeatures->sparseResidencyAliased						= VK_TRUE;
	pFeatures->variableMultisampleRate						= VK_TRUE;
	pFeatures->inheritedQueries								= VK_TRUE;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties (VkPhysicalDevice, VkPhysicalDeviceProperties* props)
{
	deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));

	props->apiVersion		= VK_API_VERSION_1_1;
	props->driverVersion	= 1u;
	props->deviceType		= VK_PHYSICAL_DEVICE_TYPE_OTHER;

	deMemcpy(props->deviceName, "null", 5);

	// Spec minmax
	props->limits.maxImageDimension1D									= 4096;
	props->limits.maxImageDimension2D									= 4096;
	props->limits.maxImageDimension3D									= 256;
	props->limits.maxImageDimensionCube									= 4096;
	props->limits.maxImageArrayLayers									= 256;
	props->limits.maxTexelBufferElements								= 65536;
	props->limits.maxUniformBufferRange									= 16384;
	props->limits.maxStorageBufferRange									= 1u<<27;
	props->limits.maxPushConstantsSize									= 128;
	props->limits.maxMemoryAllocationCount								= 4096;
	props->limits.maxSamplerAllocationCount								= 4000;
	props->limits.bufferImageGranularity								= 131072;
	props->limits.sparseAddressSpaceSize								= 1u<<31;
	props->limits.maxBoundDescriptorSets								= 4;
	props->limits.maxPerStageDescriptorSamplers							= 16;
	props->limits.maxPerStageDescriptorUniformBuffers					= 12;
	props->limits.maxPerStageDescriptorStorageBuffers					= 4;
	props->limits.maxPerStageDescriptorSampledImages					= 16;
	props->limits.maxPerStageDescriptorStorageImages					= 4;
	props->limits.maxPerStageDescriptorInputAttachments					= 4;
	props->limits.maxPerStageResources									= 128;
	props->limits.maxDescriptorSetSamplers								= 96;
	props->limits.maxDescriptorSetUniformBuffers						= 72;
	props->limits.maxDescriptorSetUniformBuffersDynamic					= 8;
	props->limits.maxDescriptorSetStorageBuffers						= 24;
	props->limits.maxDescriptorSetStorageBuffersDynamic					= 4;
	props->limits.maxDescriptorSetSampledImages							= 96;
	props->limits.maxDescriptorSetStorageImages							= 24;
	props->limits.maxDescriptorSetInputAttachments						= 4;
	props->limits.maxVertexInputAttributes								= 16;
	props->limits.maxVertexInputBindings								= 16;
	props->limits.maxVertexInputAttributeOffset							= 2047;
	props->limits.maxVertexInputBindingStride							= 2048;
	props->limits.maxVertexOutputComponents								= 64;
	props->limits.maxTessellationGenerationLevel						= 64;
	props->limits.maxTessellationPatchSize								= 32;
	props->limits.maxTessellationControlPerVertexInputComponents		= 64;
	props->limits.maxTessellationControlPerVertexOutputComponents		= 64;
	props->limits.maxTessellationControlPerPatchOutputComponents		= 120;
	props->limits.maxTessellationControlTotalOutputComponents			= 2048;
	props->limits.maxTessellationEvaluationInputComponents				= 64;
	props->limits.maxTessellationEvaluationOutputComponents				= 64;
	props->limits.maxGeometryShaderInvocations							= 32;
	props->limits.maxGeometryInputComponents							= 64;
	props->limits.maxGeometryOutputComponents							= 64;
	props->limits.maxGeometryOutputVertices								= 256;
	props->limits.maxGeometryTotalOutputComponents						= 1024;
	props->limits.maxFragmentInputComponents							= 64;
	props->limits.maxFragmentOutputAttachments							= 4;
	props->limits.maxFragmentDualSrcAttachments							= 1;
	props->limits.maxFragmentCombinedOutputResources					= 4;
	props->limits.maxComputeSharedMemorySize							= 16384;
	props->limits.maxComputeWorkGroupCount[0]							= 65535;
	props->limits.maxComputeWorkGroupCount[1]							= 65535;
	props->limits.maxComputeWorkGroupCount[2]							= 65535;
	props->limits.maxComputeWorkGroupInvocations						= 128;
	props->limits.maxComputeWorkGroupSize[0]							= 128;
	props->limits.maxComputeWorkGroupSize[1]							= 128;
	props->limits.maxComputeWorkGroupSize[2]							= 128;
	props->limits.subPixelPrecisionBits									= 4;
	props->limits.subTexelPrecisionBits									= 4;
	props->limits.mipmapPrecisionBits									= 4;
	props->limits.maxDrawIndexedIndexValue								= 0xffffffffu;
	props->limits.maxDrawIndirectCount									= (1u<<16) - 1u;
	props->limits.maxSamplerLodBias										= 2.0f;
	props->limits.maxSamplerAnisotropy									= 16.0f;
	props->limits.maxViewports											= 16;
	props->limits.maxViewportDimensions[0]								= 4096;
	props->limits.maxViewportDimensions[1]								= 4096;
	props->limits.viewportBoundsRange[0]								= -8192.f;
	props->limits.viewportBoundsRange[1]								= 8191.f;
	props->limits.viewportSubPixelBits									= 0;
	props->limits.minMemoryMapAlignment									= 64;
	props->limits.minTexelBufferOffsetAlignment							= 256;
	props->limits.minUniformBufferOffsetAlignment						= 256;
	props->limits.minStorageBufferOffsetAlignment						= 256;
	props->limits.minTexelOffset										= -8;
	props->limits.maxTexelOffset										= 7;
	props->limits.minTexelGatherOffset									= -8;
	props->limits.maxTexelGatherOffset									= 7;
	props->limits.minInterpolationOffset								= -0.5f;
	props->limits.maxInterpolationOffset								= 0.5f; // -1ulp
	props->limits.subPixelInterpolationOffsetBits						= 4;
	props->limits.maxFramebufferWidth									= 4096;
	props->limits.maxFramebufferHeight									= 4096;
	props->limits.maxFramebufferLayers									= 256;
	props->limits.framebufferColorSampleCounts							= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferDepthSampleCounts							= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferStencilSampleCounts						= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferNoAttachmentsSampleCounts					= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.maxColorAttachments									= 4;
	props->limits.sampledImageColorSampleCounts							= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.sampledImageIntegerSampleCounts						= VK_SAMPLE_COUNT_1_BIT;
	props->limits.sampledImageDepthSampleCounts							= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.sampledImageStencilSampleCounts						= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.storageImageSampleCounts								= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.maxSampleMaskWords									= 1;
	props->limits.timestampComputeAndGraphics							= VK_TRUE;
	props->limits.timestampPeriod										= 1.0f;
	props->limits.maxClipDistances										= 8;
	props->limits.maxCullDistances										= 8;
	props->limits.maxCombinedClipAndCullDistances						= 8;
	props->limits.discreteQueuePriorities								= 2;
	props->limits.pointSizeRange[0]										= 1.0f;
	props->limits.pointSizeRange[1]										= 64.0f; // -1ulp
	props->limits.lineWidthRange[0]										= 1.0f;
	props->limits.lineWidthRange[1]										= 8.0f; // -1ulp
	props->limits.pointSizeGranularity									= 1.0f;
	props->limits.lineWidthGranularity									= 1.0f;
	props->limits.strictLines											= 0;
	props->limits.standardSampleLocations								= VK_TRUE;
	props->limits.optimalBufferCopyOffsetAlignment						= 256;
	props->limits.optimalBufferCopyRowPitchAlignment					= 256;
	props->limits.nonCoherentAtomSize									= 128;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice, deUint32* count, VkQueueFamilyProperties* props)
{
	if (props && *count >= 1u)
	{
		deMemset(props, 0, sizeof(VkQueueFamilyProperties));

		props->queueCount			= 4u;
		props->queueFlags			= VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT;
		props->timestampValidBits	= 64;
	}

	*count = 1u;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties (VkPhysicalDevice, VkPhysicalDeviceMemoryProperties* props)
{
	deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));

	props->memoryTypeCount				= 1u;
	props->memoryTypes[0].heapIndex		= 0u;
	props->memoryTypes[0].propertyFlags	= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
										| VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
										| VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

	props->memoryHeapCount				= 1u;
	props->memoryHeaps[0].size			= 1ull << 31;
	props->memoryHeaps[0].flags			= 0u;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties (VkPhysicalDevice, VkFormat format, VkFormatProperties* pFormatProperties)
{
	const VkFormatFeatureFlags	allFeatures	= VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
											| VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT
											| VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT
											| VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT
											| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT
											| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT
											| VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT
											| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
											| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT
											| VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT
											| VK_FORMAT_FEATURE_BLIT_SRC_BIT
											| VK_FORMAT_FEATURE_BLIT_DST_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT
											| VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT
											| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT
											| VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT;

	pFormatProperties->linearTilingFeatures		= allFeatures;
	pFormatProperties->optimalTilingFeatures	= allFeatures;
	pFormatProperties->bufferFeatures			= allFeatures;

	if (isYCbCrFormat(format) && getPlaneCount(format) > 1)
		pFormatProperties->optimalTilingFeatures |= VK_FORMAT_FEATURE_DISJOINT_BIT;
}

VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
{
	DE_UNREF(physicalDevice);
	DE_UNREF(format);
	DE_UNREF(type);
	DE_UNREF(tiling);
	DE_UNREF(usage);
	DE_UNREF(flags);

	pImageFormatProperties->maxArrayLayers		= 8;
	pImageFormatProperties->maxExtent.width		= 4096;
	pImageFormatProperties->maxExtent.height	= 4096;
	pImageFormatProperties->maxExtent.depth		= 4096;
	pImageFormatProperties->maxMipLevels		= deLog2Ceil32(4096) + 1;
	pImageFormatProperties->maxResourceSize		= 64u * 1024u * 1024u;
	pImageFormatProperties->sampleCounts		= VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;

	return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue)
{
	DE_UNREF(device);
	DE_UNREF(queueFamilyIndex);

	if (pQueue)
		*pQueue = reinterpret_cast<VkQueue>((deUint64)queueIndex + 1);
}

VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements (VkDevice, VkBuffer bufferHandle, VkMemoryRequirements* requirements)
{
	const Buffer*	buffer	= reinterpret_cast<const Buffer*>(bufferHandle.getInternal());

	requirements->memoryTypeBits	= 1u;
	requirements->size				= buffer->getSize();
	requirements->alignment			= (VkDeviceSize)1u;
}

VkDeviceSize getPackedImageDataSize (VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
{
	return (VkDeviceSize)getPixelSize(mapVkFormat(format))
			* (VkDeviceSize)extent.width
			* (VkDeviceSize)extent.height
			* (VkDeviceSize)extent.depth
			* (VkDeviceSize)samples;
}
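
/* Worked example (illustrative): for a 4096x4096 VK_FORMAT_R8G8B8A8_UNORM image
 * with depth 1 and VK_SAMPLE_COUNT_1_BIT, getPixelSize() is 4 bytes, so the
 * packed size reported back through getImageMemoryRequirements() is
 * 4 * 4096 * 4096 * 1 * 1 = 67108864 bytes (64 MiB), which matches the
 * maxResourceSize advertised by getPhysicalDeviceImageFormatProperties() above.
 */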
1159 
getCompressedImageDataSize(VkFormat format,VkExtent3D extent)1160 VkDeviceSize getCompressedImageDataSize (VkFormat format, VkExtent3D extent)
1161 {
1162 	try
1163 	{
1164 		const tcu::CompressedTexFormat	tcuFormat		= mapVkCompressedFormat(format);
1165 		const size_t					blockSize		= tcu::getBlockSize(tcuFormat);
1166 		const tcu::IVec3				blockPixelSize	= tcu::getBlockPixelSize(tcuFormat);
1167 		const int						numBlocksX		= deDivRoundUp32((int)extent.width, blockPixelSize.x());
1168 		const int						numBlocksY		= deDivRoundUp32((int)extent.height, blockPixelSize.y());
1169 		const int						numBlocksZ		= deDivRoundUp32((int)extent.depth, blockPixelSize.z());
1170 
1171 		return blockSize*numBlocksX*numBlocksY*numBlocksZ;
1172 	}
1173 	catch (...)
1174 	{
1175 		return 0; // Unsupported compressed format
1176 	}
1177 }
1178 
getYCbCrImageDataSize(VkFormat format,VkExtent3D extent)1179 VkDeviceSize getYCbCrImageDataSize (VkFormat format, VkExtent3D extent)
1180 {
1181 	const PlanarFormatDescription	desc		= getPlanarFormatDescription(format);
1182 	VkDeviceSize					totalSize	= 0;
1183 
1184 	DE_ASSERT(extent.depth == 1);
1185 
1186 	for (deUint32 planeNdx = 0; planeNdx < desc.numPlanes; ++planeNdx)
1187 	{
1188 		const deUint32	elementSize	= desc.planes[planeNdx].elementSizeBytes;
1189 
1190 		totalSize = (VkDeviceSize)deAlign64((deInt64)totalSize, elementSize);
1191 		totalSize += getPlaneSizeInBytes(desc, extent, planeNdx, 0, BUFFER_IMAGE_COPY_OFFSET_GRANULARITY);
1192 	}
1193 
1194 	return totalSize;
1195 }
1196 
getImageMemoryRequirements(VkDevice,VkImage imageHandle,VkMemoryRequirements * requirements)1197 VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements (VkDevice, VkImage imageHandle, VkMemoryRequirements* requirements)
1198 {
1199 	const Image*	image	= reinterpret_cast<const Image*>(imageHandle.getInternal());
1200 
1201 	requirements->memoryTypeBits	= 1u;
1202 	requirements->alignment			= 16u;
1203 
1204 	if (isCompressedFormat(image->getFormat()))
1205 		requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
1206 	else if (isYCbCrFormat(image->getFormat()))
1207 		requirements->size = getYCbCrImageDataSize(image->getFormat(), image->getExtent());
1208 	else
1209 		requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
1210 }
1211 
allocateMemory(VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)1212 VKAPI_ATTR VkResult VKAPI_CALL allocateMemory (VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory)
1213 {
1214 #ifndef CTS_USES_VULKANSC
1215 	const VkExportMemoryAllocateInfo* const					exportInfo	= findStructure<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
1216 	const VkImportAndroidHardwareBufferInfoANDROID* const	importInfo	= findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
1217 
1218 	if ((exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
1219 		|| (importInfo && importInfo->buffer.internal))
1220 	{
1221 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
1222 		VK_NULL_RETURN((*pMemory = allocateNonDispHandle<ExternalDeviceMemoryAndroid, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
1223 #else
1224 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1225 #endif
1226 	}
1227 	else
1228 	{
1229 		VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
1230 	}
1231 #else // CTS_USES_VULKANSC
1232 	VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
1233 #endif // CTS_USES_VULKANSC
1234 }
1235 
mapMemory(VkDevice,VkDeviceMemory memHandle,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)1236 VKAPI_ATTR VkResult VKAPI_CALL mapMemory (VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
1237 {
1238 	DeviceMemory* const	memory	= reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
1239 
1240 	DE_UNREF(size);
1241 	DE_UNREF(flags);
1242 
1243 	*ppData = (deUint8*)memory->map() + offset;
1244 
1245 	return VK_SUCCESS;
1246 }
1247 
unmapMemory(VkDevice device,VkDeviceMemory memHandle)1248 VKAPI_ATTR void VKAPI_CALL unmapMemory (VkDevice device, VkDeviceMemory memHandle)
1249 {
1250 	DeviceMemory* const	memory	= reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
1251 
1252 	DE_UNREF(device);
1253 
1254 	memory->unmap();
1255 }
1256 
1257 #ifndef CTS_USES_VULKANSC
1258 
getMemoryAndroidHardwareBufferANDROID(VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,pt::AndroidHardwareBufferPtr * pBuffer)1259 VKAPI_ATTR VkResult VKAPI_CALL getMemoryAndroidHardwareBufferANDROID (VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, pt::AndroidHardwareBufferPtr* pBuffer)
1260 {
1261 	DE_UNREF(device);
1262 
1263 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
1264 	DeviceMemory* const					memory			= reinterpret_cast<ExternalDeviceMemoryAndroid*>(pInfo->memory.getInternal());
1265 	ExternalDeviceMemoryAndroid* const	androidMemory	= static_cast<ExternalDeviceMemoryAndroid*>(memory);
1266 
1267 	AHardwareBuffer* hwbuffer = androidMemory->getHwBuffer();
1268 	AHardwareBuffer_acquire(hwbuffer);
1269 	pBuffer->internal = hwbuffer;
1270 #else
1271 	DE_UNREF(pInfo);
1272 	DE_UNREF(pBuffer);
1273 #endif
1274 
1275 	return VK_SUCCESS;
1276 }
1277 
1278 #endif // CTS_USES_VULKANSC
1279 
VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets (VkDevice, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
{
	DescriptorPool* const	poolImpl	= reinterpret_cast<DescriptorPool*>((deUintptr)pAllocateInfo->descriptorPool.getInternal());

	for (deUint32 ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
	{
		try
		{
			pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
		}
		catch (const std::bad_alloc&)
		{
			for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
				delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());

			return VK_ERROR_OUT_OF_HOST_MEMORY;
		}
		catch (VkResult res)
		{
			for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
				delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());

			return res;
		}
	}

	return VK_SUCCESS;
}

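// Illustrative usage sketch (kept out of the build): allocating a batch of descriptor
// sets from a pool object managed by this driver. If any allocation in the batch fails,
// the implementation above deletes the sets it already produced before returning, so
// the caller must not free those handles again. Names are placeholders.
#if 0
static VkResult exampleAllocateTwoSets (VkDevice device, VkDescriptorPool pool, VkDescriptorSetLayout layout, VkDescriptorSet outSets[2])
{
	const VkDescriptorSetLayout	layouts[2]	= { layout, layout };
	VkDescriptorSetAllocateInfo	allocInfo;

	allocInfo.sType					= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
	allocInfo.pNext					= DE_NULL;
	allocInfo.descriptorPool		= pool;
	allocInfo.descriptorSetCount	= DE_LENGTH_OF_ARRAY(layouts);
	allocInfo.pSetLayouts			= layouts;

	return allocateDescriptorSets(device, &allocInfo, outSets);
}
#endif
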
VKAPI_ATTR void VKAPI_CALL freeDescriptorSets (VkDevice, VkDescriptorPool descriptorPool, deUint32 count, const VkDescriptorSet* pDescriptorSets)
{
	DescriptorPool* const	poolImpl	= reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());

	for (deUint32 ndx = 0; ndx < count; ++ndx)
		poolImpl->free(pDescriptorSets[ndx]);
}

VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool (VkDevice, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags)
{
	DescriptorPool* const	poolImpl	= reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());

	poolImpl->reset();

	return VK_SUCCESS;
}

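// Illustrative note (kept out of the build): resetDescriptorPool() above simply forwards
// to DescriptorPool::reset(), which is expected to release every set the pool still owns.
// Sets allocated earlier should therefore be treated as invalid after the reset rather
// than freed individually. The helper name is a placeholder.
#if 0
static void exampleRecyclePool (VkDevice device, VkDescriptorPool pool)
{
	resetDescriptorPool(device, pool, (VkDescriptorPoolResetFlags)0);
	// Any VkDescriptorSet handles obtained from 'pool' before this point are now stale;
	// allocate fresh ones instead of calling freeDescriptorSets() on them.
}
#endif
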
VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
{
	DE_UNREF(device);

	if (pAllocateInfo && pCommandBuffers)
	{
		CommandPool* const	poolImpl	= reinterpret_cast<CommandPool*>((deUintptr)pAllocateInfo->commandPool.getInternal());

		for (deUint32 ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
			pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);
	}

	return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
{
	CommandPool* const	poolImpl	= reinterpret_cast<CommandPool*>((deUintptr)commandPool.getInternal());

	DE_UNREF(device);

	for (deUint32 ndx = 0; ndx < commandBufferCount; ++ndx)
		poolImpl->free(pCommandBuffers[ndx]);
}

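// Illustrative usage sketch (kept out of the build): command buffers come straight from
// the CommandPool object and are handed back to it on free. Names are placeholders.
#if 0
static void exampleAllocateAndFreeCommandBuffers (VkDevice device, VkCommandPool cmdPool)
{
	VkCommandBufferAllocateInfo	allocInfo;
	VkCommandBuffer				cmdBuffers[2];

	allocInfo.sType					= VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
	allocInfo.pNext					= DE_NULL;
	allocInfo.commandPool			= cmdPool;
	allocInfo.level					= VK_COMMAND_BUFFER_LEVEL_PRIMARY;
	allocInfo.commandBufferCount	= DE_LENGTH_OF_ARRAY(cmdBuffers);

	if (allocateCommandBuffers(device, &allocInfo, cmdBuffers) == VK_SUCCESS)
		freeCommandBuffers(device, cmdPool, DE_LENGTH_OF_ARRAY(cmdBuffers), cmdBuffers);
}
#endif
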

VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR (VkPhysicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode)
{
	DE_UNREF(pAllocator);
	VK_NULL_RETURN((*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
}

VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR (VkDevice device, deUint32 swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains)
{
	for (deUint32 ndx = 0; ndx < swapchainCount; ++ndx)
	{
		pSwapchains[ndx] = allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos+ndx, pAllocator);
	}

	return VK_SUCCESS;
}

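// Illustrative usage sketch (kept out of the build): createSharedSwapchainsKHR() above
// just allocates one independent SwapchainKHR object per create info, so each returned
// handle is destroyed separately once the caller is done with it. Names are placeholders.
#if 0
static VkResult exampleCreateSharedSwapchains (VkDevice device, const VkSwapchainCreateInfoKHR createInfos[2], VkSwapchainKHR outSwapchains[2])
{
	return createSharedSwapchainsKHR(device, 2u, createInfos, DE_NULL, outSwapchains);
}
#endif
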
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceExternalBufferPropertiesKHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties)
{
	DE_UNREF(physicalDevice);
	DE_UNREF(pExternalBufferInfo);

	pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0;
	pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
	pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0;

#ifndef CTS_USES_VULKANSC
	if (pExternalBufferInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
	{
		pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
		pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
		pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
	}
#endif // CTS_USES_VULKANSC
}

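// Illustrative usage sketch (kept out of the build): querying external buffer support.
// Only the Android hardware buffer handle type reports any features in the non-VulkanSC
// build; every other handle type comes back zeroed. The helper name is a placeholder.
#if 0
static void exampleQueryExternalBufferProps (VkPhysicalDevice physicalDevice, VkExternalBufferProperties* pProps)
{
	VkPhysicalDeviceExternalBufferInfo	info;

	info.sType		= VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
	info.pNext		= DE_NULL;
	info.flags		= 0u;
	info.usage		= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
	info.handleType	= VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;

	pProps->sType	= VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;
	pProps->pNext	= DE_NULL;

	getPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, &info, pProps);
}
#endif
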
VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties2KHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties)
{
#ifndef CTS_USES_VULKANSC
	const VkPhysicalDeviceExternalImageFormatInfo* const	externalInfo		= findStructure<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
	VkExternalImageFormatProperties*	const				externalProperties	= findStructure<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
	VkResult												result;

	result = getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
	if (result != VK_SUCCESS)
		return result;

	if (externalInfo && externalInfo->handleType != 0)
	{
		if (externalInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
			return VK_ERROR_FORMAT_NOT_SUPPORTED;

		if (!(pImageFormatInfo->format == VK_FORMAT_R8G8B8A8_UNORM
			  || pImageFormatInfo->format == VK_FORMAT_R8G8B8_UNORM
			  || pImageFormatInfo->format == VK_FORMAT_R5G6B5_UNORM_PACK16
			  || pImageFormatInfo->format == VK_FORMAT_R16G16B16A16_SFLOAT
			  || pImageFormatInfo->format == VK_FORMAT_A2R10G10B10_UNORM_PACK32))
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if (pImageFormatInfo->type != VK_IMAGE_TYPE_2D)
			return VK_ERROR_FORMAT_NOT_SUPPORTED;

		if ((pImageFormatInfo->usage & ~(VK_IMAGE_USAGE_TRANSFER_SRC_BIT
										| VK_IMAGE_USAGE_TRANSFER_DST_BIT
										| VK_IMAGE_USAGE_SAMPLED_BIT
										| VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
			!= 0)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if ((pImageFormatInfo->flags & ~(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
										/*| VK_IMAGE_CREATE_PROTECTED_BIT*/
										/*| VK_IMAGE_CREATE_EXTENDED_USAGE_BIT*/))
			!= 0)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if (externalProperties)
		{
			externalProperties->externalMemoryProperties.externalMemoryFeatures			= VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT
																						| VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT
																						| VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
			externalProperties->externalMemoryProperties.exportFromImportedHandleTypes	= VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
			externalProperties->externalMemoryProperties.compatibleHandleTypes			= VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
		}
	}

	return VK_SUCCESS;
#else // CTS_USES_VULKANSC
	return getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
#endif // CTS_USES_VULKANSC
}

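// Illustrative usage sketch (kept out of the build): probing Android hardware buffer
// image support through the query above. The implementation only accepts 2D images in
// a handful of formats with a restricted usage/flags set; anything else yields
// VK_ERROR_FORMAT_NOT_SUPPORTED. The helper name is a placeholder.
#if 0
static VkResult exampleQueryAhbImageSupport (VkPhysicalDevice physicalDevice)
{
	VkPhysicalDeviceExternalImageFormatInfo	externalInfo;
	VkPhysicalDeviceImageFormatInfo2		formatInfo;
	VkExternalImageFormatProperties			externalProps;
	VkImageFormatProperties2				props;

	externalInfo.sType		= VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
	externalInfo.pNext		= DE_NULL;
	externalInfo.handleType	= VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;

	formatInfo.sType	= VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
	formatInfo.pNext	= &externalInfo;
	formatInfo.format	= VK_FORMAT_R8G8B8A8_UNORM;
	formatInfo.type		= VK_IMAGE_TYPE_2D;
	formatInfo.tiling	= VK_IMAGE_TILING_OPTIMAL;
	formatInfo.usage	= VK_IMAGE_USAGE_SAMPLED_BIT;
	formatInfo.flags	= 0u;

	deMemset(&externalProps, 0, sizeof(externalProps));
	externalProps.sType	= VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;

	deMemset(&props, 0, sizeof(props));
	props.sType	= VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
	props.pNext	= &externalProps;

	return getPhysicalDeviceImageFormatProperties2KHR(physicalDevice, &formatInfo, &props);
}
#endif
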
// \note getInstanceProcAddr() needs special handling: vkNullDriverImpl.inl
// requires it in order to define s_platformFunctions, while the
// getInstanceProcAddr() implementation in turn needs other entry points from
// vkNullDriverImpl.inl. Hence the forward declaration before the include and
// the definition after it.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName);

#include "vkNullDriverImpl.inl"

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName)
{
	if (instance)
	{
		return reinterpret_cast<Instance*>(instance)->getProcAddr(pName);
	}
	else
	{
		const std::string	name	= pName;

		if (name == "vkCreateInstance")
			return (PFN_vkVoidFunction)createInstance;
		else if (name == "vkEnumerateInstanceExtensionProperties")
			return (PFN_vkVoidFunction)enumerateInstanceExtensionProperties;
		else if (name == "vkEnumerateInstanceLayerProperties")
			return (PFN_vkVoidFunction)enumerateInstanceLayerProperties;
		else
			return (PFN_vkVoidFunction)DE_NULL;
	}
}

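// Illustrative usage sketch (kept out of the build): bootstrapping through
// getInstanceProcAddr() above, much like a real loader would. With a null instance
// only the three global entry points resolve; once an instance exists, lookups are
// forwarded to Instance::getProcAddr(). The local typedef and helper name are
// placeholders.
#if 0
static VkInstance exampleBootstrapInstance (void)
{
	typedef VkResult (VKAPI_CALL* CreateInstanceProc) (const VkInstanceCreateInfo*, const VkAllocationCallbacks*, VkInstance*);

	const CreateInstanceProc	createInstanceProc	= (CreateInstanceProc)getInstanceProcAddr((VkInstance)DE_NULL, "vkCreateInstance");
	VkInstanceCreateInfo		createInfo;
	VkInstance					instance			= (VkInstance)DE_NULL;

	deMemset(&createInfo, 0, sizeof(createInfo));
	createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;

	if (createInstanceProc && createInstanceProc(&createInfo, DE_NULL, &instance) == VK_SUCCESS)
		return instance;

	return (VkInstance)DE_NULL;
}
#endif
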
} // extern "C"

Instance::Instance (const VkInstanceCreateInfo*)
	: m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
{
}

Device::Device (VkPhysicalDevice, const VkDeviceCreateInfo*)
	: m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
{
}

class NullDriverLibrary : public Library
{
public:
										NullDriverLibrary (void)
											: m_library	(s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
											, m_driver	(m_library)
										{}

	const PlatformInterface&			getPlatformInterface	(void) const	{ return m_driver;	}
	const tcu::FunctionLibrary&			getFunctionLibrary		(void) const	{ return m_library;	}
private:
	const tcu::StaticFunctionLibrary	m_library;
	const PlatformDriver				m_driver;
};

} // anonymous

Library* createNullDriver (void)
{
	return new NullDriverLibrary();
}
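
// Illustrative usage sketch (kept out of the build): typical use of the factory above.
// The returned Library exposes the null driver through the ordinary PlatformInterface,
// so from this point on callers interact with it exactly as they would with a real
// Vulkan loader. The helper name is a placeholder.
#if 0
static void exampleUseNullDriver (void)
{
	Library* const				library	= createNullDriver();
	const PlatformInterface&	vkp		= library->getPlatformInterface();

	// Resolve a global entry point the same way the framework does for a real ICD.
	const PFN_vkVoidFunction	createInstanceFunc	= vkp.getInstanceProcAddr((VkInstance)DE_NULL, "vkCreateInstance");
	DE_UNREF(createInstanceFunc);

	delete library;
}
#endif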

} // vk