/*-------------------------------------------------------------------------
 * Vulkan CTS Framework
 * --------------------
 *
 * Copyright (c) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Null (dummy) Vulkan implementation.
 *//*--------------------------------------------------------------------*/

#include "vkNullDriver.hpp"
#include "vkPlatform.hpp"
#include "vkImageUtil.hpp"
#include "vkQueryUtil.hpp"
#include "tcuFunctionLibrary.hpp"
#include "deMemory.h"

#if (DE_OS == DE_OS_ANDROID) && defined(__ANDROID_API_O__) && (DE_ANDROID_API >= __ANDROID_API_O__ /* __ANDROID_API_O__ */)
#  define USE_ANDROID_O_HARDWARE_BUFFER
#endif
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
#  include <android/hardware_buffer.h>
#endif

#include <stdexcept>
#include <algorithm>

namespace vk
{

namespace
{

using std::vector;

// Memory management

template<typename T>
void* allocateSystemMem (const VkAllocationCallbacks* pAllocator, VkSystemAllocationScope scope)
{
    void* ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void*), scope);
    if (!ptr)
        throw std::bad_alloc();
    return ptr;
}

void freeSystemMem (const VkAllocationCallbacks* pAllocator, void* mem)
{
    pAllocator->pfnFree(pAllocator->pUserData, mem);
}

template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
    Object* obj = DE_NULL;

    if (pAllocator)
    {
        void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
        try
        {
            obj = new (mem) Object(parent, pCreateInfo);
            DE_ASSERT(obj == mem);
        }
        catch (...)
        {
            pAllocator->pfnFree(pAllocator->pUserData, mem);
            throw;
        }
    }
    else
        obj = new Object(parent, pCreateInfo);

    return reinterpret_cast<Handle>(obj);
}

template<typename Object, typename Handle, typename CreateInfo>
Handle allocateHandle (const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
    Object* obj = DE_NULL;

    if (pAllocator)
    {
        void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
        try
        {
            obj = new (mem) Object(pCreateInfo);
            DE_ASSERT(obj == mem);
        }
        catch (...)
        {
            pAllocator->pfnFree(pAllocator->pUserData, mem);
            throw;
        }
    }
    else
        obj = new Object(pCreateInfo);

    return reinterpret_cast<Handle>(obj);
}

template<typename Object, typename Handle>
void freeHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
{
    Object* obj = reinterpret_cast<Object*>(handle);

    if (pAllocator)
    {
        obj->~Object();
        freeSystemMem(pAllocator, reinterpret_cast<void*>(obj));
    }
    else
        delete obj;
}

template<typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
    Object* const obj = allocateHandle<Object, Object*>(parent, pCreateInfo, pAllocator);
    return Handle((deUint64)(deUintptr)static_cast<BaseObject*>(obj));
}

template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
    return allocateNonDispHandle<Object, Object, Handle, Parent, CreateInfo>(parent, pCreateInfo, pAllocator);
}

template<typename Object, typename Handle>
void freeNonDispHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
{
    freeHandle<Object>(reinterpret_cast<Object*>((deUintptr)handle.getInternal()), pAllocator);
}

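// Note on the handle scheme used above: dispatchable handles (VkDevice, VkQueue,
// VkCommandBuffer, ...) are plain pointers to the implementation objects, while
// non-dispatchable handles pack the object pointer into the 64-bit handle value in
// allocateNonDispHandle() and unpack it again with handle.getInternal(). When the
// caller supplies VkAllocationCallbacks, objects are placement-new'd into memory from
// pfnAllocation and destroyed manually; otherwise plain new/delete is used.
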
// Object definitions

#define VK_NULL_RETURN(STMT)                    \
    do {                                        \
        try {                                   \
            STMT;                               \
            return VK_SUCCESS;                  \
        } catch (const std::bad_alloc&) {       \
            return VK_ERROR_OUT_OF_HOST_MEMORY; \
        } catch (VkResult res) {                \
            return res;                         \
        }                                       \
    } while (deGetFalse())
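
// VK_NULL_RETURN wraps an object-creating statement so that allocation failures and
// thrown VkResult codes are converted into the corresponding return value. The create
// entry points generated in vkNullDriverImpl.inl follow this pattern; illustrative
// sketch (exact generated code lives in the .inl):
//
//   VK_NULL_RETURN((*pFence = allocateNonDispHandle<Fence, VkFence>(device, pCreateInfo, pAllocator)));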

// \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
#define VK_NULL_FUNC_ENTRY(NAME, FUNC) { #NAME, (deFunctionPtr)FUNC } // NOLINT(FUNC)

#define VK_NULL_DEFINE_DEVICE_OBJ(NAME)                 \
    struct NAME                                         \
    {                                                   \
        NAME (VkDevice, const Vk##NAME##CreateInfo*) {} \
    }

VK_NULL_DEFINE_DEVICE_OBJ(Fence);
VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
VK_NULL_DEFINE_DEVICE_OBJ(Event);
VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);

class Instance
{
public:
    Instance (const VkInstanceCreateInfo* instanceInfo);
    ~Instance (void) {}

    PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }

private:
    const tcu::StaticFunctionLibrary m_functions;
};

class SurfaceKHR
{
public:
    SurfaceKHR (VkInstance, const VkXlibSurfaceCreateInfoKHR*) {}
    SurfaceKHR (VkInstance, const VkXcbSurfaceCreateInfoKHR*) {}
    SurfaceKHR (VkInstance, const VkWaylandSurfaceCreateInfoKHR*) {}
    SurfaceKHR (VkInstance, const VkMirSurfaceCreateInfoKHR*) {}
    SurfaceKHR (VkInstance, const VkAndroidSurfaceCreateInfoKHR*) {}
    SurfaceKHR (VkInstance, const VkWin32SurfaceCreateInfoKHR*) {}
    SurfaceKHR (VkInstance, const VkDisplaySurfaceCreateInfoKHR*) {}
    SurfaceKHR (VkInstance, const VkViSurfaceCreateInfoNN*) {}
    SurfaceKHR (VkInstance, const VkIOSSurfaceCreateInfoMVK*) {}
    SurfaceKHR (VkInstance, const VkMacOSSurfaceCreateInfoMVK*) {}
    ~SurfaceKHR (void) {}
};

class DisplayModeKHR
{
public:
    DisplayModeKHR (VkDisplayKHR, const VkDisplayModeCreateInfoKHR*) {}
    ~DisplayModeKHR (void) {}
};

class DebugReportCallbackEXT
{
public:
    DebugReportCallbackEXT (VkInstance, const VkDebugReportCallbackCreateInfoEXT*) {}
    ~DebugReportCallbackEXT (void) {}
};

class Device
{
public:
    Device (VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* deviceInfo);
    ~Device (void) {}

    PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }

private:
    const tcu::StaticFunctionLibrary m_functions;
};

class Pipeline
{
public:
    Pipeline (VkDevice, const VkGraphicsPipelineCreateInfo*) {}
    Pipeline (VkDevice, const VkComputePipelineCreateInfo*) {}
};

class RenderPass
{
public:
    RenderPass (VkDevice, const VkRenderPassCreateInfo*) {}
    RenderPass (VkDevice, const VkRenderPassCreateInfo2KHR*) {}
};

class SwapchainKHR
{
public:
    SwapchainKHR (VkDevice, const VkSwapchainCreateInfoKHR*) {}
    ~SwapchainKHR (void) {}
};

class SamplerYcbcrConversion
{
public:
    SamplerYcbcrConversion (VkDevice, const VkSamplerYcbcrConversionCreateInfo*) {}
};

class Buffer
{
public:
    Buffer (VkDevice, const VkBufferCreateInfo* pCreateInfo)
        : m_size(pCreateInfo->size)
    {
    }

    VkDeviceSize getSize (void) const { return m_size; }

private:
    const VkDeviceSize m_size;
};

VkExternalMemoryHandleTypeFlags getExternalTypesHandle (const VkImageCreateInfo* pCreateInfo)
{
    const VkExternalMemoryImageCreateInfo* const externalInfo = findStructure<VkExternalMemoryImageCreateInfo>(pCreateInfo->pNext);

    return externalInfo ? externalInfo->handleTypes : 0u;
}

class Image
{
public:
    Image (VkDevice, const VkImageCreateInfo* pCreateInfo)
        : m_imageType           (pCreateInfo->imageType)
        , m_format              (pCreateInfo->format)
        , m_extent              (pCreateInfo->extent)
        , m_arrayLayers         (pCreateInfo->arrayLayers)
        , m_samples             (pCreateInfo->samples)
        , m_usage               (pCreateInfo->usage)
        , m_flags               (pCreateInfo->flags)
        , m_externalHandleTypes (getExternalTypesHandle(pCreateInfo))
    {
    }

    VkImageType getImageType (void) const { return m_imageType; }
    VkFormat getFormat (void) const { return m_format; }
    VkExtent3D getExtent (void) const { return m_extent; }
    deUint32 getArrayLayers (void) const { return m_arrayLayers; }
    VkSampleCountFlagBits getSamples (void) const { return m_samples; }
    VkImageUsageFlags getUsage (void) const { return m_usage; }
    VkImageCreateFlags getFlags (void) const { return m_flags; }
    VkExternalMemoryHandleTypeFlags getExternalHandleTypes (void) const { return m_externalHandleTypes; }

private:
    const VkImageType m_imageType;
    const VkFormat m_format;
    const VkExtent3D m_extent;
    const deUint32 m_arrayLayers;
    const VkSampleCountFlagBits m_samples;
    const VkImageUsageFlags m_usage;
    const VkImageCreateFlags m_flags;
    const VkExternalMemoryHandleTypeFlags m_externalHandleTypes;
};

void* allocateHeap (const VkMemoryAllocateInfo* pAllocInfo)
{
    // \todo [2015-12-03 pyry] Alignment requirements?
    // \todo [2015-12-03 pyry] Empty allocations okay?
    if (pAllocInfo->allocationSize > 0)
    {
        void* const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
        if (!heapPtr)
            throw std::bad_alloc();
        return heapPtr;
    }
    else
        return DE_NULL;
}

void freeHeap (void* ptr)
{
    deFree(ptr);
}

class DeviceMemory
{
public:
    virtual ~DeviceMemory (void) {}
    virtual void* map (void) = 0;
    virtual void unmap (void) = 0;
};

class PrivateDeviceMemory : public DeviceMemory
{
public:
    PrivateDeviceMemory (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
        : m_memory(allocateHeap(pAllocInfo))
    {
        // \todo [2016-08-03 pyry] In some cases leaving data uninitialized would help valgrind analysis,
        //                         but currently it mostly hinders it.
        if (m_memory)
            deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
    }
    virtual ~PrivateDeviceMemory (void)
    {
        freeHeap(m_memory);
    }

    virtual void* map (void) /*override*/ { return m_memory; }
    virtual void unmap (void) /*override*/ {}

private:
    void* const m_memory;
};
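
// PrivateDeviceMemory backs vkAllocateMemory() with a plain host heap allocation; the
// 0xcd fill gives mapped memory deterministic, non-zero initial contents.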

#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
AHardwareBuffer* findOrCreateHwBuffer (const VkMemoryAllocateInfo* pAllocInfo)
{
    const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocInfo->pNext);
    const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocInfo->pNext);
    const VkMemoryDedicatedAllocateInfo* const dedicatedInfo = findStructure<VkMemoryDedicatedAllocateInfo>(pAllocInfo->pNext);
    const Image* const image = dedicatedInfo && !!dedicatedInfo->image ? reinterpret_cast<const Image*>(dedicatedInfo->image.getInternal()) : DE_NULL;
    AHardwareBuffer* hwbuffer = DE_NULL;

    // Import and export aren't mutually exclusive; we can have both simultaneously.
    DE_ASSERT((importInfo && importInfo->buffer.internal) ||
              (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0));

    if (importInfo && importInfo->buffer.internal)
    {
        hwbuffer = (AHardwareBuffer*)importInfo->buffer.internal;
        AHardwareBuffer_acquire(hwbuffer);
    }
    else if (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
    {
        AHardwareBuffer_Desc hwbufferDesc;
        deMemset(&hwbufferDesc, 0, sizeof(hwbufferDesc));

        if (image)
        {
            hwbufferDesc.width  = image->getExtent().width;
            hwbufferDesc.height = image->getExtent().height;
            hwbufferDesc.layers = image->getArrayLayers();
            switch (image->getFormat())
            {
                case VK_FORMAT_R8G8B8A8_UNORM:
                    hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
                    break;
                case VK_FORMAT_R8G8B8_UNORM:
                    hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
                    break;
                case VK_FORMAT_R5G6B5_UNORM_PACK16:
                    hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
                    break;
                case VK_FORMAT_R16G16B16A16_SFLOAT:
                    hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
                    break;
                case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
                    hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
                    break;
                default:
                    DE_FATAL("Unsupported image format for Android hardware buffer export");
                    break;
            }
            if ((image->getUsage() & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
            if ((image->getUsage() & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
            // if ((image->getFlags() & VK_IMAGE_CREATE_PROTECTED_BIT) != 0)
            //     hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

            // Make sure we have at least one AHB GPU usage, even if the image doesn't have
            // any Vulkan usages corresponding to AHB GPU usages.
            if ((image->getUsage() & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) == 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
        }
        else
        {
            hwbufferDesc.width  = static_cast<deUint32>(pAllocInfo->allocationSize);
            hwbufferDesc.height = 1;
            hwbufferDesc.layers = 1;
            hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_BLOB;
            hwbufferDesc.usage  = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
        }

        AHardwareBuffer_allocate(&hwbufferDesc, &hwbuffer);
    }

    return hwbuffer;
}
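
// findOrCreateHwBuffer() either acquires a reference on an imported AHardwareBuffer or
// allocates a new one for export: a dedicated image allocation derives the buffer
// description from the Image object, while a non-dedicated allocation falls back to a
// BLOB-format buffer sized by allocationSize.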

class ExternalDeviceMemoryAndroid : public DeviceMemory
{
public:
    ExternalDeviceMemoryAndroid (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
        : m_hwbuffer(findOrCreateHwBuffer(pAllocInfo))
    {}
    virtual ~ExternalDeviceMemoryAndroid (void)
    {
        if (m_hwbuffer)
            AHardwareBuffer_release(m_hwbuffer);
    }

    virtual void* map (void) /*override*/
    {
        void* p;
        AHardwareBuffer_lock(m_hwbuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, NULL, &p);
        return p;
    }

    virtual void unmap (void) /*override*/ { AHardwareBuffer_unlock(m_hwbuffer, NULL); }

    AHardwareBuffer* getHwBuffer (void) { return m_hwbuffer; }

private:
    AHardwareBuffer* const m_hwbuffer;
};
#endif // defined(USE_ANDROID_O_HARDWARE_BUFFER)

class IndirectCommandsLayoutNVX
{
public:
    IndirectCommandsLayoutNVX (VkDevice, const VkIndirectCommandsLayoutCreateInfoNVX*)
    {}
};

class ObjectTableNVX
{
public:
    ObjectTableNVX (VkDevice, const VkObjectTableCreateInfoNVX*)
    {}
};

class ValidationCacheEXT
{
public:
    ValidationCacheEXT (VkDevice, const VkValidationCacheCreateInfoEXT*)
    {}
};

class CommandBuffer
{
public:
    CommandBuffer (VkDevice, VkCommandPool, VkCommandBufferLevel)
    {}
};

class DescriptorUpdateTemplate
{
public:
    DescriptorUpdateTemplate (VkDevice, const VkDescriptorUpdateTemplateCreateInfo*)
    {}
};


class CommandPool
{
public:
    CommandPool (VkDevice device, const VkCommandPoolCreateInfo*)
        : m_device(device)
    {}
    ~CommandPool (void);

    VkCommandBuffer allocate (VkCommandBufferLevel level);
    void free (VkCommandBuffer buffer);

private:
    const VkDevice m_device;

    vector<CommandBuffer*> m_buffers;
};

CommandPool::~CommandPool (void)
{
    for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
        delete m_buffers[ndx];
}

VkCommandBuffer CommandPool::allocate (VkCommandBufferLevel level)
{
    CommandBuffer* const impl = new CommandBuffer(m_device, VkCommandPool(reinterpret_cast<deUintptr>(this)), level);

    try
    {
        m_buffers.push_back(impl);
    }
    catch (...)
    {
        delete impl;
        throw;
    }

    return reinterpret_cast<VkCommandBuffer>(impl);
}

void CommandPool::free (VkCommandBuffer buffer)
{
    CommandBuffer* const impl = reinterpret_cast<CommandBuffer*>(buffer);

    for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
    {
        if (m_buffers[ndx] == impl)
        {
            std::swap(m_buffers[ndx], m_buffers.back());
            m_buffers.pop_back();
            delete impl;
            return;
        }
    }

    DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
}
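
// Command buffers are owned by their CommandPool: the pool tracks every allocation and
// deletes any remaining buffers in its destructor, mirroring Vulkan pool semantics.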

class DescriptorSet
{
public:
    DescriptorSet (VkDevice, VkDescriptorPool, VkDescriptorSetLayout) {}
};

class DescriptorPool
{
public:
    DescriptorPool (VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo)
        : m_device (device)
        , m_flags  (pCreateInfo->flags)
    {}
    ~DescriptorPool (void)
    {
        reset();
    }

    VkDescriptorSet allocate (VkDescriptorSetLayout setLayout);
    void free (VkDescriptorSet set);

    void reset (void);

private:
    const VkDevice m_device;
    const VkDescriptorPoolCreateFlags m_flags;

    vector<DescriptorSet*> m_managedSets;
};

VkDescriptorSet DescriptorPool::allocate (VkDescriptorSetLayout setLayout)
{
    DescriptorSet* const impl = new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<deUintptr>(this)), setLayout);

    try
    {
        m_managedSets.push_back(impl);
    }
    catch (...)
    {
        delete impl;
        throw;
    }

    return VkDescriptorSet(reinterpret_cast<deUintptr>(impl));
}

void DescriptorPool::free (VkDescriptorSet set)
{
    DescriptorSet* const impl = reinterpret_cast<DescriptorSet*>((deUintptr)set.getInternal());

    DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
    DE_UNREF(m_flags);

    for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
    {
        if (m_managedSets[ndx] == impl)
        {
            std::swap(m_managedSets[ndx], m_managedSets.back());
            m_managedSets.pop_back();
            delete impl;
            return;
        }
    }

    DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
}

void DescriptorPool::reset (void)
{
    for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
        delete m_managedSets[ndx];
    m_managedSets.clear();
}

// API implementation

extern "C"
{

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr (VkDevice device, const char* pName)
{
    return reinterpret_cast<Device*>(device)->getProcAddr(pName);
}

VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
    deUint32 allocNdx;
    try
    {
        for (allocNdx = 0; allocNdx < count; allocNdx++)
            pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);

        return VK_SUCCESS;
    }
    catch (const std::bad_alloc&)
    {
        for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
            freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
    catch (VkResult err)
    {
        for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
            freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

        return err;
    }
}

VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
    deUint32 allocNdx;
    try
    {
        for (allocNdx = 0; allocNdx < count; allocNdx++)
            pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);

        return VK_SUCCESS;
    }
    catch (const std::bad_alloc&)
    {
        for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
            freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
    catch (VkResult err)
    {
        for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
            freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

        return err;
    }
}

VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices (VkInstance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pDevices)
{
    if (pDevices && *pPhysicalDeviceCount >= 1u)
        *pDevices = reinterpret_cast<VkPhysicalDevice>((void*)(deUintptr)1u);

    *pPhysicalDeviceCount = 1;

    return VK_SUCCESS;
}
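
// The null driver exposes exactly one fake physical device; its handle is simply the
// integer value 1 cast to VkPhysicalDevice, and nothing in this driver dereferences it.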

VkResult enumerateExtensions (deUint32 numExtensions, const VkExtensionProperties* extensions, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
    const deUint32 dstSize = pPropertyCount ? *pPropertyCount : 0;

    if (pPropertyCount)
        *pPropertyCount = numExtensions;

    if (pProperties)
    {
        for (deUint32 ndx = 0; ndx < de::min(numExtensions, dstSize); ++ndx)
            pProperties[ndx] = extensions[ndx];

        if (dstSize < numExtensions)
            return VK_INCOMPLETE;
    }

    return VK_SUCCESS;
}
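
// enumerateExtensions() implements the standard Vulkan two-call idiom: with pProperties
// == NULL it only reports the extension count, otherwise it copies at most *pPropertyCount
// entries and returns VK_INCOMPLETE if the caller's array was too small.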

VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties (const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
    static const VkExtensionProperties s_extensions[] =
    {
        { "VK_KHR_get_physical_device_properties2", 1u },
        { "VK_KHR_external_memory_capabilities", 1u },
    };

    if (!pLayerName)
        return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
    else
        return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties (VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
    DE_UNREF(physicalDevice);

    static const VkExtensionProperties s_extensions[] =
    {
        { "VK_KHR_bind_memory2", 1u },
        { "VK_KHR_external_memory", 1u },
        { "VK_KHR_get_memory_requirements2", 1u },
        { "VK_KHR_maintenance1", 1u },
        { "VK_KHR_sampler_ycbcr_conversion", 1u },
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
        { "VK_ANDROID_external_memory_android_hardware_buffer", 1u },
#endif
    };

    if (!pLayerName)
        return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
    else
        return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
{
    DE_UNREF(physicalDevice);

    // Enable all features to allow as many tests as possible to run.
    pFeatures->robustBufferAccess = VK_TRUE;
    pFeatures->fullDrawIndexUint32 = VK_TRUE;
    pFeatures->imageCubeArray = VK_TRUE;
    pFeatures->independentBlend = VK_TRUE;
    pFeatures->geometryShader = VK_TRUE;
    pFeatures->tessellationShader = VK_TRUE;
    pFeatures->sampleRateShading = VK_TRUE;
    pFeatures->dualSrcBlend = VK_TRUE;
    pFeatures->logicOp = VK_TRUE;
    pFeatures->multiDrawIndirect = VK_TRUE;
    pFeatures->drawIndirectFirstInstance = VK_TRUE;
    pFeatures->depthClamp = VK_TRUE;
    pFeatures->depthBiasClamp = VK_TRUE;
    pFeatures->fillModeNonSolid = VK_TRUE;
    pFeatures->depthBounds = VK_TRUE;
    pFeatures->wideLines = VK_TRUE;
    pFeatures->largePoints = VK_TRUE;
    pFeatures->alphaToOne = VK_TRUE;
    pFeatures->multiViewport = VK_TRUE;
    pFeatures->samplerAnisotropy = VK_TRUE;
    pFeatures->textureCompressionETC2 = VK_TRUE;
    pFeatures->textureCompressionASTC_LDR = VK_TRUE;
    pFeatures->textureCompressionBC = VK_TRUE;
    pFeatures->occlusionQueryPrecise = VK_TRUE;
    pFeatures->pipelineStatisticsQuery = VK_TRUE;
    pFeatures->vertexPipelineStoresAndAtomics = VK_TRUE;
    pFeatures->fragmentStoresAndAtomics = VK_TRUE;
    pFeatures->shaderTessellationAndGeometryPointSize = VK_TRUE;
    pFeatures->shaderImageGatherExtended = VK_TRUE;
    pFeatures->shaderStorageImageExtendedFormats = VK_TRUE;
    pFeatures->shaderStorageImageMultisample = VK_TRUE;
    pFeatures->shaderStorageImageReadWithoutFormat = VK_TRUE;
    pFeatures->shaderStorageImageWriteWithoutFormat = VK_TRUE;
    pFeatures->shaderUniformBufferArrayDynamicIndexing = VK_TRUE;
    pFeatures->shaderSampledImageArrayDynamicIndexing = VK_TRUE;
    pFeatures->shaderStorageBufferArrayDynamicIndexing = VK_TRUE;
    pFeatures->shaderStorageImageArrayDynamicIndexing = VK_TRUE;
    pFeatures->shaderClipDistance = VK_TRUE;
    pFeatures->shaderCullDistance = VK_TRUE;
    pFeatures->shaderFloat64 = VK_TRUE;
    pFeatures->shaderInt64 = VK_TRUE;
    pFeatures->shaderInt16 = VK_TRUE;
    pFeatures->shaderResourceResidency = VK_TRUE;
    pFeatures->shaderResourceMinLod = VK_TRUE;
    pFeatures->sparseBinding = VK_TRUE;
    pFeatures->sparseResidencyBuffer = VK_TRUE;
    pFeatures->sparseResidencyImage2D = VK_TRUE;
    pFeatures->sparseResidencyImage3D = VK_TRUE;
    pFeatures->sparseResidency2Samples = VK_TRUE;
    pFeatures->sparseResidency4Samples = VK_TRUE;
    pFeatures->sparseResidency8Samples = VK_TRUE;
    pFeatures->sparseResidency16Samples = VK_TRUE;
    pFeatures->sparseResidencyAliased = VK_TRUE;
    pFeatures->variableMultisampleRate = VK_TRUE;
    pFeatures->inheritedQueries = VK_TRUE;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties (VkPhysicalDevice, VkPhysicalDeviceProperties* props)
{
    deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));

    props->apiVersion = VK_API_VERSION_1_1;
    props->driverVersion = 1u;
    props->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;

    deMemcpy(props->deviceName, "null", 5);

    // Spec minmax
    props->limits.maxImageDimension1D = 4096;
    props->limits.maxImageDimension2D = 4096;
    props->limits.maxImageDimension3D = 256;
    props->limits.maxImageDimensionCube = 4096;
    props->limits.maxImageArrayLayers = 256;
    props->limits.maxTexelBufferElements = 65536;
    props->limits.maxUniformBufferRange = 16384;
    props->limits.maxStorageBufferRange = 1u<<27;
    props->limits.maxPushConstantsSize = 128;
    props->limits.maxMemoryAllocationCount = 4096;
    props->limits.maxSamplerAllocationCount = 4000;
    props->limits.bufferImageGranularity = 131072;
    props->limits.sparseAddressSpaceSize = 1u<<31;
    props->limits.maxBoundDescriptorSets = 4;
    props->limits.maxPerStageDescriptorSamplers = 16;
    props->limits.maxPerStageDescriptorUniformBuffers = 12;
    props->limits.maxPerStageDescriptorStorageBuffers = 4;
    props->limits.maxPerStageDescriptorSampledImages = 16;
    props->limits.maxPerStageDescriptorStorageImages = 4;
    props->limits.maxPerStageDescriptorInputAttachments = 4;
    props->limits.maxPerStageResources = 128;
    props->limits.maxDescriptorSetSamplers = 96;
    props->limits.maxDescriptorSetUniformBuffers = 72;
    props->limits.maxDescriptorSetUniformBuffersDynamic = 8;
    props->limits.maxDescriptorSetStorageBuffers = 24;
    props->limits.maxDescriptorSetStorageBuffersDynamic = 4;
    props->limits.maxDescriptorSetSampledImages = 96;
    props->limits.maxDescriptorSetStorageImages = 24;
    props->limits.maxDescriptorSetInputAttachments = 4;
    props->limits.maxVertexInputAttributes = 16;
    props->limits.maxVertexInputBindings = 16;
    props->limits.maxVertexInputAttributeOffset = 2047;
    props->limits.maxVertexInputBindingStride = 2048;
    props->limits.maxVertexOutputComponents = 64;
    props->limits.maxTessellationGenerationLevel = 64;
    props->limits.maxTessellationPatchSize = 32;
    props->limits.maxTessellationControlPerVertexInputComponents = 64;
    props->limits.maxTessellationControlPerVertexOutputComponents = 64;
    props->limits.maxTessellationControlPerPatchOutputComponents = 120;
    props->limits.maxTessellationControlTotalOutputComponents = 2048;
    props->limits.maxTessellationEvaluationInputComponents = 64;
    props->limits.maxTessellationEvaluationOutputComponents = 64;
    props->limits.maxGeometryShaderInvocations = 32;
    props->limits.maxGeometryInputComponents = 64;
    props->limits.maxGeometryOutputComponents = 64;
    props->limits.maxGeometryOutputVertices = 256;
    props->limits.maxGeometryTotalOutputComponents = 1024;
    props->limits.maxFragmentInputComponents = 64;
    props->limits.maxFragmentOutputAttachments = 4;
    props->limits.maxFragmentDualSrcAttachments = 1;
    props->limits.maxFragmentCombinedOutputResources = 4;
    props->limits.maxComputeSharedMemorySize = 16384;
    props->limits.maxComputeWorkGroupCount[0] = 65535;
    props->limits.maxComputeWorkGroupCount[1] = 65535;
    props->limits.maxComputeWorkGroupCount[2] = 65535;
    props->limits.maxComputeWorkGroupInvocations = 128;
    props->limits.maxComputeWorkGroupSize[0] = 128;
    props->limits.maxComputeWorkGroupSize[1] = 128;
    props->limits.maxComputeWorkGroupSize[2] = 128;
    props->limits.subPixelPrecisionBits = 4;
    props->limits.subTexelPrecisionBits = 4;
    props->limits.mipmapPrecisionBits = 4;
    props->limits.maxDrawIndexedIndexValue = 0xffffffffu;
    props->limits.maxDrawIndirectCount = (1u<<16) - 1u;
    props->limits.maxSamplerLodBias = 2.0f;
    props->limits.maxSamplerAnisotropy = 16.0f;
    props->limits.maxViewports = 16;
    props->limits.maxViewportDimensions[0] = 4096;
    props->limits.maxViewportDimensions[1] = 4096;
    props->limits.viewportBoundsRange[0] = -8192.f;
    props->limits.viewportBoundsRange[1] = 8191.f;
    props->limits.viewportSubPixelBits = 0;
    props->limits.minMemoryMapAlignment = 64;
    props->limits.minTexelBufferOffsetAlignment = 256;
    props->limits.minUniformBufferOffsetAlignment = 256;
    props->limits.minStorageBufferOffsetAlignment = 256;
    props->limits.minTexelOffset = -8;
    props->limits.maxTexelOffset = 7;
    props->limits.minTexelGatherOffset = -8;
    props->limits.maxTexelGatherOffset = 7;
    props->limits.minInterpolationOffset = -0.5f;
    props->limits.maxInterpolationOffset = 0.5f; // -1ulp
    props->limits.subPixelInterpolationOffsetBits = 4;
    props->limits.maxFramebufferWidth = 4096;
    props->limits.maxFramebufferHeight = 4096;
    props->limits.maxFramebufferLayers = 256;
    props->limits.framebufferColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
    props->limits.framebufferDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
    props->limits.framebufferStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
    props->limits.framebufferNoAttachmentsSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
    props->limits.maxColorAttachments = 4;
    props->limits.sampledImageColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
    props->limits.sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT;
    props->limits.sampledImageDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
    props->limits.sampledImageStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
    props->limits.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
    props->limits.maxSampleMaskWords = 1;
    props->limits.timestampComputeAndGraphics = VK_TRUE;
    props->limits.timestampPeriod = 1.0f;
    props->limits.maxClipDistances = 8;
    props->limits.maxCullDistances = 8;
    props->limits.maxCombinedClipAndCullDistances = 8;
    props->limits.discreteQueuePriorities = 2;
    props->limits.pointSizeRange[0] = 1.0f;
    props->limits.pointSizeRange[1] = 64.0f; // -1ulp
    props->limits.lineWidthRange[0] = 1.0f;
    props->limits.lineWidthRange[1] = 8.0f; // -1ulp
    props->limits.pointSizeGranularity = 1.0f;
    props->limits.lineWidthGranularity = 1.0f;
    props->limits.strictLines = 0;
    props->limits.standardSampleLocations = VK_TRUE;
    props->limits.optimalBufferCopyOffsetAlignment = 256;
    props->limits.optimalBufferCopyRowPitchAlignment = 256;
    props->limits.nonCoherentAtomSize = 128;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice, deUint32* count, VkQueueFamilyProperties* props)
{
    if (props && *count >= 1u)
    {
        deMemset(props, 0, sizeof(VkQueueFamilyProperties));

        props->queueCount = 4u;
        props->queueFlags = VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT;
        props->timestampValidBits = 64;
    }

    *count = 1u;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties (VkPhysicalDevice, VkPhysicalDeviceMemoryProperties* props)
{
    deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));

    props->memoryTypeCount = 1u;
    props->memoryTypes[0].heapIndex = 0u;
    props->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
                                        | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
                                        | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    props->memoryHeapCount = 1u;
    props->memoryHeaps[0].size = 1ull << 31;
    props->memoryHeaps[0].flags = 0u;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties (VkPhysicalDevice, VkFormat format, VkFormatProperties* pFormatProperties)
{
    const VkFormatFeatureFlags allFeatures = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
                                           | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT
                                           | VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT
                                           | VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT
                                           | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT
                                           | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT
                                           | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT
                                           | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
                                           | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT
                                           | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT
                                           | VK_FORMAT_FEATURE_BLIT_SRC_BIT
                                           | VK_FORMAT_FEATURE_BLIT_DST_BIT
                                           | VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT
                                           | VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
                                           | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT
                                           | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT
                                           | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT
                                           | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT
                                           | VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT;

    pFormatProperties->linearTilingFeatures = allFeatures;
    pFormatProperties->optimalTilingFeatures = allFeatures;
    pFormatProperties->bufferFeatures = allFeatures;

    if (isYCbCrFormat(format) && getPlaneCount(format) > 1)
        pFormatProperties->optimalTilingFeatures |= VK_FORMAT_FEATURE_DISJOINT_BIT;
}

VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
{
    DE_UNREF(physicalDevice);
    DE_UNREF(format);
    DE_UNREF(type);
    DE_UNREF(tiling);
    DE_UNREF(usage);
    DE_UNREF(flags);

    pImageFormatProperties->maxArrayLayers = 8;
    pImageFormatProperties->maxExtent.width = 4096;
    pImageFormatProperties->maxExtent.height = 4096;
    pImageFormatProperties->maxExtent.depth = 4096;
    pImageFormatProperties->maxMipLevels = deLog2Ceil32(4096) + 1;
    pImageFormatProperties->maxResourceSize = 64u * 1024u * 1024u;
    pImageFormatProperties->sampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;

    return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue)
{
    DE_UNREF(device);
    DE_UNREF(queueFamilyIndex);

    if (pQueue)
        *pQueue = reinterpret_cast<VkQueue>((deUint64)queueIndex + 1);
}

VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements (VkDevice, VkBuffer bufferHandle, VkMemoryRequirements* requirements)
{
    const Buffer* buffer = reinterpret_cast<const Buffer*>(bufferHandle.getInternal());

    requirements->memoryTypeBits = 1u;
    requirements->size = buffer->getSize();
    requirements->alignment = (VkDeviceSize)1u;
}

VkDeviceSize getPackedImageDataSize (VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
{
    return (VkDeviceSize)getPixelSize(mapVkFormat(format))
         * (VkDeviceSize)extent.width
         * (VkDeviceSize)extent.height
         * (VkDeviceSize)extent.depth
         * (VkDeviceSize)samples;
}

VkDeviceSize getCompressedImageDataSize (VkFormat format, VkExtent3D extent)
{
    try
    {
        const tcu::CompressedTexFormat tcuFormat = mapVkCompressedFormat(format);
        const size_t blockSize = tcu::getBlockSize(tcuFormat);
        const tcu::IVec3 blockPixelSize = tcu::getBlockPixelSize(tcuFormat);
        const int numBlocksX = deDivRoundUp32((int)extent.width, blockPixelSize.x());
        const int numBlocksY = deDivRoundUp32((int)extent.height, blockPixelSize.y());
        const int numBlocksZ = deDivRoundUp32((int)extent.depth, blockPixelSize.z());

        return blockSize*numBlocksX*numBlocksY*numBlocksZ;
    }
    catch (...)
    {
        return 0; // Unsupported compressed format
    }
}

VkDeviceSize getYCbCrImageDataSize (VkFormat format, VkExtent3D extent)
{
    const PlanarFormatDescription desc = getPlanarFormatDescription(format);
    VkDeviceSize totalSize = 0;

    DE_ASSERT(extent.depth == 1);

    for (deUint32 planeNdx = 0; planeNdx < desc.numPlanes; ++planeNdx)
    {
        const deUint32 planeW = extent.width / desc.planes[planeNdx].widthDivisor;
        const deUint32 planeH = extent.height / desc.planes[planeNdx].heightDivisor;
        const deUint32 elementSize = desc.planes[planeNdx].elementSizeBytes;

        totalSize = (VkDeviceSize)deAlign64((deInt64)totalSize, elementSize);
        totalSize += planeW * planeH * elementSize;
    }

    return totalSize;
}
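
// The three helpers above compute the memory size reported for an image: tightly packed
// texel data for ordinary formats, block-based sizes for compressed formats, and a
// per-plane sum (with element-size alignment between planes) for Y'CbCr formats.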

VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements (VkDevice, VkImage imageHandle, VkMemoryRequirements* requirements)
{
    const Image* image = reinterpret_cast<const Image*>(imageHandle.getInternal());

    requirements->memoryTypeBits = 1u;
    requirements->alignment = 16u;

    if (isCompressedFormat(image->getFormat()))
        requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
    else if (isYCbCrFormat(image->getFormat()))
        requirements->size = getYCbCrImageDataSize(image->getFormat(), image->getExtent());
    else
        requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
}

VKAPI_ATTR VkResult VKAPI_CALL allocateMemory (VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory)
{
    const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);

    if ((exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
        || (importInfo && importInfo->buffer.internal))
    {
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
        VK_NULL_RETURN((*pMemory = allocateNonDispHandle<ExternalDeviceMemoryAndroid, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
#else
        return VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR;
#endif
    }
    else
    {
        VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
    }
}
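
// allocateMemory() only takes the AHardwareBuffer path when the pNext chain requests
// Android hardware buffer export or carries an import; everything else is backed by
// PrivateDeviceMemory on the host heap.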

VKAPI_ATTR VkResult VKAPI_CALL mapMemory (VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
{
    DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());

    DE_UNREF(size);
    DE_UNREF(flags);

    *ppData = (deUint8*)memory->map() + offset;

    return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL unmapMemory (VkDevice device, VkDeviceMemory memHandle)
{
    DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());

    DE_UNREF(device);

    memory->unmap();
}

VKAPI_ATTR VkResult VKAPI_CALL getMemoryAndroidHardwareBufferANDROID (VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, pt::AndroidHardwareBufferPtr* pBuffer)
{
    DE_UNREF(device);

#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
    DeviceMemory* const memory = reinterpret_cast<ExternalDeviceMemoryAndroid*>(pInfo->memory.getInternal());
    ExternalDeviceMemoryAndroid* const androidMemory = static_cast<ExternalDeviceMemoryAndroid*>(memory);

    AHardwareBuffer* hwbuffer = androidMemory->getHwBuffer();
    AHardwareBuffer_acquire(hwbuffer);
    pBuffer->internal = hwbuffer;
#else
    DE_UNREF(pInfo);
    DE_UNREF(pBuffer);
#endif

    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets (VkDevice, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
{
    DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)pAllocateInfo->descriptorPool.getInternal());

    for (deUint32 ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
    {
        try
        {
            pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
        }
        catch (const std::bad_alloc&)
        {
            for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
                delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());

            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }
        catch (VkResult res)
        {
            for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
                delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());

            return res;
        }
    }

    return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL freeDescriptorSets (VkDevice, VkDescriptorPool descriptorPool, deUint32 count, const VkDescriptorSet* pDescriptorSets)
{
    DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());

    for (deUint32 ndx = 0; ndx < count; ++ndx)
        poolImpl->free(pDescriptorSets[ndx]);
}

VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool (VkDevice, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags)
{
    DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());

    poolImpl->reset();

    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
{
    DE_UNREF(device);

    if (pAllocateInfo && pCommandBuffers)
    {
        CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)pAllocateInfo->commandPool.getInternal());

        for (deUint32 ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
            pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);
    }

    return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
{
    CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)commandPool.getInternal());

    DE_UNREF(device);

    for (deUint32 ndx = 0; ndx < commandBufferCount; ++ndx)
        poolImpl->free(pCommandBuffers[ndx]);
}


VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR (VkPhysicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode)
{
    DE_UNREF(pAllocator);
    VK_NULL_RETURN((*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
}

VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR (VkDevice device, deUint32 swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains)
{
    for (deUint32 ndx = 0; ndx < swapchainCount; ++ndx)
    {
        pSwapchains[ndx] = allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos+ndx, pAllocator);
    }

    return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceExternalBufferPropertiesKHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties)
{
    DE_UNREF(physicalDevice);
    DE_UNREF(pExternalBufferInfo);

    pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0;
    pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
    pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0;

    if (pExternalBufferInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
    {
        pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR;
        pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
        pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
}

VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties2KHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties)
{
    const VkPhysicalDeviceExternalImageFormatInfo* const externalInfo = findStructure<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
    VkExternalImageFormatProperties* const externalProperties = findStructure<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
    VkResult result;

    result = getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
    if (result != VK_SUCCESS)
        return result;

    if (externalInfo && externalInfo->handleType != 0)
    {
        if (externalInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
            return VK_ERROR_FORMAT_NOT_SUPPORTED;

        if (!(pImageFormatInfo->format == VK_FORMAT_R8G8B8A8_UNORM
              || pImageFormatInfo->format == VK_FORMAT_R8G8B8_UNORM
              || pImageFormatInfo->format == VK_FORMAT_R5G6B5_UNORM_PACK16
              || pImageFormatInfo->format == VK_FORMAT_R16G16B16A16_SFLOAT
              || pImageFormatInfo->format == VK_FORMAT_A2R10G10B10_UNORM_PACK32))
        {
            return VK_ERROR_FORMAT_NOT_SUPPORTED;
        }

        if (pImageFormatInfo->type != VK_IMAGE_TYPE_2D)
            return VK_ERROR_FORMAT_NOT_SUPPORTED;

        if ((pImageFormatInfo->usage & ~(VK_IMAGE_USAGE_TRANSFER_SRC_BIT
                                         | VK_IMAGE_USAGE_TRANSFER_DST_BIT
                                         | VK_IMAGE_USAGE_SAMPLED_BIT
                                         | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
            != 0)
        {
            return VK_ERROR_FORMAT_NOT_SUPPORTED;
        }

        if ((pImageFormatInfo->flags & ~(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
                                         /*| VK_IMAGE_CREATE_PROTECTED_BIT_KHR*/
                                         /*| VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR*/))
            != 0)
        {
            return VK_ERROR_FORMAT_NOT_SUPPORTED;
        }

        if (externalProperties)
        {
            externalProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR
                                                                                | VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR
                                                                                | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR;
            externalProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
            externalProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
        }
    }

    return VK_SUCCESS;
}

// \note getInstanceProcAddr is a little bit special:
// vkNullDriverImpl.inl needs it to define s_platformFunctions but
// getInstanceProcAddr() implementation needs other entry points from
// vkNullDriverImpl.inl.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName);

#include "vkNullDriverImpl.inl"

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName)
{
    if (instance)
    {
        return reinterpret_cast<Instance*>(instance)->getProcAddr(pName);
    }
    else
    {
        const std::string name = pName;

        if (name == "vkCreateInstance")
            return (PFN_vkVoidFunction)createInstance;
        else if (name == "vkEnumerateInstanceExtensionProperties")
            return (PFN_vkVoidFunction)enumerateInstanceExtensionProperties;
        else if (name == "vkEnumerateInstanceLayerProperties")
            return (PFN_vkVoidFunction)enumerateInstanceLayerProperties;
        else
            return (PFN_vkVoidFunction)DE_NULL;
    }
}

} // extern "C"

Instance::Instance (const VkInstanceCreateInfo*)
    : m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
{
}

Device::Device (VkPhysicalDevice, const VkDeviceCreateInfo*)
    : m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
{
}

class NullDriverLibrary : public Library
{
public:
    NullDriverLibrary (void)
        : m_library (s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
        , m_driver  (m_library)
    {}

    const PlatformInterface& getPlatformInterface (void) const { return m_driver; }
    const tcu::FunctionLibrary& getFunctionLibrary (void) const { return m_library; }
private:
    const tcu::StaticFunctionLibrary m_library;
    const PlatformDriver m_driver;
};

} // anonymous

Library* createNullDriver (void)
{
    return new NullDriverLibrary();
}

} // vk