1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 //    Helper functions for the Vulkan Renderer.
8 //
9 
10 #include "libANGLE/renderer/vulkan/vk_utils.h"
11 
12 #include "libANGLE/Context.h"
13 #include "libANGLE/renderer/vulkan/BufferVk.h"
14 #include "libANGLE/renderer/vulkan/ContextVk.h"
15 #include "libANGLE/renderer/vulkan/DisplayVk.h"
16 #include "libANGLE/renderer/vulkan/RendererVk.h"
17 #include "libANGLE/renderer/vulkan/ResourceVk.h"
18 #include "libANGLE/renderer/vulkan/android/vk_android_utils.h"
19 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
20 
21 namespace angle
22 {
23 egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode)
24 {
25     if (result != angle::Result::Continue)
26     {
27         return displayVk->getEGLError(errorCode);
28     }
29     else
30     {
31         return egl::NoError();
32     }
33 }
34 }  // namespace angle
35 
36 namespace rx
37 {
38 namespace
39 {
40 // Pick an arbitrary value to initialize non-zero memory for sanitization.  Note that 0x3F3F3F3F
41 // as float is about 0.75.
42 constexpr int kNonZeroInitValue = 0x3F;
43 
44 VkBufferUsageFlags GetStagingBufferUsageFlags(vk::StagingUsage usage)
45 {
46     switch (usage)
47     {
48         case vk::StagingUsage::Read:
49             return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
50         case vk::StagingUsage::Write:
51             return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
52         case vk::StagingUsage::Both:
53             return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
54         default:
55             UNREACHABLE();
56             return 0;
57     }
58 }
59 
60 bool FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties &memoryProperties,
61                           const VkMemoryRequirements &memoryRequirements,
62                           VkMemoryPropertyFlags requestedMemoryPropertyFlags,
63                           VkMemoryPropertyFlags *memoryPropertyFlagsOut,
64                           uint32_t *typeIndexOut)
65 {
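    // memoryTypeBits has one bit set for each memory type the resource can use; iterate only
    // those types and pick the first one whose property flags contain every requested flag.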
66     for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
67     {
68         ASSERT(memoryIndex < memoryProperties.memoryTypeCount);
69 
70         if ((memoryProperties.memoryTypes[memoryIndex].propertyFlags &
71              requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
72         {
73             *memoryPropertyFlagsOut = memoryProperties.memoryTypes[memoryIndex].propertyFlags;
74             *typeIndexOut           = static_cast<uint32_t>(memoryIndex);
75             return true;
76         }
77     }
78 
79     return false;
80 }
81 
82 angle::Result FindAndAllocateCompatibleMemory(vk::Context *context,
83                                               const vk::MemoryProperties &memoryProperties,
84                                               VkMemoryPropertyFlags requestedMemoryPropertyFlags,
85                                               VkMemoryPropertyFlags *memoryPropertyFlagsOut,
86                                               const VkMemoryRequirements &memoryRequirements,
87                                               const void *extraAllocationInfo,
88                                               vk::DeviceMemory *deviceMemoryOut)
89 {
90     VkDevice device = context->getDevice();
91 
92     uint32_t memoryTypeIndex = 0;
93     ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(
94         context, memoryRequirements, requestedMemoryPropertyFlags, (extraAllocationInfo != nullptr),
95         memoryPropertyFlagsOut, &memoryTypeIndex));
96 
97     VkMemoryAllocateInfo allocInfo = {};
98     allocInfo.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
99     allocInfo.pNext                = extraAllocationInfo;
100     allocInfo.memoryTypeIndex      = memoryTypeIndex;
101     allocInfo.allocationSize       = memoryRequirements.size;
102 
103     ANGLE_VK_TRY(context, deviceMemoryOut->allocate(device, allocInfo));
104 
105     // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
106     // invalid value ensures that our testing doesn't assume zero-initialized memory.
107     RendererVk *renderer = context->getRenderer();
108     if (renderer->getFeatures().allocateNonZeroMemory.enabled)
109     {
110         if ((*memoryPropertyFlagsOut & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
111         {
112             // Can map the memory.
113             ANGLE_TRY(vk::InitMappableDeviceMemory(context, deviceMemoryOut,
114                                                    memoryRequirements.size, kNonZeroInitValue,
115                                                    *memoryPropertyFlagsOut));
116         }
117     }
118 
119     return angle::Result::Continue;
120 }
121 
122 template <typename T>
123 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
124                                                  VkMemoryPropertyFlags requestedMemoryPropertyFlags,
125                                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
126                                                  const VkMemoryRequirements &memoryRequirements,
127                                                  const void *extraAllocationInfo,
128                                                  const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
129                                                  T *bufferOrImage,
130                                                  vk::DeviceMemory *deviceMemoryOut);
131 
132 template <>
133 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
134                                                  VkMemoryPropertyFlags requestedMemoryPropertyFlags,
135                                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
136                                                  const VkMemoryRequirements &memoryRequirements,
137                                                  const void *extraAllocationInfo,
138                                                  const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
139                                                  vk::Image *image,
140                                                  vk::DeviceMemory *deviceMemoryOut)
141 {
142     const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
143 
144     ANGLE_TRY(FindAndAllocateCompatibleMemory(
145         context, memoryProperties, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
146         memoryRequirements, extraAllocationInfo, deviceMemoryOut));
147 
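    // When per-plane bind info is provided (multi-planar images), the bind must go through
    // vkBindImageMemory2 so that VkBindImagePlaneMemoryInfo can be chained to the bind.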
148     if (extraBindInfo)
149     {
150         VkBindImageMemoryInfoKHR bindInfo = {};
151         bindInfo.sType                    = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
152         bindInfo.pNext                    = extraBindInfo;
153         bindInfo.image                    = image->getHandle();
154         bindInfo.memory                   = deviceMemoryOut->getHandle();
155         bindInfo.memoryOffset             = 0;
156 
157         ANGLE_VK_TRY(context, image->bindMemory2(context->getDevice(), bindInfo));
158     }
159     else
160     {
161         ANGLE_VK_TRY(context, image->bindMemory(context->getDevice(), *deviceMemoryOut));
162     }
163 
164     return angle::Result::Continue;
165 }
166 
167 template <>
168 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
169                                                  VkMemoryPropertyFlags requestedMemoryPropertyFlags,
170                                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
171                                                  const VkMemoryRequirements &memoryRequirements,
172                                                  const void *extraAllocationInfo,
173                                                  const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
174                                                  vk::Buffer *buffer,
175                                                  vk::DeviceMemory *deviceMemoryOut)
176 {
177     ASSERT(extraBindInfo == nullptr);
178 
179     const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
180 
181     ANGLE_TRY(FindAndAllocateCompatibleMemory(
182         context, memoryProperties, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
183         memoryRequirements, extraAllocationInfo, deviceMemoryOut));
184     ANGLE_VK_TRY(context, buffer->bindMemory(context->getDevice(), *deviceMemoryOut, 0));
185     return angle::Result::Continue;
186 }
187 
188 template <typename T>
189 angle::Result AllocateBufferOrImageMemory(vk::Context *context,
190                                           VkMemoryPropertyFlags requestedMemoryPropertyFlags,
191                                           VkMemoryPropertyFlags *memoryPropertyFlagsOut,
192                                           const void *extraAllocationInfo,
193                                           T *bufferOrImage,
194                                           vk::DeviceMemory *deviceMemoryOut,
195                                           VkDeviceSize *sizeOut)
196 {
197     // Call driver to determine memory requirements.
198     VkMemoryRequirements memoryRequirements;
199     bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);
200 
201     ANGLE_TRY(AllocateAndBindBufferOrImageMemory(
202         context, requestedMemoryPropertyFlags, memoryPropertyFlagsOut, memoryRequirements,
203         extraAllocationInfo, nullptr, bufferOrImage, deviceMemoryOut));
204 
205     *sizeOut = memoryRequirements.size;
206 
207     return angle::Result::Continue;
208 }
209 
210 // Unified layer that includes the full validation layer stack.
211 constexpr char kVkKhronosValidationLayerName[]  = "VK_LAYER_KHRONOS_validation";
212 constexpr char kVkStandardValidationLayerName[] = "VK_LAYER_LUNARG_standard_validation";
213 const char *kVkValidationLayerNames[]           = {
214     "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
215     "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
216     "VK_LAYER_GOOGLE_unique_objects"};
217 
218 bool HasValidationLayer(const std::vector<VkLayerProperties> &layerProps, const char *layerName)
219 {
220     for (const auto &layerProp : layerProps)
221     {
222         if (std::string(layerProp.layerName) == layerName)
223         {
224             return true;
225         }
226     }
227 
228     return false;
229 }
230 
231 bool HasKhronosValidationLayer(const std::vector<VkLayerProperties> &layerProps)
232 {
233     return HasValidationLayer(layerProps, kVkKhronosValidationLayerName);
234 }
235 
236 bool HasStandardValidationLayer(const std::vector<VkLayerProperties> &layerProps)
237 {
238     return HasValidationLayer(layerProps, kVkStandardValidationLayerName);
239 }
240 
241 bool HasValidationLayers(const std::vector<VkLayerProperties> &layerProps)
242 {
243     for (const char *layerName : kVkValidationLayerNames)
244     {
245         if (!HasValidationLayer(layerProps, layerName))
246         {
247             return false;
248         }
249     }
250 
251     return true;
252 }
253 }  // anonymous namespace
254 
255 const char *VulkanResultString(VkResult result)
256 {
257     switch (result)
258     {
259         case VK_SUCCESS:
260             return "Command successfully completed";
261         case VK_NOT_READY:
262             return "A fence or query has not yet completed";
263         case VK_TIMEOUT:
264             return "A wait operation has not completed in the specified time";
265         case VK_EVENT_SET:
266             return "An event is signaled";
267         case VK_EVENT_RESET:
268             return "An event is unsignaled";
269         case VK_INCOMPLETE:
270             return "A return array was too small for the result";
271         case VK_SUBOPTIMAL_KHR:
272             return "A swapchain no longer matches the surface properties exactly, but can still be "
273                    "used to present to the surface successfully";
274         case VK_ERROR_OUT_OF_HOST_MEMORY:
275             return "A host memory allocation has failed";
276         case VK_ERROR_OUT_OF_DEVICE_MEMORY:
277             return "A device memory allocation has failed";
278         case VK_ERROR_INITIALIZATION_FAILED:
279             return "Initialization of an object could not be completed for implementation-specific "
280                    "reasons";
281         case VK_ERROR_DEVICE_LOST:
282             return "The logical or physical device has been lost";
283         case VK_ERROR_MEMORY_MAP_FAILED:
284             return "Mapping of a memory object has failed";
285         case VK_ERROR_LAYER_NOT_PRESENT:
286             return "A requested layer is not present or could not be loaded";
287         case VK_ERROR_EXTENSION_NOT_PRESENT:
288             return "A requested extension is not supported";
289         case VK_ERROR_FEATURE_NOT_PRESENT:
290             return "A requested feature is not supported";
291         case VK_ERROR_INCOMPATIBLE_DRIVER:
292             return "The requested version of Vulkan is not supported by the driver or is otherwise "
293                    "incompatible for implementation-specific reasons";
294         case VK_ERROR_TOO_MANY_OBJECTS:
295             return "Too many objects of the type have already been created";
296         case VK_ERROR_FORMAT_NOT_SUPPORTED:
297             return "A requested format is not supported on this device";
298         case VK_ERROR_SURFACE_LOST_KHR:
299             return "A surface is no longer available";
300         case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
301             return "The requested window is already connected to a VkSurfaceKHR, or to some other "
302                    "non-Vulkan API";
303         case VK_ERROR_OUT_OF_DATE_KHR:
304             return "A surface has changed in such a way that it is no longer compatible with the "
305                    "swapchain";
306         case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
307             return "The display used by a swapchain does not use the same presentable image "
308                    "layout, or is incompatible in a way that prevents sharing an image";
309         case VK_ERROR_VALIDATION_FAILED_EXT:
310             return "The validation layers detected invalid API usage";
311         case VK_ERROR_INVALID_SHADER_NV:
312             return "Invalid Vulkan shader was generated";
313         case VK_ERROR_OUT_OF_POOL_MEMORY:
314             return "A pool memory allocation has failed";
315         case VK_ERROR_FRAGMENTED_POOL:
316             return "A pool allocation has failed due to fragmentation of the pool's memory";
317         case VK_ERROR_INVALID_EXTERNAL_HANDLE:
318             return "An external handle is not a valid handle of the specified type";
319         default:
320             return "Unknown Vulkan error code";
321     }
322 }
323 
324 bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
325                                   bool mustHaveLayers,
326                                   VulkanLayerVector *enabledLayerNames)
327 {
328     // Favor the unified Khronos layer, but fall back to standard validation.
329     if (HasKhronosValidationLayer(layerProps))
330     {
331         enabledLayerNames->push_back(kVkKhronosValidationLayerName);
332     }
333     else if (HasStandardValidationLayer(layerProps))
334     {
335         enabledLayerNames->push_back(kVkStandardValidationLayerName);
336     }
337     else if (HasValidationLayers(layerProps))
338     {
339         for (const char *layerName : kVkValidationLayerNames)
340         {
341             enabledLayerNames->push_back(layerName);
342         }
343     }
344     else
345     {
346         // Generate an error if the layers were explicitly requested, and a warning otherwise.
347         if (mustHaveLayers)
348         {
349             ERR() << "Vulkan validation layers are missing.";
350         }
351         else
352         {
353             WARN() << "Vulkan validation layers are missing.";
354         }
355 
356         return false;
357     }
358 
359     return true;
360 }
361 
362 namespace vk
363 {
364 const char *gLoaderLayersPathEnv   = "VK_LAYER_PATH";
365 const char *gLoaderICDFilenamesEnv = "VK_ICD_FILENAMES";
366 
367 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
368 {
369     return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
370            (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
371 }
372 
373 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
374 {
375     VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
376     // If the image is not depth stencil, assume color aspect.  Note that detecting color formats
377     // is less trivial than depth/stencil, e.g. block formats don't indicate any bits for RGBA
378     // channels.
379     return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
380 }
381 
382 // Context implementation.
383 Context::Context(RendererVk *renderer) : mRenderer(renderer), mPerfCounters{} {}
384 
385 Context::~Context() {}
386 
387 VkDevice Context::getDevice() const
388 {
389     return mRenderer->getDevice();
390 }
391 
392 // MemoryProperties implementation.
393 MemoryProperties::MemoryProperties() : mMemoryProperties{} {}
394 
395 void MemoryProperties::init(VkPhysicalDevice physicalDevice)
396 {
397     ASSERT(mMemoryProperties.memoryTypeCount == 0);
398     vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
399     ASSERT(mMemoryProperties.memoryTypeCount > 0);
400 }
401 
402 void MemoryProperties::destroy()
403 {
404     mMemoryProperties = {};
405 }
406 
407 bool MemoryProperties::hasLazilyAllocatedMemory() const
408 {
409     for (uint32_t typeIndex = 0; typeIndex < mMemoryProperties.memoryTypeCount; ++typeIndex)
410     {
411         const VkMemoryType &memoryType = mMemoryProperties.memoryTypes[typeIndex];
412         if ((memoryType.propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
413         {
414             return true;
415         }
416     }
417     return false;
418 }
419 
420 angle::Result MemoryProperties::findCompatibleMemoryIndex(
421     Context *context,
422     const VkMemoryRequirements &memoryRequirements,
423     VkMemoryPropertyFlags requestedMemoryPropertyFlags,
424     bool isExternalMemory,
425     VkMemoryPropertyFlags *memoryPropertyFlagsOut,
426     uint32_t *typeIndexOut) const
427 {
428     ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);
429 
430     // Find a compatible memory pool index. If the index doesn't change, we could cache it.
431     // Not finding a valid memory pool means an out-of-spec driver or an internal error.
432     // TODO(jmadill): Determine if it is possible to cache indexes.
433     // TODO(jmadill): More efficient memory allocation.
434     if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, requestedMemoryPropertyFlags,
435                              memoryPropertyFlagsOut, typeIndexOut))
436     {
437         return angle::Result::Continue;
438     }
439 
440     // We did not find a compatible memory type.  If the caller requested host-visible memory,
441     // fall back to the guaranteed host-visible and host-coherent memory flags.
442     if (requestedMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
443     {
444         // The Vulkan spec says the following -
445         //     There must be at least one memory type with both the
446         //     VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT and VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
447         //     bits set in its propertyFlags
448         constexpr VkMemoryPropertyFlags fallbackMemoryPropertyFlags =
449             VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
450 
451         if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, fallbackMemoryPropertyFlags,
452                                  memoryPropertyFlagsOut, typeIndexOut))
453         {
454             return angle::Result::Continue;
455         }
456     }
457 
458     // We did not find a compatible memory type. When importing external memory, there may be
459     // additional restrictions on memoryType. Fall back to requesting device-local memory.
460     if (isExternalMemory)
461     {
462         // The Vulkan spec says the following -
463         //     There must be at least one memory type with the VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
464         //     bit set in its propertyFlags
465         if (FindCompatibleMemory(mMemoryProperties, memoryRequirements,
466                                  VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, memoryPropertyFlagsOut,
467                                  typeIndexOut))
468         {
469             return angle::Result::Continue;
470         }
471     }
472 
473     // TODO(jmadill): Add error message to error.
474     context->handleError(VK_ERROR_INCOMPATIBLE_DRIVER, __FILE__, ANGLE_FUNCTION, __LINE__);
475     return angle::Result::Stop;
476 }
477 
478 // StagingBuffer implementation.
479 StagingBuffer::StagingBuffer() : mSize(0) {}
480 
481 void StagingBuffer::destroy(RendererVk *renderer)
482 {
483     VkDevice device = renderer->getDevice();
484     mBuffer.destroy(device);
485     mAllocation.destroy(renderer->getAllocator());
486     mSize = 0;
487 }
488 
489 angle::Result StagingBuffer::init(Context *context, VkDeviceSize size, StagingUsage usage)
490 {
491     VkBufferCreateInfo createInfo    = {};
492     createInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
493     createInfo.flags                 = 0;
494     createInfo.size                  = size;
495     createInfo.usage                 = GetStagingBufferUsageFlags(usage);
496     createInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
497     createInfo.queueFamilyIndexCount = 0;
498     createInfo.pQueueFamilyIndices   = nullptr;
499 
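    // Staging buffers are written and read by the host, so require host-visible, host-coherent
    // memory; coherent memory avoids explicit flush/invalidate calls around the mapping.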
500     VkMemoryPropertyFlags preferredFlags = 0;
501     VkMemoryPropertyFlags requiredFlags =
502         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
503 
504     RendererVk *renderer       = context->getRenderer();
505     const Allocator &allocator = renderer->getAllocator();
506 
507     uint32_t memoryTypeIndex = 0;
508     ANGLE_VK_TRY(context,
509                  allocator.createBuffer(createInfo, requiredFlags, preferredFlags,
510                                         renderer->getFeatures().persistentlyMappedBuffers.enabled,
511                                         &memoryTypeIndex, &mBuffer, &mAllocation));
512     mSize = static_cast<size_t>(size);
513 
514     // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
515     // invalid value ensures that our testing doesn't assume zero-initialized memory.
516     if (renderer->getFeatures().allocateNonZeroMemory.enabled)
517     {
518         ANGLE_TRY(InitMappableAllocation(context, allocator, &mAllocation, size, kNonZeroInitValue,
519                                          requiredFlags));
520     }
521 
522     return angle::Result::Continue;
523 }
524 
525 void StagingBuffer::release(ContextVk *contextVk)
526 {
527     contextVk->addGarbage(&mBuffer);
528     contextVk->addGarbage(&mAllocation);
529 }
530 
531 void StagingBuffer::collectGarbage(RendererVk *renderer, Serial serial)
532 {
533     GarbageList garbageList;
534     garbageList.emplace_back(GetGarbage(&mBuffer));
535     garbageList.emplace_back(GetGarbage(&mAllocation));
536 
537     SharedResourceUse sharedUse;
538     sharedUse.init();
539     sharedUse.updateSerialOneOff(serial);
540     renderer->collectGarbage(std::move(sharedUse), std::move(garbageList));
541 }
542 
543 angle::Result InitMappableAllocation(Context *context,
544                                      const Allocator &allocator,
545                                      Allocation *allocation,
546                                      VkDeviceSize size,
547                                      int value,
548                                      VkMemoryPropertyFlags memoryPropertyFlags)
549 {
550     uint8_t *mapPointer;
551     ANGLE_VK_TRY(context, allocation->map(allocator, &mapPointer));
552     memset(mapPointer, value, static_cast<size_t>(size));
553 
554     if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
555     {
556         allocation->flush(allocator, 0, size);
557     }
558 
559     allocation->unmap(allocator);
560 
561     return angle::Result::Continue;
562 }
563 
564 angle::Result InitMappableDeviceMemory(Context *context,
565                                        DeviceMemory *deviceMemory,
566                                        VkDeviceSize size,
567                                        int value,
568                                        VkMemoryPropertyFlags memoryPropertyFlags)
569 {
570     VkDevice device = context->getDevice();
571 
572     uint8_t *mapPointer;
573     ANGLE_VK_TRY(context, deviceMemory->map(device, 0, VK_WHOLE_SIZE, 0, &mapPointer));
574     memset(mapPointer, value, static_cast<size_t>(size));
575 
576     // If the memory type is not host coherent, perform an explicit flush.
577     if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
578     {
579         VkMappedMemoryRange mappedRange = {};
580         mappedRange.sType               = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
581         mappedRange.memory              = deviceMemory->getHandle();
582         mappedRange.size                = VK_WHOLE_SIZE;
583         ANGLE_VK_TRY(context, vkFlushMappedMemoryRanges(device, 1, &mappedRange));
584     }
585 
586     deviceMemory->unmap(device);
587 
588     return angle::Result::Continue;
589 }
590 
591 angle::Result AllocateBufferMemory(Context *context,
592                                    VkMemoryPropertyFlags requestedMemoryPropertyFlags,
593                                    VkMemoryPropertyFlags *memoryPropertyFlagsOut,
594                                    const void *extraAllocationInfo,
595                                    Buffer *buffer,
596                                    DeviceMemory *deviceMemoryOut,
597                                    VkDeviceSize *sizeOut)
598 {
599     return AllocateBufferOrImageMemory(context, requestedMemoryPropertyFlags,
600                                        memoryPropertyFlagsOut, extraAllocationInfo, buffer,
601                                        deviceMemoryOut, sizeOut);
602 }
603 
604 angle::Result AllocateImageMemory(Context *context,
605                                   VkMemoryPropertyFlags memoryPropertyFlags,
606                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
607                                   const void *extraAllocationInfo,
608                                   Image *image,
609                                   DeviceMemory *deviceMemoryOut,
610                                   VkDeviceSize *sizeOut)
611 {
612     return AllocateBufferOrImageMemory(context, memoryPropertyFlags, memoryPropertyFlagsOut,
613                                        extraAllocationInfo, image, deviceMemoryOut, sizeOut);
614 }
615 
616 angle::Result AllocateImageMemoryWithRequirements(
617     Context *context,
618     VkMemoryPropertyFlags memoryPropertyFlags,
619     const VkMemoryRequirements &memoryRequirements,
620     const void *extraAllocationInfo,
621     const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
622     Image *image,
623     DeviceMemory *deviceMemoryOut)
624 {
625     VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
626     return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
627                                               memoryRequirements, extraAllocationInfo,
628                                               extraBindInfo, image, deviceMemoryOut);
629 }
630 
631 angle::Result AllocateBufferMemoryWithRequirements(Context *context,
632                                                    VkMemoryPropertyFlags memoryPropertyFlags,
633                                                    const VkMemoryRequirements &memoryRequirements,
634                                                    const void *extraAllocationInfo,
635                                                    Buffer *buffer,
636                                                    VkMemoryPropertyFlags *memoryPropertyFlagsOut,
637                                                    DeviceMemory *deviceMemoryOut)
638 {
639     return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, memoryPropertyFlagsOut,
640                                               memoryRequirements, extraAllocationInfo, nullptr,
641                                               buffer, deviceMemoryOut);
642 }
643 
644 angle::Result InitShaderAndSerial(Context *context,
645                                   ShaderAndSerial *shaderAndSerial,
646                                   const uint32_t *shaderCode,
647                                   size_t shaderCodeSize)
648 {
649     VkShaderModuleCreateInfo createInfo = {};
650     createInfo.sType                    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
651     createInfo.flags                    = 0;
652     createInfo.codeSize                 = shaderCodeSize;
653     createInfo.pCode                    = shaderCode;
654 
655     ANGLE_VK_TRY(context, shaderAndSerial->get().init(context->getDevice(), createInfo));
656     shaderAndSerial->updateSerial(context->getRenderer()->issueShaderSerial());
657     return angle::Result::Continue;
658 }
659 
660 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
661 {
662     if (layerCount > 1)
663     {
664         if (samples > 1)
665         {
666             return gl::TextureType::_2DMultisampleArray;
667         }
668         else
669         {
670             return gl::TextureType::_2DArray;
671         }
672     }
673     else
674     {
675         if (samples > 1)
676         {
677             return gl::TextureType::_2DMultisample;
678         }
679         else
680         {
681             return gl::TextureType::_2D;
682         }
683     }
684 }
685 
686 GarbageObject::GarbageObject() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE) {}
687 
688 GarbageObject::GarbageObject(HandleType handleType, GarbageHandle handle)
689     : mHandleType(handleType), mHandle(handle)
690 {}
691 
692 GarbageObject::GarbageObject(GarbageObject &&other) : GarbageObject()
693 {
694     *this = std::move(other);
695 }
696 
697 GarbageObject &GarbageObject::operator=(GarbageObject &&rhs)
698 {
699     std::swap(mHandle, rhs.mHandle);
700     std::swap(mHandleType, rhs.mHandleType);
701     return *this;
702 }
703 
704 // GarbageObject implementation
705 // Using C-style casts here to avoid a conditional compile for 32-bit MSVC,
706 // which fails to compile with reinterpret_cast and requires static_cast.
707 void GarbageObject::destroy(RendererVk *renderer)
708 {
709     ANGLE_TRACE_EVENT0("gpu.angle", "GarbageObject::destroy");
710     VkDevice device = renderer->getDevice();
711     switch (mHandleType)
712     {
713         case HandleType::Semaphore:
714             vkDestroySemaphore(device, (VkSemaphore)mHandle, nullptr);
715             break;
716         case HandleType::CommandBuffer:
717             // Command buffers are pool allocated.
718             UNREACHABLE();
719             break;
720         case HandleType::Event:
721             vkDestroyEvent(device, (VkEvent)mHandle, nullptr);
722             break;
723         case HandleType::Fence:
724             vkDestroyFence(device, (VkFence)mHandle, nullptr);
725             break;
726         case HandleType::DeviceMemory:
727             vkFreeMemory(device, (VkDeviceMemory)mHandle, nullptr);
728             break;
729         case HandleType::Buffer:
730             vkDestroyBuffer(device, (VkBuffer)mHandle, nullptr);
731             break;
732         case HandleType::BufferView:
733             vkDestroyBufferView(device, (VkBufferView)mHandle, nullptr);
734             break;
735         case HandleType::Image:
736             vkDestroyImage(device, (VkImage)mHandle, nullptr);
737             break;
738         case HandleType::ImageView:
739             vkDestroyImageView(device, (VkImageView)mHandle, nullptr);
740             break;
741         case HandleType::ShaderModule:
742             vkDestroyShaderModule(device, (VkShaderModule)mHandle, nullptr);
743             break;
744         case HandleType::PipelineLayout:
745             vkDestroyPipelineLayout(device, (VkPipelineLayout)mHandle, nullptr);
746             break;
747         case HandleType::RenderPass:
748             vkDestroyRenderPass(device, (VkRenderPass)mHandle, nullptr);
749             break;
750         case HandleType::Pipeline:
751             vkDestroyPipeline(device, (VkPipeline)mHandle, nullptr);
752             break;
753         case HandleType::DescriptorSetLayout:
754             vkDestroyDescriptorSetLayout(device, (VkDescriptorSetLayout)mHandle, nullptr);
755             break;
756         case HandleType::Sampler:
757             vkDestroySampler(device, (VkSampler)mHandle, nullptr);
758             break;
759         case HandleType::DescriptorPool:
760             vkDestroyDescriptorPool(device, (VkDescriptorPool)mHandle, nullptr);
761             break;
762         case HandleType::Framebuffer:
763             vkDestroyFramebuffer(device, (VkFramebuffer)mHandle, nullptr);
764             break;
765         case HandleType::CommandPool:
766             vkDestroyCommandPool(device, (VkCommandPool)mHandle, nullptr);
767             break;
768         case HandleType::QueryPool:
769             vkDestroyQueryPool(device, (VkQueryPool)mHandle, nullptr);
770             break;
771         case HandleType::Allocation:
772             vma::FreeMemory(renderer->getAllocator().getHandle(), (VmaAllocation)mHandle);
773             break;
774         default:
775             UNREACHABLE();
776             break;
777     }
778 
779     renderer->onDeallocateHandle(mHandleType);
780 }
781 
782 void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label)
783 {
784     static constexpr angle::ColorF kLabelColors[6] = {
785         angle::ColorF(1.0f, 0.5f, 0.5f, 1.0f),  // DEBUG_SOURCE_API
786         angle::ColorF(0.5f, 1.0f, 0.5f, 1.0f),  // DEBUG_SOURCE_WINDOW_SYSTEM
787         angle::ColorF(0.5f, 0.5f, 1.0f, 1.0f),  // DEBUG_SOURCE_SHADER_COMPILER
788         angle::ColorF(0.7f, 0.7f, 0.7f, 1.0f),  // DEBUG_SOURCE_THIRD_PARTY
789         angle::ColorF(0.5f, 0.8f, 0.9f, 1.0f),  // DEBUG_SOURCE_APPLICATION
790         angle::ColorF(0.9f, 0.8f, 0.5f, 1.0f),  // DEBUG_SOURCE_OTHER
791     };
792 
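    // The GL_DEBUG_SOURCE_* enums are contiguous starting at GL_DEBUG_SOURCE_API, so the offset
    // from GL_DEBUG_SOURCE_API indexes directly into kLabelColors.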
793     int colorIndex = source - GL_DEBUG_SOURCE_API;
794     ASSERT(colorIndex >= 0 && static_cast<size_t>(colorIndex) < ArraySize(kLabelColors));
795 
796     label->sType      = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
797     label->pNext      = nullptr;
798     label->pLabelName = marker;
799     kLabelColors[colorIndex].writeData(label->color);
800 }
801 
802 // ClearValuesArray implementation.
803 ClearValuesArray::ClearValuesArray() : mValues{}, mEnabled{} {}
804 
805 ClearValuesArray::~ClearValuesArray() = default;
806 
807 ClearValuesArray::ClearValuesArray(const ClearValuesArray &other) = default;
808 
809 ClearValuesArray &ClearValuesArray::operator=(const ClearValuesArray &rhs) = default;
810 
811 void ClearValuesArray::store(uint32_t index,
812                              VkImageAspectFlags aspectFlags,
813                              const VkClearValue &clearValue)
814 {
815     ASSERT(aspectFlags != 0);
816 
817     // We use two separate checks to handle the packed depth-stencil case.
818     if ((aspectFlags & VK_IMAGE_ASPECT_STENCIL_BIT) != 0)
819     {
820         // Ensure that for packed depth-stencil we're writing to the depth index.
821         ASSERT(index == kUnpackedDepthIndex ||
822                (index == kUnpackedStencilIndex && aspectFlags == VK_IMAGE_ASPECT_STENCIL_BIT));
823 
824         storeNoDepthStencil(kUnpackedStencilIndex, clearValue);
825     }
826 
827     if (aspectFlags != VK_IMAGE_ASPECT_STENCIL_BIT)
828     {
829         storeNoDepthStencil(index, clearValue);
830     }
831 }
832 
833 void ClearValuesArray::storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue)
834 {
835     mValues[index] = clearValue;
836     mEnabled.set(index);
837 }
838 
839 gl::DrawBufferMask ClearValuesArray::getColorMask() const
840 {
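    // Depth and stencil clear values are stored beyond the color attachment range, so masking
    // with the color buffer bits leaves only the enabled color attachments.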
841     constexpr uint32_t kColorBuffersMask =
842         angle::BitMask<uint32_t>(gl::IMPLEMENTATION_MAX_DRAW_BUFFERS);
843     return gl::DrawBufferMask(mEnabled.bits() & kColorBuffersMask);
844 }
845 
846 // ResourceSerialFactory implementation.
847 ResourceSerialFactory::ResourceSerialFactory() : mCurrentUniqueSerial(1) {}
848 
849 ResourceSerialFactory::~ResourceSerialFactory() {}
850 
851 uint32_t ResourceSerialFactory::issueSerial()
852 {
853     uint32_t newSerial = ++mCurrentUniqueSerial;
854     // Make sure the serial does not wrap around.
855     ASSERT(newSerial > 0);
856     return newSerial;
857 }
858 
859 #define ANGLE_DEFINE_GEN_VK_SERIAL(Type)                         \
860     Type##Serial ResourceSerialFactory::generate##Type##Serial() \
861     {                                                            \
862         return Type##Serial(issueSerial());                      \
863     }
864 
865 ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_GEN_VK_SERIAL)
866 
867 void ClampViewport(VkViewport *viewport)
868 {
869     // 0-sized viewports are invalid in Vulkan.
870     ASSERT(viewport);
871     if (viewport->width == 0.0f)
872     {
873         viewport->width = 1.0f;
874     }
875     if (viewport->height == 0.0f)
876     {
877         viewport->height = 1.0f;
878     }
879 }
880 
881 }  // namespace vk
882 
883 #if !defined(ANGLE_SHARED_LIBVULKAN)
884 // VK_EXT_debug_utils
885 PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT   = nullptr;
886 PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = nullptr;
887 PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT       = nullptr;
888 PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT           = nullptr;
889 PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT     = nullptr;
890 
891 // VK_EXT_debug_report
892 PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT   = nullptr;
893 PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;
894 
895 // VK_KHR_get_physical_device_properties2
896 PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR             = nullptr;
897 PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR                 = nullptr;
898 PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = nullptr;
899 
900 // VK_KHR_external_semaphore_fd
901 PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = nullptr;
902 
903 // VK_EXT_external_memory_host
904 PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = nullptr;
905 
906 // VK_EXT_host_query_reset
907 PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = nullptr;
908 
909 // VK_EXT_transform_feedback
910 PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = nullptr;
911 PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT             = nullptr;
912 PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT                 = nullptr;
913 PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT                       = nullptr;
914 PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT                           = nullptr;
915 PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT               = nullptr;
916 
917 // VK_KHR_get_memory_requirements2
918 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = nullptr;
919 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR   = nullptr;
920 
921 // VK_KHR_bind_memory2
922 PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = nullptr;
923 PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR   = nullptr;
924 
925 // VK_KHR_external_fence_capabilities
926 PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR =
927     nullptr;
928 
929 // VK_KHR_external_fence_fd
930 PFN_vkGetFenceFdKHR vkGetFenceFdKHR       = nullptr;
931 PFN_vkImportFenceFdKHR vkImportFenceFdKHR = nullptr;
932 
933 // VK_KHR_external_semaphore_capabilities
934 PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR
935     vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr;
936 
937 // VK_KHR_sampler_ycbcr_conversion
938 PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR   = nullptr;
939 PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = nullptr;
940 
941 // VK_KHR_create_renderpass2
942 PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = nullptr;
943 
944 #    if defined(ANGLE_PLATFORM_FUCHSIA)
945 // VK_FUCHSIA_imagepipe_surface
946 PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = nullptr;
947 #    endif
948 
949 #    if defined(ANGLE_PLATFORM_ANDROID)
950 PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID =
951     nullptr;
952 PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = nullptr;
953 #    endif
954 
955 #    if defined(ANGLE_PLATFORM_GGP)
956 PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = nullptr;
957 #    endif
958 
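// These helpers load an extension entry point by name into the matching global PFN_* variable
// and assert that the loader resolved it.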
959 #    define GET_INSTANCE_FUNC(vkName)                                                          \
960         do                                                                                     \
961         {                                                                                      \
962             vkName = reinterpret_cast<PFN_##vkName>(vkGetInstanceProcAddr(instance, #vkName)); \
963             ASSERT(vkName);                                                                    \
964         } while (0)
965 
966 #    define GET_DEVICE_FUNC(vkName)                                                        \
967         do                                                                                 \
968         {                                                                                  \
969             vkName = reinterpret_cast<PFN_##vkName>(vkGetDeviceProcAddr(device, #vkName)); \
970             ASSERT(vkName);                                                                \
971         } while (0)
972 
973 // VK_KHR_shared_presentable_image
974 PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = nullptr;
975 
976 void InitDebugUtilsEXTFunctions(VkInstance instance)
977 {
978     GET_INSTANCE_FUNC(vkCreateDebugUtilsMessengerEXT);
979     GET_INSTANCE_FUNC(vkDestroyDebugUtilsMessengerEXT);
980     GET_INSTANCE_FUNC(vkCmdBeginDebugUtilsLabelEXT);
981     GET_INSTANCE_FUNC(vkCmdEndDebugUtilsLabelEXT);
982     GET_INSTANCE_FUNC(vkCmdInsertDebugUtilsLabelEXT);
983 }
984 
985 void InitDebugReportEXTFunctions(VkInstance instance)
986 {
987     GET_INSTANCE_FUNC(vkCreateDebugReportCallbackEXT);
988     GET_INSTANCE_FUNC(vkDestroyDebugReportCallbackEXT);
989 }
990 
991 void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance)
992 {
993     GET_INSTANCE_FUNC(vkGetPhysicalDeviceProperties2KHR);
994     GET_INSTANCE_FUNC(vkGetPhysicalDeviceFeatures2KHR);
995     GET_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR);
996 }
997 
998 void InitTransformFeedbackEXTFunctions(VkDevice device)
999 {
1000     GET_DEVICE_FUNC(vkCmdBindTransformFeedbackBuffersEXT);
1001     GET_DEVICE_FUNC(vkCmdBeginTransformFeedbackEXT);
1002     GET_DEVICE_FUNC(vkCmdEndTransformFeedbackEXT);
1003     GET_DEVICE_FUNC(vkCmdBeginQueryIndexedEXT);
1004     GET_DEVICE_FUNC(vkCmdEndQueryIndexedEXT);
1005     GET_DEVICE_FUNC(vkCmdDrawIndirectByteCountEXT);
1006 }
1007 
1008 // VK_KHR_sampler_ycbcr_conversion
1009 void InitSamplerYcbcrKHRFunctions(VkDevice device)
1010 {
1011     GET_DEVICE_FUNC(vkCreateSamplerYcbcrConversionKHR);
1012     GET_DEVICE_FUNC(vkDestroySamplerYcbcrConversionKHR);
1013 }
1014 
1015 // VK_KHR_create_renderpass2
1016 void InitRenderPass2KHRFunctions(VkDevice device)
1017 {
1018     GET_DEVICE_FUNC(vkCreateRenderPass2KHR);
1019 }
1020 
1021 #    if defined(ANGLE_PLATFORM_FUCHSIA)
1022 void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance)
1023 {
1024     GET_INSTANCE_FUNC(vkCreateImagePipeSurfaceFUCHSIA);
1025 }
1026 #    endif
1027 
1028 #    if defined(ANGLE_PLATFORM_ANDROID)
1029 void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance)
1030 {
1031     GET_INSTANCE_FUNC(vkGetAndroidHardwareBufferPropertiesANDROID);
1032     GET_INSTANCE_FUNC(vkGetMemoryAndroidHardwareBufferANDROID);
1033 }
1034 #    endif
1035 
1036 #    if defined(ANGLE_PLATFORM_GGP)
1037 void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance)
1038 {
1039     GET_INSTANCE_FUNC(vkCreateStreamDescriptorSurfaceGGP);
1040 }
1041 #    endif  // defined(ANGLE_PLATFORM_GGP)
1042 
1043 void InitExternalSemaphoreFdFunctions(VkInstance instance)
1044 {
1045     GET_INSTANCE_FUNC(vkImportSemaphoreFdKHR);
1046 }
1047 
1048 void InitExternalMemoryHostFunctions(VkInstance instance)
1049 {
1050     GET_INSTANCE_FUNC(vkGetMemoryHostPointerPropertiesEXT);
1051 }
1052 
1053 void InitHostQueryResetFunctions(VkInstance instance)
1054 {
1055     GET_INSTANCE_FUNC(vkResetQueryPoolEXT);
1056 }
1057 
1058 // VK_KHR_get_memory_requirements2
1059 void InitGetMemoryRequirements2KHRFunctions(VkDevice device)
1060 {
1061     GET_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR);
1062     GET_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR);
1063 }
1064 
1065 // VK_KHR_bind_memory2
1066 void InitBindMemory2KHRFunctions(VkDevice device)
1067 {
1068     GET_DEVICE_FUNC(vkBindBufferMemory2KHR);
1069     GET_DEVICE_FUNC(vkBindImageMemory2KHR);
1070 }
1071 
1072 // VK_KHR_external_fence_capabilities
1073 void InitExternalFenceCapabilitiesFunctions(VkInstance instance)
1074 {
1075     GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalFencePropertiesKHR);
1076 }
1077 
1078 // VK_KHR_external_fence_fd
1079 void InitExternalFenceFdFunctions(VkInstance instance)
1080 {
1081     GET_INSTANCE_FUNC(vkGetFenceFdKHR);
1082     GET_INSTANCE_FUNC(vkImportFenceFdKHR);
1083 }
1084 
1085 // VK_KHR_external_semaphore_capabilities
1086 void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance)
1087 {
1088     GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalSemaphorePropertiesKHR);
1089 }
1090 
1091 // VK_KHR_shared_presentable_image
1092 void InitGetSwapchainStatusKHRFunctions(VkDevice device)
1093 {
1094     GET_DEVICE_FUNC(vkGetSwapchainStatusKHR);
1095 }
1096 
1097 #    undef GET_INSTANCE_FUNC
1098 #    undef GET_DEVICE_FUNC
1099 
1100 #endif  // !defined(ANGLE_SHARED_LIBVULKAN)
1101 
1102 GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, angle::FormatID formatID)
1103 {
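    // Use linear filtering for mipmap generation only when the format supports linear filtering
    // and the application did not request GL_FASTEST.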
1104     const bool formatSupportsLinearFiltering = contextVk->getRenderer()->hasImageFormatFeatureBits(
1105         formatID, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT);
1106     const bool hintFastest = contextVk->getState().getGenerateMipmapHint() == GL_FASTEST;
1107 
1108     return formatSupportsLinearFiltering && !hintFastest ? GL_LINEAR : GL_NEAREST;
1109 }
1110 
1111 // Return the log2 of the sample count.  Assumes |sampleCount| is a power of 2.  The result can be used to
1112 // index an array based on sample count.  See for example TextureVk::PerSampleCountArray.
1113 size_t PackSampleCount(GLint sampleCount)
1114 {
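    // A sample count of zero denotes a non-multisampled image; treat it as a single sample.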
1115     if (sampleCount == 0)
1116     {
1117         sampleCount = 1;
1118     }
1119 
1120     // We currently only support up to 16xMSAA.
1121     ASSERT(sampleCount <= VK_SAMPLE_COUNT_16_BIT);
1122     ASSERT(gl::isPow2(sampleCount));
1123     return gl::ScanForward(static_cast<uint32_t>(sampleCount));
1124 }
1125 
1126 namespace gl_vk
1127 {
1128 
1129 VkFilter GetFilter(const GLenum filter)
1130 {
1131     switch (filter)
1132     {
1133         case GL_LINEAR_MIPMAP_LINEAR:
1134         case GL_LINEAR_MIPMAP_NEAREST:
1135         case GL_LINEAR:
1136             return VK_FILTER_LINEAR;
1137         case GL_NEAREST_MIPMAP_LINEAR:
1138         case GL_NEAREST_MIPMAP_NEAREST:
1139         case GL_NEAREST:
1140             return VK_FILTER_NEAREST;
1141         default:
1142             UNIMPLEMENTED();
1143             return VK_FILTER_MAX_ENUM;
1144     }
1145 }
1146 
1147 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
1148 {
1149     switch (filter)
1150     {
1151         case GL_LINEAR_MIPMAP_LINEAR:
1152         case GL_NEAREST_MIPMAP_LINEAR:
1153             return VK_SAMPLER_MIPMAP_MODE_LINEAR;
1154         case GL_LINEAR:
1155         case GL_NEAREST:
1156         case GL_NEAREST_MIPMAP_NEAREST:
1157         case GL_LINEAR_MIPMAP_NEAREST:
1158             return VK_SAMPLER_MIPMAP_MODE_NEAREST;
1159         default:
1160             UNIMPLEMENTED();
1161             return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
1162     }
1163 }
1164 
1165 VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
1166 {
1167     switch (wrap)
1168     {
1169         case GL_REPEAT:
1170             return VK_SAMPLER_ADDRESS_MODE_REPEAT;
1171         case GL_MIRRORED_REPEAT:
1172             return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
1173         case GL_CLAMP_TO_BORDER:
1174             return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
1175         case GL_CLAMP_TO_EDGE:
1176             return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
1177         default:
1178             UNIMPLEMENTED();
1179             return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
1180     }
1181 }
1182 
1183 VkRect2D GetRect(const gl::Rectangle &source)
1184 {
1185     return {{source.x, source.y},
1186             {static_cast<uint32_t>(source.width), static_cast<uint32_t>(source.height)}};
1187 }
1188 
1189 VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
1190 {
1191     switch (mode)
1192     {
1193         case gl::PrimitiveMode::Triangles:
1194             return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
1195         case gl::PrimitiveMode::Points:
1196             return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
1197         case gl::PrimitiveMode::Lines:
1198             return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
1199         case gl::PrimitiveMode::LineStrip:
1200             return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
1201         case gl::PrimitiveMode::TriangleFan:
1202             return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
1203         case gl::PrimitiveMode::TriangleStrip:
1204             return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
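        // Note: Vulkan has no line-loop topology.  GL_LINE_LOOP is assumed to be emulated
        // elsewhere (e.g. by rewriting the draw as a line strip with a closing index), so the
        // strip topology is used here.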
1205         case gl::PrimitiveMode::LineLoop:
1206             return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
1207         case gl::PrimitiveMode::LinesAdjacency:
1208             return VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY;
1209         case gl::PrimitiveMode::LineStripAdjacency:
1210             return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY;
1211         case gl::PrimitiveMode::TrianglesAdjacency:
1212             return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY;
1213         case gl::PrimitiveMode::TriangleStripAdjacency:
1214             return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY;
1215         case gl::PrimitiveMode::Patches:
1216             return VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
1217         default:
1218             UNREACHABLE();
1219             return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
1220     }
1221 }
1222 
1223 VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
1224 {
1225     if (!rasterState.cullFace)
1226     {
1227         return VK_CULL_MODE_NONE;
1228     }
1229 
1230     switch (rasterState.cullMode)
1231     {
1232         case gl::CullFaceMode::Front:
1233             return VK_CULL_MODE_FRONT_BIT;
1234         case gl::CullFaceMode::Back:
1235             return VK_CULL_MODE_BACK_BIT;
1236         case gl::CullFaceMode::FrontAndBack:
1237             return VK_CULL_MODE_FRONT_AND_BACK;
1238         default:
1239             UNREACHABLE();
1240             return VK_CULL_MODE_NONE;
1241     }
1242 }
1243 
1244 VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
1245 {
1246     // Invert CW and CCW to have the same behavior as OpenGL.
1247     switch (frontFace)
1248     {
1249         case GL_CW:
1250             return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
1251         case GL_CCW:
1252             return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
1253         default:
1254             UNREACHABLE();
1255             return VK_FRONT_FACE_CLOCKWISE;
1256     }
1257 }
1258 
1259 VkSampleCountFlagBits GetSamples(GLint sampleCount)
1260 {
1261     switch (sampleCount)
1262     {
1263         case 0:
1264             UNREACHABLE();
1265             return VK_SAMPLE_COUNT_1_BIT;
1266         case 1:
1267             return VK_SAMPLE_COUNT_1_BIT;
1268         case 2:
1269             return VK_SAMPLE_COUNT_2_BIT;
1270         case 4:
1271             return VK_SAMPLE_COUNT_4_BIT;
1272         case 8:
1273             return VK_SAMPLE_COUNT_8_BIT;
1274         case 16:
1275             return VK_SAMPLE_COUNT_16_BIT;
1276         case 32:
1277             return VK_SAMPLE_COUNT_32_BIT;
1278         default:
1279             UNREACHABLE();
1280             return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
1281     }
1282 }
1283 
1284 VkComponentSwizzle GetSwizzle(const GLenum swizzle)
1285 {
1286     switch (swizzle)
1287     {
1288         case GL_ALPHA:
1289             return VK_COMPONENT_SWIZZLE_A;
1290         case GL_RED:
1291             return VK_COMPONENT_SWIZZLE_R;
1292         case GL_GREEN:
1293             return VK_COMPONENT_SWIZZLE_G;
1294         case GL_BLUE:
1295             return VK_COMPONENT_SWIZZLE_B;
1296         case GL_ZERO:
1297             return VK_COMPONENT_SWIZZLE_ZERO;
1298         case GL_ONE:
1299             return VK_COMPONENT_SWIZZLE_ONE;
1300         default:
1301             UNREACHABLE();
1302             return VK_COMPONENT_SWIZZLE_IDENTITY;
1303     }
1304 }
1305 
1306 VkCompareOp GetCompareOp(const GLenum compareFunc)
1307 {
1308     switch (compareFunc)
1309     {
1310         case GL_NEVER:
1311             return VK_COMPARE_OP_NEVER;
1312         case GL_LESS:
1313             return VK_COMPARE_OP_LESS;
1314         case GL_EQUAL:
1315             return VK_COMPARE_OP_EQUAL;
1316         case GL_LEQUAL:
1317             return VK_COMPARE_OP_LESS_OR_EQUAL;
1318         case GL_GREATER:
1319             return VK_COMPARE_OP_GREATER;
1320         case GL_NOTEQUAL:
1321             return VK_COMPARE_OP_NOT_EQUAL;
1322         case GL_GEQUAL:
1323             return VK_COMPARE_OP_GREATER_OR_EQUAL;
1324         case GL_ALWAYS:
1325             return VK_COMPARE_OP_ALWAYS;
1326         default:
1327             UNREACHABLE();
1328             return VK_COMPARE_OP_ALWAYS;
1329     }
1330 }
1331 
1332 void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
1333 {
1334     vkOffset->x = glOffset.x;
1335     vkOffset->y = glOffset.y;
1336     vkOffset->z = glOffset.z;
1337 }
1338 
1339 void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
1340 {
1341     vkExtent->width  = glExtent.width;
1342     vkExtent->height = glExtent.height;
1343     vkExtent->depth  = glExtent.depth;
1344 }
1345 
1346 VkImageType GetImageType(gl::TextureType textureType)
1347 {
1348     switch (textureType)
1349     {
1350         case gl::TextureType::_2D:
1351         case gl::TextureType::_2DArray:
1352         case gl::TextureType::_2DMultisample:
1353         case gl::TextureType::_2DMultisampleArray:
1354         case gl::TextureType::CubeMap:
1355         case gl::TextureType::CubeMapArray:
1356         case gl::TextureType::External:
1357             return VK_IMAGE_TYPE_2D;
1358         case gl::TextureType::_3D:
1359             return VK_IMAGE_TYPE_3D;
1360         default:
1361             // We will need to implement all the texture types for ES3+.
1362             UNIMPLEMENTED();
1363             return VK_IMAGE_TYPE_MAX_ENUM;
1364     }
1365 }
1366 
1367 VkImageViewType GetImageViewType(gl::TextureType textureType)
1368 {
1369     switch (textureType)
1370     {
1371         case gl::TextureType::_2D:
1372         case gl::TextureType::_2DMultisample:
1373         case gl::TextureType::External:
1374             return VK_IMAGE_VIEW_TYPE_2D;
1375         case gl::TextureType::_2DArray:
1376         case gl::TextureType::_2DMultisampleArray:
1377             return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
1378         case gl::TextureType::_3D:
1379             return VK_IMAGE_VIEW_TYPE_3D;
1380         case gl::TextureType::CubeMap:
1381             return VK_IMAGE_VIEW_TYPE_CUBE;
1382         case gl::TextureType::CubeMapArray:
1383             return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
1384         default:
1385             // We will need to implement all the texture types for ES3+.
1386             UNIMPLEMENTED();
1387             return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
1388     }
1389 }
1390 
1391 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
1392 {
1393     return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
1394            (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
1395 }
1396 
1397 VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
1398 {
1399     VkShaderStageFlags flags = 0;
1400     for (const gl::ShaderType shaderType : activeShaders)
1401     {
1402         flags |= kShaderStageMap[shaderType];
1403     }
1404     return flags;
1405 }
1406 
1407 void GetViewport(const gl::Rectangle &viewport,
1408                  float nearPlane,
1409                  float farPlane,
1410                  bool invertViewport,
1411                  bool clipSpaceOriginUpperLeft,
1412                  GLint renderAreaHeight,
1413                  VkViewport *viewportOut)
1414 {
1415     viewportOut->x        = static_cast<float>(viewport.x);
1416     viewportOut->y        = static_cast<float>(viewport.y);
1417     viewportOut->width    = static_cast<float>(viewport.width);
1418     viewportOut->height   = static_cast<float>(viewport.height);
1419     viewportOut->minDepth = gl::clamp01(nearPlane);
1420     viewportOut->maxDepth = gl::clamp01(farPlane);
1421 
1422     // Say an application intends to draw a primitive (shown as 'O' below); it can choose to use
1423     // a different clip space origin. When the clip space origin (shown as 'C' below) is switched
1424     // from lower-left to upper-left, primitives are rendered with their y-coordinates flipped.
1425 
1426     // Rendered content will differ based on whether the target is the default framebuffer or a
1427     // user-defined framebuffer. We modify the viewport's 'y' and 'h' accordingly.
1428 
1429     // clip space origin is lower-left
1430     // Expected draw in GLES        default framebuffer    user defined framebuffer
1431     // (0,H)                        (0,0)                  (0,0)
1432     // +                            +-----------+  (W,0)   +-----------+ (W,0)
1433     // |                            |                      |  C----+
1434     // |                            |                      |  |    | (h)
1435     // |  +----+                    |  +----+              |  | O  |
1436     // |  | O  |                    |  | O  | (-h)         |  +----+
1437     // |  |    |                    |  |    |              |
1438     // |  C----+                    |  C----+              |
1439     // +-----------+ (W,0)          +                      +
1440     // (0,0)                        (0,H)                  (0,H)
1441     //                              y' = H - h             y' = y
1442 
1443     // clip space origin is upper-left
1444     // Expected draw in GLES        default framebuffer     user defined framebuffer
1445     // (0,H)                        (0,0)                  (0,0)
1446     // +                            +-----------+  (W,0)   +-----------+ (W,0)
1447     // |                            |                      |  +----+
1448     // |                            |                      |  | O  | (-h)
1449     // |  C----+                    |  C----+              |  |    |
1450     // |  |    |                    |  |    | (h)          |  C----+
1451     // |  | O  |                    |  | O  |              |
1452     // |  +----+                    |  +----+              |
1453     // +-----------+  (W,0)         +                      +
1454     // (0,0)                        (0,H)                  (0,H)
1455     //                              y' = H - (y + h)       y' = y + h
1456 
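    // Worked example with illustrative values, renderAreaHeight = 1080 and
    // viewport = {x = 10, y = 20, width = 300, height = 200}:
    //   upper-left origin, inverted viewport:      y' = 1080 - (200 + 20) = 860,  height' =  200
    //   upper-left origin, non-inverted viewport:  y' = 200 + 20          = 220,  height' = -200
    //   lower-left origin, inverted viewport:      y' = 1080 - 20         = 1060, height' = -200
    //   lower-left origin, non-inverted viewport:  y' = 20,                       height' =  200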
1457     if (clipSpaceOriginUpperLeft)
1458     {
1459         if (invertViewport)
1460         {
1461             viewportOut->y = static_cast<float>(renderAreaHeight - (viewport.height + viewport.y));
1462         }
1463         else
1464         {
1465             viewportOut->y      = static_cast<float>(viewport.height + viewport.y);
1466             viewportOut->height = -viewportOut->height;
1467         }
1468     }
1469     else
1470     {
1471         if (invertViewport)
1472         {
1473             viewportOut->y      = static_cast<float>(renderAreaHeight - viewport.y);
1474             viewportOut->height = -viewportOut->height;
1475         }
1476     }
1477 }
1478 
1479 void GetExtentsAndLayerCount(gl::TextureType textureType,
1480                              const gl::Extents &extents,
1481                              VkExtent3D *extentsOut,
1482                              uint32_t *layerCountOut)
1483 {
1484     extentsOut->width  = extents.width;
1485     extentsOut->height = extents.height;
1486 
1487     switch (textureType)
1488     {
1489         case gl::TextureType::CubeMap:
1490             extentsOut->depth = 1;
1491             *layerCountOut    = gl::kCubeFaceCount;
1492             break;
1493 
1494         case gl::TextureType::_2DArray:
1495         case gl::TextureType::_2DMultisampleArray:
1496         case gl::TextureType::CubeMapArray:
1497             extentsOut->depth = 1;
1498             *layerCountOut    = extents.depth;
1499             break;
1500 
1501         default:
1502             extentsOut->depth = extents.depth;
1503             *layerCountOut    = 1;
1504             break;
1505     }
1506 }
1507 
1508 vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel)
1509 {
1510     ASSERT(baseLevel <= levelGL);
1511     return vk::LevelIndex(levelGL.get() - baseLevel.get());
1512 }
1513 
1514 }  // namespace gl_vk
1515 
1516 namespace vk_gl
1517 {
1518 void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
1519 {
1520     // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b.  At the time of this
1521     // writing, b is in [0, 6]; however, we test all 32 bits in case the enum is extended.
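    // For example, VK_SAMPLE_COUNT_4_BIT (0x4) sets bit 2, which maps back to 1 << 2 = 4 samples.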
1522     for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1523     {
1524         setOut->insert(static_cast<GLuint>(1 << bit));
1525     }
1526 }
1527 
1528 GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
1529 {
1530     GLuint maxCount = 0;
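    // BitSet32 iterates set bits from lowest to highest, so the last value written below is the
    // largest supported sample count.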
1531     for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1532     {
1533         maxCount = static_cast<GLuint>(1 << bit);
1534     }
1535     return maxCount;
1536 }
1537 
1538 GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
1539 {
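    // Bits are visited from lowest to highest, so this returns the smallest supported sample
    // count that is at least |requestedCount|.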
1540     for (size_t bit : angle::BitSet32<32>(supportedCounts & kSupportedSampleCounts))
1541     {
1542         GLuint sampleCount = static_cast<GLuint>(1 << bit);
1543         if (sampleCount >= requestedCount)
1544         {
1545             return sampleCount;
1546         }
1547     }
1548 
1549     UNREACHABLE();
1550     return 0;
1551 }
1552 
1553 gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel)
1554 {
1555     return gl::LevelIndex(levelVk.get() + baseLevel.get());
1556 }
1557 }  // namespace vk_gl
1558 
1559 namespace vk
1560 {
1561 // BufferBlock implementation.
1562 BufferBlock::BufferBlock() : mMemoryPropertyFlags(0), mSize(0), mMappedMemory(nullptr) {}
1563 
1564 BufferBlock::BufferBlock(BufferBlock &&other)
1565     : mVirtualBlock(std::move(other.mVirtualBlock)),
1566       mBuffer(std::move(other.mBuffer)),
1567       mDeviceMemory(std::move(other.mDeviceMemory)),
1568       mMemoryPropertyFlags(other.mMemoryPropertyFlags),
1569       mSize(other.mSize),
1570       mMappedMemory(other.mMappedMemory),
1571       mSerial(other.mSerial),
1572       mCountRemainsEmpty(0)
1573 {}
1574 
1575 BufferBlock &BufferBlock::operator=(BufferBlock &&other)
1576 {
1577     std::swap(mVirtualBlock, other.mVirtualBlock);
1578     std::swap(mBuffer, other.mBuffer);
1579     std::swap(mDeviceMemory, other.mDeviceMemory);
1580     std::swap(mMemoryPropertyFlags, other.mMemoryPropertyFlags);
1581     std::swap(mSize, other.mSize);
1582     std::swap(mMappedMemory, other.mMappedMemory);
1583     std::swap(mSerial, other.mSerial);
1584     std::swap(mCountRemainsEmpty, other.mCountRemainsEmpty);
1585     return *this;
1586 }
1587 
1588 BufferBlock::~BufferBlock()
1589 {
1590     ASSERT(!mVirtualBlock.valid());
1591     ASSERT(!mBuffer.valid());
1592     ASSERT(!mDeviceMemory.valid());
1593 }
1594 
1595 void BufferBlock::destroy(RendererVk *renderer)
1596 {
1597     VkDevice device = renderer->getDevice();
1598 
1599     if (mMappedMemory)
1600     {
1601         unmap(device);
1602     }
1603 
1604     mVirtualBlock.destroy(device);
1605     mBuffer.destroy(device);
1606     mDeviceMemory.destroy(device);
1607 }
1608 
1609 angle::Result BufferBlock::init(Context *context,
1610                                 Buffer &buffer,
1611                                 vma::VirtualBlockCreateFlags flags,
1612                                 DeviceMemory &deviceMemory,
1613                                 VkMemoryPropertyFlags memoryPropertyFlags,
1614                                 VkDeviceSize size)
1615 {
1616     RendererVk *renderer = context->getRenderer();
1617     ASSERT(!mVirtualBlock.valid());
1618     ASSERT(!mBuffer.valid());
1619     ASSERT(!mDeviceMemory.valid());
1620 
1621     ANGLE_VK_TRY(context, mVirtualBlock.init(renderer->getDevice(), flags, size));
1622 
1623     mBuffer              = std::move(buffer);
1624     mDeviceMemory        = std::move(deviceMemory);
1625     mMemoryPropertyFlags = memoryPropertyFlags;
1626     mSize                = size;
1627     mMappedMemory        = nullptr;
1628     mSerial              = renderer->getResourceSerialFactory().generateBufferSerial();
1629 
1630     return angle::Result::Continue;
1631 }
1632 
1633 void BufferBlock::initWithoutVirtualBlock(Context *context,
1634                                           Buffer &buffer,
1635                                           DeviceMemory &deviceMemory,
1636                                           VkMemoryPropertyFlags memoryPropertyFlags,
1637                                           VkDeviceSize size)
1638 {
1639     RendererVk *renderer = context->getRenderer();
1640     ASSERT(!mVirtualBlock.valid());
1641     ASSERT(!mBuffer.valid());
1642     ASSERT(!mDeviceMemory.valid());
1643 
1644     mBuffer              = std::move(buffer);
1645     mDeviceMemory        = std::move(deviceMemory);
1646     mMemoryPropertyFlags = memoryPropertyFlags;
1647     mSize                = size;
1648     mMappedMemory        = nullptr;
1649     mSerial              = renderer->getResourceSerialFactory().generateBufferSerial();
1650 }
1651 
1652 VkResult BufferBlock::map(const VkDevice device)
1653 {
1654     ASSERT(mMappedMemory == nullptr);
1655     return mDeviceMemory.map(device, 0, mSize, 0, &mMappedMemory);
1656 }
1657 
1658 void BufferBlock::unmap(const VkDevice device)
1659 {
1660     mDeviceMemory.unmap(device);
1661     mMappedMemory = nullptr;
1662 }
1663 
1664 void BufferBlock::free(VkDeviceSize offset)
1665 {
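    // Suballocations may be freed from a different thread than the one that created them
    // (presumably why mVirtualBlockMutex exists), so only touch the virtual block under the lock.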
1666     std::unique_lock<std::mutex> lock(mVirtualBlockMutex);
1667     mVirtualBlock.free(offset);
1668 }
1669 
1670 int32_t BufferBlock::getAndIncrementEmptyCounter()
1671 {
1672     return ++mCountRemainsEmpty;
1673 }
1674 
1675 // BufferSuballocation implementation.
1676 VkResult BufferSuballocation::map(Context *context)
1677 {
1678     return mBufferBlock->map(context->getDevice());
1679 }
1680 }  // namespace vk
1681 }  // namespace rx
1682