1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 // Helper functions for the Vulkan Renderer.
8 //
9
10 #include "libANGLE/renderer/vulkan/vk_utils.h"
11
12 #include "libANGLE/Context.h"
13 #include "libANGLE/Display.h"
14 #include "libANGLE/renderer/vulkan/BufferVk.h"
15 #include "libANGLE/renderer/vulkan/ContextVk.h"
16 #include "libANGLE/renderer/vulkan/DisplayVk.h"
17 #include "libANGLE/renderer/vulkan/android/vk_android_utils.h"
18 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
19 #include "libANGLE/renderer/vulkan/vk_ref_counted_event.h"
20 #include "libANGLE/renderer/vulkan/vk_renderer.h"
21 #include "libANGLE/renderer/vulkan/vk_resource.h"
22
23 namespace angle
24 {
ToEGL(Result result,EGLint errorCode)25 egl::Error ToEGL(Result result, EGLint errorCode)
26 {
27 if (result != angle::Result::Continue)
28 {
29 egl::Error error = std::move(*egl::Display::GetCurrentThreadErrorScratchSpace());
30 error.setCode(errorCode);
31 return error;
32 }
33 else
34 {
35 return egl::NoError();
36 }
37 }
38 } // namespace angle
39
40 namespace rx
41 {
42 namespace
43 {
// Pick an arbitrary value to initialize non-zero memory for sanitization. Note that 0x3F3F3F3F
// as float is about 0.75.  This byte value is passed to memset, so every byte of the wiped
// allocation becomes 0x3F.
constexpr int kNonZeroInitValue = 0x3F;
47
GetStagingBufferUsageFlags(vk::StagingUsage usage)48 VkImageUsageFlags GetStagingBufferUsageFlags(vk::StagingUsage usage)
49 {
50 switch (usage)
51 {
52 case vk::StagingUsage::Read:
53 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
54 case vk::StagingUsage::Write:
55 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
56 case vk::StagingUsage::Both:
57 return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
58 default:
59 UNREACHABLE();
60 return 0;
61 }
62 }
63
FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties & memoryProperties,const VkMemoryRequirements & memoryRequirements,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,uint32_t * typeIndexOut)64 bool FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties &memoryProperties,
65 const VkMemoryRequirements &memoryRequirements,
66 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
67 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
68 uint32_t *typeIndexOut)
69 {
70 for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
71 {
72 ASSERT(memoryIndex < memoryProperties.memoryTypeCount);
73
74 if ((memoryProperties.memoryTypes[memoryIndex].propertyFlags &
75 requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
76 {
77 *memoryPropertyFlagsOut = memoryProperties.memoryTypes[memoryIndex].propertyFlags;
78 *typeIndexOut = static_cast<uint32_t>(memoryIndex);
79 return true;
80 }
81 }
82
83 return false;
84 }
85
// Finds a memory type index compatible with |memoryRequirements| and the
// requested property flags, then allocates device memory of the required size
// from it.  On success, outputs the chosen type index, that type's actual
// property flags, and the allocated memory.  Returns the VkResult of the
// lookup/allocation.
VkResult FindAndAllocateCompatibleMemory(vk::ErrorContext *context,
                                         vk::MemoryAllocationType memoryAllocationType,
                                         const vk::MemoryProperties &memoryProperties,
                                         VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                         VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                         const VkMemoryRequirements &memoryRequirements,
                                         const void *extraAllocationInfo,
                                         uint32_t *memoryTypeIndexOut,
                                         vk::DeviceMemory *deviceMemoryOut)
{
    VkDevice device        = context->getDevice();
    vk::Renderer *renderer = context->getRenderer();

    // A non-null |extraAllocationInfo| signals an external-memory import,
    // which may further restrict the usable memory types.
    VK_RESULT_TRY(memoryProperties.findCompatibleMemoryIndex(
        renderer, memoryRequirements, requestedMemoryPropertyFlags,
        (extraAllocationInfo != nullptr), memoryPropertyFlagsOut, memoryTypeIndexOut));

    // |extraAllocationInfo| is chained in as pNext (e.g. dedicated/export info).
    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext                = extraAllocationInfo;
    allocInfo.memoryTypeIndex      = *memoryTypeIndexOut;
    allocInfo.allocationSize       = memoryRequirements.size;

    // Add the new allocation for tracking.  Registered as "pending" before the
    // allocate call so a failure can be attributed to this allocation.
    renderer->getMemoryAllocationTracker()->setPendingMemoryAlloc(
        memoryAllocationType, allocInfo.allocationSize, *memoryTypeIndexOut);

    VkResult result = deviceMemoryOut->allocate(device, allocInfo);

    if (result == VK_SUCCESS)
    {
        // Commit the pending allocation in the tracker.
        renderer->onMemoryAlloc(memoryAllocationType, allocInfo.allocationSize, *memoryTypeIndexOut,
                                deviceMemoryOut->getHandle());
    }
    return result;
}
122
// Allocates memory compatible with |bufferOrImage| and binds it.  Declared
// here and explicitly specialized below for vk::Image and vk::Buffer, since
// the bind entry points differ between the two object types.
template <typename T>
VkResult AllocateAndBindBufferOrImageMemory(vk::ErrorContext *context,
                                            vk::MemoryAllocationType memoryAllocationType,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            const VkMemoryRequirements &memoryRequirements,
                                            const void *extraAllocationInfo,
                                            const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
                                            T *bufferOrImage,
                                            uint32_t *memoryTypeIndexOut,
                                            vk::DeviceMemory *deviceMemoryOut);
134
135 template <>
AllocateAndBindBufferOrImageMemory(vk::ErrorContext * context,vk::MemoryAllocationType memoryAllocationType,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,const VkMemoryRequirements & memoryRequirements,const void * extraAllocationInfo,const VkBindImagePlaneMemoryInfoKHR * extraBindInfo,vk::Image * image,uint32_t * memoryTypeIndexOut,vk::DeviceMemory * deviceMemoryOut)136 VkResult AllocateAndBindBufferOrImageMemory(vk::ErrorContext *context,
137 vk::MemoryAllocationType memoryAllocationType,
138 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
139 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
140 const VkMemoryRequirements &memoryRequirements,
141 const void *extraAllocationInfo,
142 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
143 vk::Image *image,
144 uint32_t *memoryTypeIndexOut,
145 vk::DeviceMemory *deviceMemoryOut)
146 {
147 const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
148
149 VK_RESULT_TRY(FindAndAllocateCompatibleMemory(
150 context, memoryAllocationType, memoryProperties, requestedMemoryPropertyFlags,
151 memoryPropertyFlagsOut, memoryRequirements, extraAllocationInfo, memoryTypeIndexOut,
152 deviceMemoryOut));
153
154 if (extraBindInfo)
155 {
156 VkBindImageMemoryInfoKHR bindInfo = {};
157 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
158 bindInfo.pNext = extraBindInfo;
159 bindInfo.image = image->getHandle();
160 bindInfo.memory = deviceMemoryOut->getHandle();
161 bindInfo.memoryOffset = 0;
162
163 VK_RESULT_TRY(image->bindMemory2(context->getDevice(), bindInfo));
164 }
165 else
166 {
167 VK_RESULT_TRY(image->bindMemory(context->getDevice(), *deviceMemoryOut));
168 }
169
170 return VK_SUCCESS;
171 }
172
173 template <>
AllocateAndBindBufferOrImageMemory(vk::ErrorContext * context,vk::MemoryAllocationType memoryAllocationType,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,const VkMemoryRequirements & memoryRequirements,const void * extraAllocationInfo,const VkBindImagePlaneMemoryInfoKHR * extraBindInfo,vk::Buffer * buffer,uint32_t * memoryTypeIndexOut,vk::DeviceMemory * deviceMemoryOut)174 VkResult AllocateAndBindBufferOrImageMemory(vk::ErrorContext *context,
175 vk::MemoryAllocationType memoryAllocationType,
176 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
177 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
178 const VkMemoryRequirements &memoryRequirements,
179 const void *extraAllocationInfo,
180 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
181 vk::Buffer *buffer,
182 uint32_t *memoryTypeIndexOut,
183 vk::DeviceMemory *deviceMemoryOut)
184 {
185 ASSERT(extraBindInfo == nullptr);
186
187 const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
188
189 VK_RESULT_TRY(FindAndAllocateCompatibleMemory(
190 context, memoryAllocationType, memoryProperties, requestedMemoryPropertyFlags,
191 memoryPropertyFlagsOut, memoryRequirements, extraAllocationInfo, memoryTypeIndexOut,
192 deviceMemoryOut));
193
194 VK_RESULT_TRY(buffer->bindMemory(context->getDevice(), *deviceMemoryOut, 0));
195 return VK_SUCCESS;
196 }
197
// Queries the driver for |bufferOrImage|'s memory requirements, then allocates
// and binds matching device memory via the specializations above.  Outputs the
// chosen memory type index, its property flags, the memory, and the actual
// allocation size (which may exceed the caller's requested size).
template <typename T>
VkResult AllocateBufferOrImageMemory(vk::ErrorContext *context,
                                     vk::MemoryAllocationType memoryAllocationType,
                                     VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                     VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                     const void *extraAllocationInfo,
                                     T *bufferOrImage,
                                     uint32_t *memoryTypeIndexOut,
                                     vk::DeviceMemory *deviceMemoryOut,
                                     VkDeviceSize *sizeOut)
{
    // Call driver to determine memory requirements.
    VkMemoryRequirements memoryRequirements;
    bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);

    // No extra bind info on this path (nullptr for extraBindInfo).
    VK_RESULT_TRY(AllocateAndBindBufferOrImageMemory(
        context, memoryAllocationType, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
        memoryRequirements, extraAllocationInfo, nullptr, bufferOrImage, memoryTypeIndexOut,
        deviceMemoryOut));

    *sizeOut = memoryRequirements.size;

    return VK_SUCCESS;
}
222
// Unified layer that includes full validation layer stack
constexpr char kVkKhronosValidationLayerName[] = "VK_LAYER_KHRONOS_validation";
// Older meta-layer superseded by the Khronos layer; kept as a fallback.
constexpr char kVkStandardValidationLayerName[] = "VK_LAYER_LUNARG_standard_validation";
// Individual legacy layers used only when neither meta-layer is available.
const char *kVkValidationLayerNames[] = {
    "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
    "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
    "VK_LAYER_GOOGLE_unique_objects"};
230
231 } // anonymous namespace
232
// Maps a VkResult code to a human-readable description for logging and error
// reporting.  Unrecognized codes fall back to a generic message.
const char *VulkanResultString(VkResult result)
{
    switch (result)
    {
        case VK_SUCCESS:
            return "Command successfully completed";
        case VK_NOT_READY:
            return "A fence or query has not yet completed";
        case VK_TIMEOUT:
            return "A wait operation has not completed in the specified time";
        case VK_EVENT_SET:
            return "An event is signaled";
        case VK_EVENT_RESET:
            return "An event is unsignaled";
        case VK_INCOMPLETE:
            return "A return array was too small for the result";
        case VK_SUBOPTIMAL_KHR:
            return "A swapchain no longer matches the surface properties exactly, but can still be "
                   "used to present to the surface successfully";
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            return "A host memory allocation has failed";
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
            return "A device memory allocation has failed";
        case VK_ERROR_INITIALIZATION_FAILED:
            return "Initialization of an object could not be completed for implementation-specific "
                   "reasons";
        case VK_ERROR_DEVICE_LOST:
            return "The logical or physical device has been lost";
        case VK_ERROR_MEMORY_MAP_FAILED:
            return "Mapping of a memory object has failed";
        case VK_ERROR_LAYER_NOT_PRESENT:
            return "A requested layer is not present or could not be loaded";
        case VK_ERROR_EXTENSION_NOT_PRESENT:
            return "A requested extension is not supported";
        case VK_ERROR_FEATURE_NOT_PRESENT:
            return "A requested feature is not supported";
        case VK_ERROR_INCOMPATIBLE_DRIVER:
            return "The requested version of Vulkan is not supported by the driver or is otherwise "
                   "incompatible for implementation-specific reasons";
        case VK_ERROR_TOO_MANY_OBJECTS:
            return "Too many objects of the type have already been created";
        case VK_ERROR_FORMAT_NOT_SUPPORTED:
            return "A requested format is not supported on this device";
        case VK_ERROR_SURFACE_LOST_KHR:
            return "A surface is no longer available";
        case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
            return "The requested window is already connected to a VkSurfaceKHR, or to some other "
                   "non-Vulkan API";
        case VK_ERROR_OUT_OF_DATE_KHR:
            return "A surface has changed in such a way that it is no longer compatible with the "
                   "swapchain";
        case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
            return "The display used by a swapchain does not use the same presentable image "
                   "layout, or is incompatible in a way that prevents sharing an image";
        case VK_ERROR_VALIDATION_FAILED_EXT:
            return "The validation layers detected invalid API usage";
        case VK_ERROR_INVALID_SHADER_NV:
            return "Invalid Vulkan shader was generated";
        case VK_ERROR_OUT_OF_POOL_MEMORY:
            return "A pool memory allocation has failed";
        case VK_ERROR_FRAGMENTED_POOL:
            return "A pool allocation has failed due to fragmentation of the pool's memory";
        case VK_ERROR_INVALID_EXTERNAL_HANDLE:
            return "An external handle is not a valid handle of the specified type";
        default:
            return "Unknown vulkan error code";
    }
}
301
GetAvailableValidationLayers(const std::vector<VkLayerProperties> & layerProps,bool mustHaveLayers,VulkanLayerVector * enabledLayerNames)302 bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
303 bool mustHaveLayers,
304 VulkanLayerVector *enabledLayerNames)
305 {
306
307 ASSERT(enabledLayerNames);
308 for (const auto &layerProp : layerProps)
309 {
310 std::string layerPropLayerName = std::string(layerProp.layerName);
311
312 // Favor unified Khronos layer, but fallback to standard validation
313 if (layerPropLayerName == kVkKhronosValidationLayerName)
314 {
315 enabledLayerNames->push_back(kVkKhronosValidationLayerName);
316 continue;
317 }
318 else if (layerPropLayerName == kVkStandardValidationLayerName)
319 {
320 enabledLayerNames->push_back(kVkStandardValidationLayerName);
321 continue;
322 }
323
324 for (const char *validationLayerName : kVkValidationLayerNames)
325 {
326 if (layerPropLayerName == validationLayerName)
327 {
328 enabledLayerNames->push_back(validationLayerName);
329 break;
330 }
331 }
332 }
333
334 if (enabledLayerNames->size() == 0)
335 {
336 // Generate an error if the layers were explicitly requested, warning otherwise.
337 if (mustHaveLayers)
338 {
339 ERR() << "Vulkan validation layers are missing.";
340 }
341 else
342 {
343 WARN() << "Vulkan validation layers are missing.";
344 }
345
346 return false;
347 }
348
349 return true;
350 }
351
352 namespace vk
353 {
// Environment variable names recognized by the Vulkan loader to override the
// layer search path and the ICD (driver) manifest list, respectively.
const char *gLoaderLayersPathEnv = "VK_LAYER_PATH";
const char *gLoaderICDFilenamesEnv = "VK_ICD_FILENAMES";
356
GetDepthStencilAspectFlags(const angle::Format & format)357 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
358 {
359 return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
360 (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
361 }
362
GetFormatAspectFlags(const angle::Format & format)363 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
364 {
365 VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
366 // If the image is not depth stencil, assume color aspect. Note that detecting color formats
367 // is less trivial than depth/stencil, e.g. as block formats don't indicate any bits for RGBA
368 // channels.
369 return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
370 }
371
// ErrorContext implementation.
// Holds a non-owning pointer to the Renderer plus per-context perf counters.
ErrorContext::ErrorContext(Renderer *renderer) : mRenderer(renderer), mPerfCounters{} {}

ErrorContext::~ErrorContext() = default;

// Convenience accessor for the renderer's VkDevice.
VkDevice ErrorContext::getDevice() const
{
    return mRenderer->getDevice();
}

// Convenience accessor for the renderer's enabled feature set.
const angle::FeaturesVk &ErrorContext::getFeatures() const
{
    return mRenderer->getFeatures();
}
386
// MemoryProperties implementation.
// Caches the physical device's memory heap/type table.
MemoryProperties::MemoryProperties() : mMemoryProperties{} {}

// Queries and caches the memory properties; must only be called once
// (asserted via the zero memoryTypeCount of the default-initialized struct).
void MemoryProperties::init(VkPhysicalDevice physicalDevice)
{
    ASSERT(mMemoryProperties.memoryTypeCount == 0);
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
    ASSERT(mMemoryProperties.memoryTypeCount > 0);
}

// Resets the cached table so init() can be called again.
void MemoryProperties::destroy()
{
    mMemoryProperties = {};
}
401
hasLazilyAllocatedMemory() const402 bool MemoryProperties::hasLazilyAllocatedMemory() const
403 {
404 for (uint32_t typeIndex = 0; typeIndex < mMemoryProperties.memoryTypeCount; ++typeIndex)
405 {
406 const VkMemoryType &memoryType = mMemoryProperties.memoryTypes[typeIndex];
407 if ((memoryType.propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
408 {
409 return true;
410 }
411 }
412 return false;
413 }
414
// Finds a memory type index satisfying |memoryRequirements| and the requested
// property flags.  Falls back in two stages when no exact match exists:
// (1) for host-visible requests, retry with the spec-guaranteed
//     HOST_VISIBLE|HOST_COHERENT combination;
// (2) for external-memory imports, retry with no property requirements at all.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if the request exceeds the driver's
// maximum allocation size, and VK_ERROR_INCOMPATIBLE_DRIVER if no type fits.
VkResult MemoryProperties::findCompatibleMemoryIndex(
    Renderer *renderer,
    const VkMemoryRequirements &memoryRequirements,
    VkMemoryPropertyFlags requestedMemoryPropertyFlags,
    bool isExternalMemory,
    VkMemoryPropertyFlags *memoryPropertyFlagsOut,
    uint32_t *typeIndexOut) const
{
    ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);

    // The required size must not be greater than the maximum allocation size allowed by the driver.
    if (memoryRequirements.size > renderer->getMaxMemoryAllocationSize())
    {
        renderer->getMemoryAllocationTracker()->onExceedingMaxMemoryAllocationSize(
            memoryRequirements.size);
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    // Find a compatible memory pool index. If the index doesn't change, we could cache it.
    // Not finding a valid memory pool means an out-of-spec driver, or internal error.
    // TODO(jmadill): Determine if it is possible to cache indexes.
    // TODO(jmadill): More efficient memory allocation.
    if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, requestedMemoryPropertyFlags,
                             memoryPropertyFlagsOut, typeIndexOut))
    {
        return VK_SUCCESS;
    }

    // We did not find a compatible memory type. If the caller wanted a host visible memory, just
    // return the memory index with fallback, guaranteed, memory flags.
    if (requestedMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
    {
        // The Vulkan spec says the following -
        // There must be at least one memory type with both the
        // VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT and VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
        // bits set in its propertyFlags
        constexpr VkMemoryPropertyFlags fallbackMemoryPropertyFlags =
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

        if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, fallbackMemoryPropertyFlags,
                                 memoryPropertyFlagsOut, typeIndexOut))
        {
            return VK_SUCCESS;
        }
    }

    // We did not find a compatible memory type. When importing external memory, there may be
    // additional restrictions on memoryType. Find the first available memory type that Vulkan
    // driver decides being compatible with external memory import.
    if (isExternalMemory)
    {
        if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, 0, memoryPropertyFlagsOut,
                                 typeIndexOut))
        {
            return VK_SUCCESS;
        }
    }

    // TODO(jmadill): Add error message to error.
    return VK_ERROR_INCOMPATIBLE_DRIVER;
}
476
// StagingBuffer implementation.
StagingBuffer::StagingBuffer() : mSize(0) {}

// Immediately destroys the buffer and frees its allocation.  Only safe when
// the GPU is no longer using the buffer; otherwise use release()/collectGarbage().
void StagingBuffer::destroy(Renderer *renderer)
{
    VkDevice device = renderer->getDevice();
    mBuffer.destroy(device);
    mAllocation.destroy(renderer->getAllocator());
    mSize = 0;
}
487
// Creates a host-visible, host-coherent staging buffer of |size| bytes with
// transfer usage derived from |usage| (src, dst, or both).  The buffer and its
// allocation are created together through the VMA allocator.
angle::Result StagingBuffer::init(ErrorContext *context, VkDeviceSize size, StagingUsage usage)
{
    VkBufferCreateInfo createInfo    = {};
    createInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    createInfo.flags                 = 0;
    createInfo.size                  = size;
    createInfo.usage                 = GetStagingBufferUsageFlags(usage);
    createInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
    createInfo.queueFamilyIndexCount = 0;
    createInfo.pQueueFamilyIndices   = nullptr;

    // Host-visible and coherent memory is required so the CPU can map and
    // write/read without explicit flush/invalidate.
    VkMemoryPropertyFlags preferredFlags = 0;
    VkMemoryPropertyFlags requiredFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    Renderer *renderer         = context->getRenderer();
    const Allocator &allocator = renderer->getAllocator();

    uint32_t memoryTypeIndex = 0;
    ANGLE_VK_TRY(context,
                 allocator.createBuffer(createInfo, requiredFlags, preferredFlags,
                                        renderer->getFeatures().persistentlyMappedBuffers.enabled,
                                        &memoryTypeIndex, &mBuffer, &mAllocation));
    mSize = static_cast<size_t>(size);

    // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
    // invalid values ensures our testing doesn't assume zero-initialized memory.
    if (renderer->getFeatures().allocateNonZeroMemory.enabled)
    {
        ANGLE_TRY(InitMappableAllocation(context, allocator, &mAllocation, size, kNonZeroInitValue,
                                         requiredFlags));
    }

    return angle::Result::Continue;
}
523
// Defers destruction of the buffer and its allocation via the context's
// garbage list (freed once the GPU is done with them).
void StagingBuffer::release(ContextVk *contextVk)
{
    contextVk->addGarbage(&mBuffer);
    contextVk->addGarbage(&mAllocation);
}

// Hands the buffer and allocation to the renderer's garbage collector, tagged
// with |queueSerial| so they are destroyed only after that serial completes.
void StagingBuffer::collectGarbage(Renderer *renderer, const QueueSerial &queueSerial)
{
    GarbageObjects garbageObjects;
    garbageObjects.emplace_back(GetGarbage(&mBuffer));
    garbageObjects.emplace_back(GetGarbage(&mAllocation));

    ResourceUse use(queueSerial);
    renderer->collectGarbage(use, std::move(garbageObjects));
}
539
InitMappableAllocation(ErrorContext * context,const Allocator & allocator,Allocation * allocation,VkDeviceSize size,int value,VkMemoryPropertyFlags memoryPropertyFlags)540 angle::Result InitMappableAllocation(ErrorContext *context,
541 const Allocator &allocator,
542 Allocation *allocation,
543 VkDeviceSize size,
544 int value,
545 VkMemoryPropertyFlags memoryPropertyFlags)
546 {
547 uint8_t *mapPointer;
548 ANGLE_VK_TRY(context, allocation->map(allocator, &mapPointer));
549 memset(mapPointer, value, static_cast<size_t>(size));
550
551 if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
552 {
553 allocation->flush(allocator, 0, size);
554 }
555
556 allocation->unmap(allocator);
557
558 return angle::Result::Continue;
559 }
560
// Thin wrapper over AllocateBufferOrImageMemory for buffers: queries
// requirements, allocates, and binds in one call.
VkResult AllocateBufferMemory(ErrorContext *context,
                              vk::MemoryAllocationType memoryAllocationType,
                              VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                              VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                              const void *extraAllocationInfo,
                              Buffer *buffer,
                              uint32_t *memoryTypeIndexOut,
                              DeviceMemory *deviceMemoryOut,
                              VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, memoryAllocationType, requestedMemoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, buffer,
                                       memoryTypeIndexOut, deviceMemoryOut, sizeOut);
}

// Thin wrapper over AllocateBufferOrImageMemory for images.
VkResult AllocateImageMemory(ErrorContext *context,
                             vk::MemoryAllocationType memoryAllocationType,
                             VkMemoryPropertyFlags memoryPropertyFlags,
                             VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                             const void *extraAllocationInfo,
                             Image *image,
                             uint32_t *memoryTypeIndexOut,
                             DeviceMemory *deviceMemoryOut,
                             VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, image,
                                       memoryTypeIndexOut, deviceMemoryOut, sizeOut);
}
590
// Allocates and binds image memory using caller-supplied requirements (e.g.
// for external/AHB images where requirements come from an import query rather
// than vkGetImageMemoryRequirements).  The resulting property flags are
// discarded.
VkResult AllocateImageMemoryWithRequirements(ErrorContext *context,
                                             vk::MemoryAllocationType memoryAllocationType,
                                             VkMemoryPropertyFlags memoryPropertyFlags,
                                             const VkMemoryRequirements &memoryRequirements,
                                             const void *extraAllocationInfo,
                                             const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
                                             Image *image,
                                             uint32_t *memoryTypeIndexOut,
                                             DeviceMemory *deviceMemoryOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateAndBindBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
                                              &memoryPropertyFlagsOut, memoryRequirements,
                                              extraAllocationInfo, extraBindInfo, image,
                                              memoryTypeIndexOut, deviceMemoryOut);
}

// Allocates and binds buffer memory using caller-supplied requirements; unlike
// the image variant, the actual property flags are returned to the caller.
VkResult AllocateBufferMemoryWithRequirements(ErrorContext *context,
                                              MemoryAllocationType memoryAllocationType,
                                              VkMemoryPropertyFlags memoryPropertyFlags,
                                              const VkMemoryRequirements &memoryRequirements,
                                              const void *extraAllocationInfo,
                                              Buffer *buffer,
                                              VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                              uint32_t *memoryTypeIndexOut,
                                              DeviceMemory *deviceMemoryOut)
{
    return AllocateAndBindBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
                                              memoryPropertyFlagsOut, memoryRequirements,
                                              extraAllocationInfo, nullptr, buffer,
                                              memoryTypeIndexOut, deviceMemoryOut);
}
623
InitShaderModule(ErrorContext * context,ShaderModulePtr * shaderModulePtr,const uint32_t * shaderCode,size_t shaderCodeSize)624 angle::Result InitShaderModule(ErrorContext *context,
625 ShaderModulePtr *shaderModulePtr,
626 const uint32_t *shaderCode,
627 size_t shaderCodeSize)
628 {
629 ASSERT(!(*shaderModulePtr));
630 VkShaderModuleCreateInfo createInfo = {};
631 createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
632 createInfo.flags = 0;
633 createInfo.codeSize = shaderCodeSize;
634 createInfo.pCode = shaderCode;
635
636 ShaderModulePtr newShaderModule = ShaderModulePtr::MakeShared(context->getDevice());
637 ANGLE_VK_TRY(context, newShaderModule->init(context->getDevice(), createInfo));
638
639 *shaderModulePtr = std::move(newShaderModule);
640
641 return angle::Result::Continue;
642 }
643
Get2DTextureType(uint32_t layerCount,GLint samples)644 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
645 {
646 if (layerCount > 1)
647 {
648 if (samples > 1)
649 {
650 return gl::TextureType::_2DMultisampleArray;
651 }
652 else
653 {
654 return gl::TextureType::_2DArray;
655 }
656 }
657 else
658 {
659 if (samples > 1)
660 {
661 return gl::TextureType::_2DMultisample;
662 }
663 else
664 {
665 return gl::TextureType::_2D;
666 }
667 }
668 }
669
// A GarbageObject is a type-erased Vulkan handle awaiting deferred destruction.
GarbageObject::GarbageObject() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE) {}

GarbageObject::GarbageObject(HandleType handleType, GarbageHandle handle)
    : mHandleType(handleType), mHandle(handle)
{}

// Move construction leaves |other| in the empty (Invalid) state.
GarbageObject::GarbageObject(GarbageObject &&other) : GarbageObject()
{
    *this = std::move(other);
}

// Move assignment swaps state; the moved-from object takes this object's old
// (typically empty) handle.
GarbageObject &GarbageObject::operator=(GarbageObject &&rhs)
{
    std::swap(mHandle, rhs.mHandle);
    std::swap(mHandleType, rhs.mHandleType);
    return *this;
}
687
// GarbageObject implementation
// Using c-style casts here to avoid conditional compile for MSVC 32-bit
// which fails to compile with reinterpret_cast, requiring static_cast.
// Destroys the wrapped Vulkan object via the entry point matching the stored
// handle type, then updates the renderer's handle accounting.
void GarbageObject::destroy(Renderer *renderer)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "GarbageObject::destroy");
    VkDevice device = renderer->getDevice();
    switch (mHandleType)
    {
        case HandleType::Semaphore:
            vkDestroySemaphore(device, (VkSemaphore)mHandle, nullptr);
            break;
        case HandleType::CommandBuffer:
            // Command buffers are pool allocated.
            UNREACHABLE();
            break;
        case HandleType::Event:
            vkDestroyEvent(device, (VkEvent)mHandle, nullptr);
            break;
        case HandleType::Fence:
            vkDestroyFence(device, (VkFence)mHandle, nullptr);
            break;
        case HandleType::DeviceMemory:
            vkFreeMemory(device, (VkDeviceMemory)mHandle, nullptr);
            break;
        case HandleType::Buffer:
            vkDestroyBuffer(device, (VkBuffer)mHandle, nullptr);
            break;
        case HandleType::BufferView:
            vkDestroyBufferView(device, (VkBufferView)mHandle, nullptr);
            break;
        case HandleType::Image:
            vkDestroyImage(device, (VkImage)mHandle, nullptr);
            break;
        case HandleType::ImageView:
            vkDestroyImageView(device, (VkImageView)mHandle, nullptr);
            break;
        case HandleType::ShaderModule:
            vkDestroyShaderModule(device, (VkShaderModule)mHandle, nullptr);
            break;
        case HandleType::PipelineLayout:
            vkDestroyPipelineLayout(device, (VkPipelineLayout)mHandle, nullptr);
            break;
        case HandleType::RenderPass:
            vkDestroyRenderPass(device, (VkRenderPass)mHandle, nullptr);
            break;
        case HandleType::Pipeline:
            vkDestroyPipeline(device, (VkPipeline)mHandle, nullptr);
            break;
        case HandleType::DescriptorSetLayout:
            vkDestroyDescriptorSetLayout(device, (VkDescriptorSetLayout)mHandle, nullptr);
            break;
        case HandleType::Sampler:
            vkDestroySampler(device, (VkSampler)mHandle, nullptr);
            break;
        case HandleType::DescriptorPool:
            vkDestroyDescriptorPool(device, (VkDescriptorPool)mHandle, nullptr);
            break;
        case HandleType::Framebuffer:
            vkDestroyFramebuffer(device, (VkFramebuffer)mHandle, nullptr);
            break;
        case HandleType::CommandPool:
            vkDestroyCommandPool(device, (VkCommandPool)mHandle, nullptr);
            break;
        case HandleType::QueryPool:
            vkDestroyQueryPool(device, (VkQueryPool)mHandle, nullptr);
            break;
        case HandleType::Allocation:
            // VMA allocations are freed through the allocator, not the device.
            vma::FreeMemory(renderer->getAllocator().getHandle(), (VmaAllocation)mHandle);
            break;
        default:
            UNREACHABLE();
            break;
    }

    renderer->onDeallocateHandle(mHandleType, 1);
}
765
// Fills in a VkDebugUtilsLabelEXT for |marker|, choosing a color based on the
// GL debug source so labels from different sources are visually distinct in
// graphics debuggers.  |marker| must outlive the label (pLabelName is not copied).
void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label)
{
    // One color per GL_DEBUG_SOURCE_* value, in enum order.
    static constexpr angle::ColorF kLabelColors[6] = {
        angle::ColorF(1.0f, 0.5f, 0.5f, 1.0f),  // DEBUG_SOURCE_API
        angle::ColorF(0.5f, 1.0f, 0.5f, 1.0f),  // DEBUG_SOURCE_WINDOW_SYSTEM
        angle::ColorF(0.5f, 0.5f, 1.0f, 1.0f),  // DEBUG_SOURCE_SHADER_COMPILER
        angle::ColorF(0.7f, 0.7f, 0.7f, 1.0f),  // DEBUG_SOURCE_THIRD_PARTY
        angle::ColorF(0.5f, 0.8f, 0.9f, 1.0f),  // DEBUG_SOURCE_APPLICATION
        angle::ColorF(0.9f, 0.8f, 0.5f, 1.0f),  // DEBUG_SOURCE_OTHER
    };

    // GL_DEBUG_SOURCE_* values are contiguous starting at GL_DEBUG_SOURCE_API.
    int colorIndex = source - GL_DEBUG_SOURCE_API;
    ASSERT(colorIndex >= 0 && static_cast<size_t>(colorIndex) < ArraySize(kLabelColors));

    label->sType      = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
    label->pNext      = nullptr;
    label->pLabelName = marker;
    kLabelColors[colorIndex].writeData(label->color);
}
785
SetDebugUtilsObjectName(ContextVk * contextVk,VkObjectType objectType,uint64_t handle,const std::string & label)786 angle::Result SetDebugUtilsObjectName(ContextVk *contextVk,
787 VkObjectType objectType,
788 uint64_t handle,
789 const std::string &label)
790 {
791 Renderer *renderer = contextVk->getRenderer();
792
793 VkDebugUtilsObjectNameInfoEXT objectNameInfo = {};
794 objectNameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
795 objectNameInfo.objectType = objectType;
796 objectNameInfo.objectHandle = handle;
797 objectNameInfo.pObjectName = label.c_str();
798
799 if (vkSetDebugUtilsObjectNameEXT)
800 {
801 ANGLE_VK_TRY(contextVk,
802 vkSetDebugUtilsObjectNameEXT(renderer->getDevice(), &objectNameInfo));
803 }
804 return angle::Result::Continue;
805 }
806
// ClearValuesArray implementation.
// Zero-initialize the stored clear values and the "enabled" bitset (no pending clears).
ClearValuesArray::ClearValuesArray() : mValues{}, mEnabled{} {}

ClearValuesArray::~ClearValuesArray() = default;

ClearValuesArray::ClearValuesArray(const ClearValuesArray &other) = default;

ClearValuesArray &ClearValuesArray::operator=(const ClearValuesArray &rhs) = default;
815
// Records a clear value at |index|.  |aspectFlags| selects which aspects of a depth/stencil
// value apply; a stencil-bearing clear is additionally mirrored into the dedicated stencil
// slot so packed depth-stencil clears occupy both slots.
void ClearValuesArray::store(uint32_t index,
                             VkImageAspectFlags aspectFlags,
                             const VkClearValue &clearValue)
{
    ASSERT(aspectFlags != 0);

    // We do this double if to handle the packed depth-stencil case.
    if ((aspectFlags & VK_IMAGE_ASPECT_STENCIL_BIT) != 0)
    {
        // Ensure for packed DS we're writing to the depth index.
        ASSERT(index == kUnpackedDepthIndex ||
               (index == kUnpackedStencilIndex && aspectFlags == VK_IMAGE_ASPECT_STENCIL_BIT));

        // The stencil portion always lands in its own dedicated slot.
        storeNoDepthStencil(kUnpackedStencilIndex, clearValue);
    }

    // Unless this is a stencil-only clear, also store at the caller's index (color or depth).
    if (aspectFlags != VK_IMAGE_ASPECT_STENCIL_BIT)
    {
        storeNoDepthStencil(index, clearValue);
    }
}
837
// Stores |clearValue| at |index| and marks that slot as having a pending clear.
void ClearValuesArray::storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue)
{
    mValues[index] = clearValue;
    mEnabled.set(index);
}
843
// Returns the color-buffer slots with pending clears, masking out the depth/stencil slots.
gl::DrawBufferMask ClearValuesArray::getColorMask() const
{
    return gl::DrawBufferMask(mEnabled.bits() & kUnpackedColorBuffersMask);
}
848
// ResourceSerialFactory implementation.
// Serials start at 1 so that 0 can act as an "invalid/unset" value.
ResourceSerialFactory::ResourceSerialFactory() : mCurrentUniqueSerial(1) {}

ResourceSerialFactory::~ResourceSerialFactory() {}

// Returns the next monotonically increasing serial.  NOTE(review): thread-safety depends on
// the declared type of mCurrentUniqueSerial (not visible here) -- confirm it is atomic if
// serials are issued from multiple threads.
uint32_t ResourceSerialFactory::issueSerial()
{
    uint32_t newSerial = ++mCurrentUniqueSerial;
    // make sure serial does not wrap
    ASSERT(newSerial > 0);
    return newSerial;
}
861
// Stamps out one generate<Type>Serial() method per serial type listed by ANGLE_VK_SERIAL_OP.
#define ANGLE_DEFINE_GEN_VK_SERIAL(Type)                         \
    Type##Serial ResourceSerialFactory::generate##Type##Serial() \
    {                                                            \
        return Type##Serial(issueSerial());                      \
    }

ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_GEN_VK_SERIAL)
869
870 void ClampViewport(VkViewport *viewport)
871 {
872 // 0-sized viewports are invalid in Vulkan.
873 ASSERT(viewport);
874 if (viewport->width == 0.0f)
875 {
876 viewport->width = 1.0f;
877 }
878 if (viewport->height == 0.0f)
879 {
880 viewport->height = 1.0f;
881 }
882 }
883
ApplyPipelineCreationFeedback(ErrorContext * context,const VkPipelineCreationFeedback & feedback)884 void ApplyPipelineCreationFeedback(ErrorContext *context,
885 const VkPipelineCreationFeedback &feedback)
886 {
887 const bool cacheHit =
888 (feedback.flags & VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT) != 0;
889
890 angle::VulkanPerfCounters &perfCounters = context->getPerfCounters();
891
892 if (cacheHit)
893 {
894 ++perfCounters.pipelineCreationCacheHits;
895 perfCounters.pipelineCreationTotalCacheHitsDurationNs += feedback.duration;
896 }
897 else
898 {
899 ++perfCounters.pipelineCreationCacheMisses;
900 perfCounters.pipelineCreationTotalCacheMissesDurationNs += feedback.duration;
901 }
902 }
903
// Generic hash over the key's contents, for use as a map key.
size_t MemoryAllocInfoMapKey::hash() const
{
    return angle::ComputeGenericHash(*this);
}
908 } // namespace vk
909
#if !defined(ANGLE_SHARED_LIBVULKAN)
// When the Vulkan loader is linked statically, extension (and some promoted) entry points are
// not resolved automatically.  They are defined here as null pointers and fetched at runtime
// by the Init* helpers below via vkGetInstanceProcAddr / vkGetDeviceProcAddr.

// VK_EXT_debug_utils
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = nullptr;
PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = nullptr;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = nullptr;
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = nullptr;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = nullptr;
PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = nullptr;

// VK_KHR_get_physical_device_properties2
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = nullptr;
PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = nullptr;
PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = nullptr;

// VK_KHR_external_semaphore_fd
PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = nullptr;

// VK_EXT_device_fault
PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = nullptr;

// VK_EXT_host_query_reset
PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = nullptr;

// VK_EXT_transform_feedback
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = nullptr;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = nullptr;
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = nullptr;
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = nullptr;
PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = nullptr;
PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = nullptr;

// VK_KHR_get_memory_requirements2
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = nullptr;
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = nullptr;

// VK_KHR_bind_memory2
PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = nullptr;
PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = nullptr;

// VK_KHR_external_fence_capabilities
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR =
    nullptr;

// VK_KHR_external_fence_fd
PFN_vkGetFenceFdKHR vkGetFenceFdKHR = nullptr;
PFN_vkImportFenceFdKHR vkImportFenceFdKHR = nullptr;

// VK_KHR_external_semaphore_capabilities
PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR
    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr;

// VK_KHR_sampler_ycbcr_conversion
PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = nullptr;
PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = nullptr;

// VK_KHR_create_renderpass2
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = nullptr;

# if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = nullptr;
# endif

# if defined(ANGLE_PLATFORM_ANDROID)
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID =
    nullptr;
PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = nullptr;
# endif

// Resolves an instance-level entry point by name; asserts the loader returned a non-null
// pointer (callers only invoke this when the extension is known to be enabled).
# define GET_INSTANCE_FUNC(vkName)                                                         \
    do                                                                                     \
    {                                                                                      \
        vkName = reinterpret_cast<PFN_##vkName>(vkGetInstanceProcAddr(instance, #vkName)); \
        ASSERT(vkName);                                                                    \
    } while (0)

// Resolves a device-level entry point by name; same non-null expectation as above.
# define GET_DEVICE_FUNC(vkName)                                                       \
    do                                                                                 \
    {                                                                                  \
        vkName = reinterpret_cast<PFN_##vkName>(vkGetDeviceProcAddr(device, #vkName)); \
        ASSERT(vkName);                                                                \
    } while (0)

// VK_KHR_shared_presentable_image
PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = nullptr;

// VK_EXT_extended_dynamic_state
PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = nullptr;
PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = nullptr;
PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = nullptr;
PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = nullptr;
PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = nullptr;
PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = nullptr;
PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = nullptr;
PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = nullptr;
PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = nullptr;
PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = nullptr;
PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = nullptr;
PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = nullptr;

// VK_EXT_extended_dynamic_state2
PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = nullptr;
PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = nullptr;
PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = nullptr;
PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = nullptr;
PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = nullptr;

// VK_EXT_vertex_input_dynamic_state
PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = nullptr;

// VK_KHR_dynamic_rendering
PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = nullptr;
PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = nullptr;

// VK_KHR_dynamic_rendering_local_read
PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = nullptr;
PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = nullptr;

// VK_KHR_fragment_shading_rate
PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = nullptr;
PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = nullptr;

// VK_GOOGLE_display_timing
PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = nullptr;

// VK_EXT_host_image_copy
PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = nullptr;
PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = nullptr;
PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = nullptr;
PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = nullptr;
PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = nullptr;

// VK_KHR_synchronization2
PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = nullptr;
PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = nullptr;
1045
// VK_EXT_debug_utils
void InitDebugUtilsEXTFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkDestroyDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkCmdBeginDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdEndDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdInsertDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkSetDebugUtilsObjectNameEXT);
}

// VK_EXT_transform_feedback
void InitTransformFeedbackEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBindTransformFeedbackBuffersEXT);
    GET_DEVICE_FUNC(vkCmdBeginTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdEndTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdBeginQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdEndQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdDrawIndirectByteCountEXT);
}

// VK_KHR_create_renderpass2
void InitRenderPass2KHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCreateRenderPass2KHR);
}

# if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateImagePipeSurfaceFUCHSIA);
}
# endif

# if defined(ANGLE_PLATFORM_ANDROID)
// VK_ANDROID_external_memory_android_hardware_buffer
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetAndroidHardwareBufferPropertiesANDROID);
    GET_DEVICE_FUNC(vkGetMemoryAndroidHardwareBufferANDROID);
}
# endif

// VK_KHR_external_semaphore_fd
void InitExternalSemaphoreFdFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkImportSemaphoreFdKHR);
}

// VK_EXT_device_fault
void InitDeviceFaultFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetDeviceFaultInfoEXT);
}

// VK_EXT_host_query_reset
void InitHostQueryResetFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkResetQueryPoolEXT);
}

// VK_KHR_external_fence_fd
void InitExternalFenceFdFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetFenceFdKHR);
    GET_DEVICE_FUNC(vkImportFenceFdKHR);
}
1108
// VK_KHR_shared_presentable_image
void InitGetSwapchainStatusKHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetSwapchainStatusKHR);
}

// VK_EXT_extended_dynamic_state
void InitExtendedDynamicStateEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBindVertexBuffers2EXT);
    GET_DEVICE_FUNC(vkCmdSetCullModeEXT);
    GET_DEVICE_FUNC(vkCmdSetDepthBoundsTestEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetDepthCompareOpEXT);
    GET_DEVICE_FUNC(vkCmdSetDepthTestEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetDepthWriteEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetFrontFaceEXT);
    GET_DEVICE_FUNC(vkCmdSetPrimitiveTopologyEXT);
    GET_DEVICE_FUNC(vkCmdSetScissorWithCountEXT);
    GET_DEVICE_FUNC(vkCmdSetStencilOpEXT);
    GET_DEVICE_FUNC(vkCmdSetStencilTestEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetViewportWithCountEXT);
}

// VK_EXT_extended_dynamic_state2
void InitExtendedDynamicState2EXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdSetDepthBiasEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetLogicOpEXT);
    GET_DEVICE_FUNC(vkCmdSetPatchControlPointsEXT);
    GET_DEVICE_FUNC(vkCmdSetPrimitiveRestartEnableEXT);
    GET_DEVICE_FUNC(vkCmdSetRasterizerDiscardEnableEXT);
}

// VK_EXT_vertex_input_dynamic_state
void InitVertexInputDynamicStateEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdSetVertexInputEXT);
}

// VK_KHR_dynamic_rendering
void InitDynamicRenderingFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBeginRenderingKHR);
    GET_DEVICE_FUNC(vkCmdEndRenderingKHR);
}

// VK_KHR_dynamic_rendering_local_read
void InitDynamicRenderingLocalReadFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdSetRenderingAttachmentLocationsKHR);
    GET_DEVICE_FUNC(vkCmdSetRenderingInputAttachmentIndicesKHR);
}

// VK_KHR_fragment_shading_rate
void InitFragmentShadingRateKHRInstanceFunction(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceFragmentShadingRatesKHR);
}

void InitFragmentShadingRateKHRDeviceFunction(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdSetFragmentShadingRateKHR);
}

// VK_GOOGLE_display_timing
void InitGetPastPresentationTimingGoogleFunction(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetPastPresentationTimingGOOGLE);
}

// VK_EXT_host_image_copy
void InitHostImageCopyFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCopyImageToImageEXT);
    GET_DEVICE_FUNC(vkCopyImageToMemoryEXT);
    GET_DEVICE_FUNC(vkCopyMemoryToImageEXT);
    GET_DEVICE_FUNC(vkGetImageSubresourceLayout2EXT);
    GET_DEVICE_FUNC(vkTransitionImageLayoutEXT);
}

// VK_KHR_synchronization2
void InitSynchronization2Functions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdPipelineBarrier2KHR);
    GET_DEVICE_FUNC(vkCmdWriteTimestamp2KHR);
}

# undef GET_INSTANCE_FUNC
# undef GET_DEVICE_FUNC

#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
1199
// Aliases an extension-suffixed entry point to its core (promoted) equivalent, e.g.
// vkGetPhysicalDeviceProperties2KHR = vkGetPhysicalDeviceProperties2.
#define ASSIGN_FROM_CORE(vkName, EXT)              \
    do                                             \
    {                                              \
        /* The core entry point must be present */ \
        ASSERT(vkName != nullptr);                 \
        vkName##EXT = vkName;                      \
    } while (0)
1207
// The *FromCore helpers below point the extension-suffixed function pointers at the core
// entry points, for use when the corresponding functionality was promoted to the core
// Vulkan version in use.
void InitGetPhysicalDeviceProperties2KHRFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceProperties2, KHR);
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceFeatures2, KHR);
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceMemoryProperties2, KHR);
}

void InitExternalFenceCapabilitiesFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceExternalFenceProperties, KHR);
}

void InitExternalSemaphoreCapabilitiesFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkGetPhysicalDeviceExternalSemaphoreProperties, KHR);
}

void InitSamplerYcbcrKHRFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkCreateSamplerYcbcrConversion, KHR);
    ASSIGN_FROM_CORE(vkDestroySamplerYcbcrConversion, KHR);
}

void InitGetMemoryRequirements2KHRFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkGetBufferMemoryRequirements2, KHR);
    ASSIGN_FROM_CORE(vkGetImageMemoryRequirements2, KHR);
}

void InitBindMemory2KHRFunctionsFromCore()
{
    ASSIGN_FROM_CORE(vkBindBufferMemory2, KHR);
    ASSIGN_FROM_CORE(vkBindImageMemory2, KHR);
}

#undef ASSIGN_FROM_CORE
1244
CalculateGenerateMipmapFilter(ContextVk * contextVk,angle::FormatID formatID)1245 GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, angle::FormatID formatID)
1246 {
1247 const bool formatSupportsLinearFiltering = contextVk->getRenderer()->hasImageFormatFeatureBits(
1248 formatID, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT);
1249 const bool hintFastest = contextVk->getState().getGenerateMipmapHint() == GL_FASTEST;
1250
1251 return formatSupportsLinearFiltering && !hintFastest ? GL_LINEAR : GL_NEAREST;
1252 }
1253
1254 namespace gl_vk
1255 {
1256
GetFilter(const GLenum filter)1257 VkFilter GetFilter(const GLenum filter)
1258 {
1259 switch (filter)
1260 {
1261 case GL_LINEAR_MIPMAP_LINEAR:
1262 case GL_LINEAR_MIPMAP_NEAREST:
1263 case GL_LINEAR:
1264 return VK_FILTER_LINEAR;
1265 case GL_NEAREST_MIPMAP_LINEAR:
1266 case GL_NEAREST_MIPMAP_NEAREST:
1267 case GL_NEAREST:
1268 return VK_FILTER_NEAREST;
1269 default:
1270 UNIMPLEMENTED();
1271 return VK_FILTER_MAX_ENUM;
1272 }
1273 }
1274
GetSamplerMipmapMode(const GLenum filter)1275 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
1276 {
1277 switch (filter)
1278 {
1279 case GL_LINEAR_MIPMAP_LINEAR:
1280 case GL_NEAREST_MIPMAP_LINEAR:
1281 return VK_SAMPLER_MIPMAP_MODE_LINEAR;
1282 case GL_LINEAR:
1283 case GL_NEAREST:
1284 case GL_NEAREST_MIPMAP_NEAREST:
1285 case GL_LINEAR_MIPMAP_NEAREST:
1286 return VK_SAMPLER_MIPMAP_MODE_NEAREST;
1287 default:
1288 UNIMPLEMENTED();
1289 return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
1290 }
1291 }
1292
// Converts a GL texture wrap mode to the corresponding Vulkan sampler address mode.
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
{
    switch (wrap)
    {
        case GL_REPEAT:
            return VK_SAMPLER_ADDRESS_MODE_REPEAT;
        case GL_MIRRORED_REPEAT:
            return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
        case GL_CLAMP_TO_BORDER:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
        case GL_CLAMP_TO_EDGE:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        case GL_MIRROR_CLAMP_TO_EDGE_EXT:
            return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
        default:
            UNIMPLEMENTED();
            return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
    }
}
1312
// Converts a GL primitive mode to a Vulkan primitive topology.
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
{
    switch (mode)
    {
        case gl::PrimitiveMode::Triangles:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
        case gl::PrimitiveMode::Points:
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        case gl::PrimitiveMode::Lines:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
        case gl::PrimitiveMode::LineStrip:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case gl::PrimitiveMode::TriangleFan:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
        case gl::PrimitiveMode::TriangleStrip:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        case gl::PrimitiveMode::LineLoop:
            // Vulkan has no line loop topology; a line strip is used instead.  NOTE(review):
            // the closing segment is presumably supplied by ANGLE's line loop handling in the
            // callers -- confirm.
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case gl::PrimitiveMode::LinesAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY;
        case gl::PrimitiveMode::LineStripAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY;
        case gl::PrimitiveMode::TrianglesAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY;
        case gl::PrimitiveMode::TriangleStripAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY;
        case gl::PrimitiveMode::Patches:
            return VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
        default:
            UNREACHABLE();
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    }
}
1346
GetPolygonMode(const gl::PolygonMode polygonMode)1347 VkPolygonMode GetPolygonMode(const gl::PolygonMode polygonMode)
1348 {
1349 switch (polygonMode)
1350 {
1351 case gl::PolygonMode::Point:
1352 return VK_POLYGON_MODE_POINT;
1353 case gl::PolygonMode::Line:
1354 return VK_POLYGON_MODE_LINE;
1355 case gl::PolygonMode::Fill:
1356 return VK_POLYGON_MODE_FILL;
1357 default:
1358 UNREACHABLE();
1359 return VK_POLYGON_MODE_FILL;
1360 }
1361 }
1362
GetCullMode(const gl::RasterizerState & rasterState)1363 VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
1364 {
1365 if (!rasterState.cullFace)
1366 {
1367 return VK_CULL_MODE_NONE;
1368 }
1369
1370 switch (rasterState.cullMode)
1371 {
1372 case gl::CullFaceMode::Front:
1373 return VK_CULL_MODE_FRONT_BIT;
1374 case gl::CullFaceMode::Back:
1375 return VK_CULL_MODE_BACK_BIT;
1376 case gl::CullFaceMode::FrontAndBack:
1377 return VK_CULL_MODE_FRONT_AND_BACK;
1378 default:
1379 UNREACHABLE();
1380 return VK_CULL_MODE_NONE;
1381 }
1382 }
1383
GetFrontFace(GLenum frontFace,bool invertCullFace)1384 VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
1385 {
1386 // Invert CW and CCW to have the same behavior as OpenGL.
1387 switch (frontFace)
1388 {
1389 case GL_CW:
1390 return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
1391 case GL_CCW:
1392 return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
1393 default:
1394 UNREACHABLE();
1395 return VK_FRONT_FACE_CLOCKWISE;
1396 }
1397 }
1398
GetSamples(GLint sampleCount,bool limitSampleCountTo2)1399 VkSampleCountFlagBits GetSamples(GLint sampleCount, bool limitSampleCountTo2)
1400 {
1401 if (limitSampleCountTo2)
1402 {
1403 // Limiting samples to 2 allows multisampling to work while reducing
1404 // how much graphics memory is required. This makes ANGLE nonconformant
1405 // (GLES 3.0+ requires 4 samples minimum) but gives low memory systems a
1406 // better chance of running applications.
1407 sampleCount = std::min(sampleCount, 2);
1408 }
1409
1410 switch (sampleCount)
1411 {
1412 case 0:
1413 UNREACHABLE();
1414 return VK_SAMPLE_COUNT_1_BIT;
1415 case 1:
1416 return VK_SAMPLE_COUNT_1_BIT;
1417 case 2:
1418 return VK_SAMPLE_COUNT_2_BIT;
1419 case 4:
1420 return VK_SAMPLE_COUNT_4_BIT;
1421 case 8:
1422 return VK_SAMPLE_COUNT_8_BIT;
1423 case 16:
1424 return VK_SAMPLE_COUNT_16_BIT;
1425 case 32:
1426 return VK_SAMPLE_COUNT_32_BIT;
1427 default:
1428 UNREACHABLE();
1429 return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
1430 }
1431 }
1432
// Converts a GL texture swizzle value to the corresponding Vulkan component swizzle.
VkComponentSwizzle GetSwizzle(const GLenum swizzle)
{
    switch (swizzle)
    {
        case GL_ALPHA:
            return VK_COMPONENT_SWIZZLE_A;
        case GL_RED:
            return VK_COMPONENT_SWIZZLE_R;
        case GL_GREEN:
            return VK_COMPONENT_SWIZZLE_G;
        case GL_BLUE:
            return VK_COMPONENT_SWIZZLE_B;
        case GL_ZERO:
            return VK_COMPONENT_SWIZZLE_ZERO;
        case GL_ONE:
            return VK_COMPONENT_SWIZZLE_ONE;
        default:
            UNREACHABLE();
            return VK_COMPONENT_SWIZZLE_IDENTITY;
    }
}
1454
// Converts a GL comparison function (depth/stencil test) to the Vulkan compare op.
VkCompareOp GetCompareOp(const GLenum compareFunc)
{
    switch (compareFunc)
    {
        case GL_NEVER:
            return VK_COMPARE_OP_NEVER;
        case GL_LESS:
            return VK_COMPARE_OP_LESS;
        case GL_EQUAL:
            return VK_COMPARE_OP_EQUAL;
        case GL_LEQUAL:
            return VK_COMPARE_OP_LESS_OR_EQUAL;
        case GL_GREATER:
            return VK_COMPARE_OP_GREATER;
        case GL_NOTEQUAL:
            return VK_COMPARE_OP_NOT_EQUAL;
        case GL_GEQUAL:
            return VK_COMPARE_OP_GREATER_OR_EQUAL;
        case GL_ALWAYS:
            return VK_COMPARE_OP_ALWAYS;
        default:
            UNREACHABLE();
            return VK_COMPARE_OP_ALWAYS;
    }
}
1480
// Converts a GL stencil operation to the Vulkan stencil op.
VkStencilOp GetStencilOp(GLenum compareOp)
{
    switch (compareOp)
    {
        case GL_KEEP:
            return VK_STENCIL_OP_KEEP;
        case GL_ZERO:
            return VK_STENCIL_OP_ZERO;
        case GL_REPLACE:
            return VK_STENCIL_OP_REPLACE;
        case GL_INCR:
            return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
        case GL_DECR:
            return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
        case GL_INCR_WRAP:
            return VK_STENCIL_OP_INCREMENT_AND_WRAP;
        case GL_DECR_WRAP:
            return VK_STENCIL_OP_DECREMENT_AND_WRAP;
        case GL_INVERT:
            return VK_STENCIL_OP_INVERT;
        default:
            UNREACHABLE();
            return VK_STENCIL_OP_KEEP;
    }
}
1506
// Converts a GL logic op to the Vulkan logic op.
VkLogicOp GetLogicOp(const GLenum logicOp)
{
    // GL's logic op values are 0x1500 + op, where op is the same value as Vulkan's VkLogicOp.
    return static_cast<VkLogicOp>(logicOp - GL_CLEAR);
}
1512
GetOffset(const gl::Offset & glOffset,VkOffset3D * vkOffset)1513 void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
1514 {
1515 vkOffset->x = glOffset.x;
1516 vkOffset->y = glOffset.y;
1517 vkOffset->z = glOffset.z;
1518 }
1519
GetExtent(const gl::Extents & glExtent,VkExtent3D * vkExtent)1520 void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
1521 {
1522 vkExtent->width = glExtent.width;
1523 vkExtent->height = glExtent.height;
1524 vkExtent->depth = glExtent.depth;
1525 }
1526
// Maps a GL texture type to the Vulkan image type.  Array, multisample and cube variants all
// use 2D images (layers/cube faces are expressed through the image's layer count).
VkImageType GetImageType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::_2DMultisampleArray:
        case gl::TextureType::CubeMap:
        case gl::TextureType::CubeMapArray:
        case gl::TextureType::External:
            return VK_IMAGE_TYPE_2D;
        case gl::TextureType::_3D:
            return VK_IMAGE_TYPE_3D;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_TYPE_MAX_ENUM;
    }
}
1547
// Maps a GL texture type to the Vulkan image view type.
VkImageViewType GetImageViewType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::External:
            return VK_IMAGE_VIEW_TYPE_2D;
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisampleArray:
            return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        case gl::TextureType::_3D:
            return VK_IMAGE_VIEW_TYPE_3D;
        case gl::TextureType::CubeMap:
            return VK_IMAGE_VIEW_TYPE_CUBE;
        case gl::TextureType::CubeMapArray:
            return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
    }
}
1571
GetColorComponentFlags(bool red,bool green,bool blue,bool alpha)1572 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
1573 {
1574 return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
1575 (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
1576 }
1577
// ORs together the Vulkan stage bit for every shader stage present in |activeShaders|.
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
{
    VkShaderStageFlags flags = 0;
    for (const gl::ShaderType shaderType : activeShaders)
    {
        flags |= kShaderStageMap[shaderType];
    }
    return flags;
}
1587
// Converts a GL viewport rectangle and depth range to a VkViewport, flipping y and/or
// negating the height as needed to reconcile the GL and Vulkan coordinate conventions (see
// the diagrams below).  |invertViewport| indicates rendering to the default framebuffer,
// |renderAreaHeight| is the height used for the y flip.
void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 bool clipSpaceOriginUpperLeft,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut)
{
    viewportOut->x        = static_cast<float>(viewport.x);
    viewportOut->y        = static_cast<float>(viewport.y);
    viewportOut->width    = static_cast<float>(viewport.width);
    viewportOut->height   = static_cast<float>(viewport.height);
    // Vulkan requires depth bounds in [0, 1].
    viewportOut->minDepth = gl::clamp01(nearPlane);
    viewportOut->maxDepth = gl::clamp01(farPlane);

    // Say an application intends to draw a primitive (shown as 'o' below), it can choose to use
    // different clip space origin. When clip space origin (shown as 'C' below) is switched from
    // lower-left to upper-left, primitives will be rendered with its y-coordinate flipped.

    // Rendered content will differ based on whether it is a default framebuffer or a user defined
    // framebuffer. We modify the viewport's 'y' and 'h' accordingly.

    // clip space origin is lower-left
    // Expected draw in GLES default framebuffer user defined framebuffer
    // (0,H) (0,0) (0,0)
    // + +-----------+ (W,0) +-----------+ (W,0)
    // | | | C----+
    // | | | | | (h)
    // | +----+ | +----+ | | O |
    // | | O | | | O | (-h) | +----+
    // | | | | | | |
    // | C----+ | C----+ |
    // +-----------+ (W,0) + +
    // (0,0) (0,H) (0,H)
    // y' = H - h y' = y

    // clip space origin is upper-left
    // Expected draw in GLES default framebuffer user defined framebuffer
    // (0,H) (0,0) (0,0)
    // + +-----------+ (W,0) +-----------+ (W,0)
    // | | | +----+
    // | | | | O | (-h)
    // | C----+ | C----+ | | |
    // | | | | | | (h) | C----+
    // | | O | | | O | |
    // | +----+ | +----+ |
    // +-----------+ (W,0) + +
    // (0,0) (0,H) (0,H)
    // y' = H - (y + h) y' = y + H

    if (clipSpaceOriginUpperLeft)
    {
        if (invertViewport)
        {
            viewportOut->y = static_cast<float>(renderAreaHeight - (viewport.height + viewport.y));
        }
        else
        {
            viewportOut->y      = static_cast<float>(viewport.height + viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
    else
    {
        if (invertViewport)
        {
            viewportOut->y      = static_cast<float>(renderAreaHeight - viewport.y);
            viewportOut->height = -viewportOut->height;
        }
        // Lower-left origin with a user framebuffer needs no adjustment.
    }
}
1659
GetExtentsAndLayerCount(gl::TextureType textureType,const gl::Extents & extents,VkExtent3D * extentsOut,uint32_t * layerCountOut)1660 void GetExtentsAndLayerCount(gl::TextureType textureType,
1661 const gl::Extents &extents,
1662 VkExtent3D *extentsOut,
1663 uint32_t *layerCountOut)
1664 {
1665 extentsOut->width = extents.width;
1666 extentsOut->height = extents.height;
1667
1668 switch (textureType)
1669 {
1670 case gl::TextureType::CubeMap:
1671 extentsOut->depth = 1;
1672 *layerCountOut = gl::kCubeFaceCount;
1673 break;
1674
1675 case gl::TextureType::_2DArray:
1676 case gl::TextureType::_2DMultisampleArray:
1677 case gl::TextureType::CubeMapArray:
1678 extentsOut->depth = 1;
1679 *layerCountOut = extents.depth;
1680 break;
1681
1682 default:
1683 extentsOut->depth = extents.depth;
1684 *layerCountOut = 1;
1685 break;
1686 }
1687 }
1688
GetLevelIndex(gl::LevelIndex levelGL,gl::LevelIndex baseLevel)1689 vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel)
1690 {
1691 ASSERT(baseLevel <= levelGL);
1692 return vk::LevelIndex(levelGL.get() - baseLevel.get());
1693 }
1694
GetTilingMode(gl::TilingMode tilingMode)1695 VkImageTiling GetTilingMode(gl::TilingMode tilingMode)
1696 {
1697 switch (tilingMode)
1698 {
1699 case gl::TilingMode::Optimal:
1700 return VK_IMAGE_TILING_OPTIMAL;
1701 case gl::TilingMode::Linear:
1702 return VK_IMAGE_TILING_LINEAR;
1703 default:
1704 UNREACHABLE();
1705 return VK_IMAGE_TILING_OPTIMAL;
1706 }
1707 }
1708
ConvertEGLFixedRateToVkFixedRate(const EGLenum eglCompressionRate,const angle::FormatID actualFormatID)1709 VkImageCompressionFixedRateFlagsEXT ConvertEGLFixedRateToVkFixedRate(
1710 const EGLenum eglCompressionRate,
1711 const angle::FormatID actualFormatID)
1712 {
1713 switch (eglCompressionRate)
1714 {
1715 case EGL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT:
1716 return VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT;
1717 case EGL_SURFACE_COMPRESSION_FIXED_RATE_DEFAULT_EXT:
1718 return VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT;
1719 case EGL_SURFACE_COMPRESSION_FIXED_RATE_1BPC_EXT:
1720 return VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT;
1721 case EGL_SURFACE_COMPRESSION_FIXED_RATE_2BPC_EXT:
1722 return VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT;
1723 case EGL_SURFACE_COMPRESSION_FIXED_RATE_3BPC_EXT:
1724 return VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT;
1725 case EGL_SURFACE_COMPRESSION_FIXED_RATE_4BPC_EXT:
1726 return VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT;
1727 case EGL_SURFACE_COMPRESSION_FIXED_RATE_5BPC_EXT:
1728 return VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT;
1729 case EGL_SURFACE_COMPRESSION_FIXED_RATE_6BPC_EXT:
1730 return VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT;
1731 case EGL_SURFACE_COMPRESSION_FIXED_RATE_7BPC_EXT:
1732 return VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT;
1733 case EGL_SURFACE_COMPRESSION_FIXED_RATE_8BPC_EXT:
1734 return VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT;
1735 case EGL_SURFACE_COMPRESSION_FIXED_RATE_9BPC_EXT:
1736 return VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT;
1737 case EGL_SURFACE_COMPRESSION_FIXED_RATE_10BPC_EXT:
1738 return VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT;
1739 case EGL_SURFACE_COMPRESSION_FIXED_RATE_11BPC_EXT:
1740 return VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT;
1741 case EGL_SURFACE_COMPRESSION_FIXED_RATE_12BPC_EXT:
1742 return VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT;
1743 default:
1744 UNREACHABLE();
1745 return VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT;
1746 }
1747 }
1748
1749 } // namespace gl_vk
1750
1751 namespace vk_gl
1752 {
AddSampleCounts(VkSampleCountFlags sampleCounts,gl::SupportedSampleSet * setOut)1753 void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
1754 {
1755 // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b. At the time of this
1756 // writing, b is in [0, 6], however, we test all 32 bits in case the enum is extended.
1757 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1758 {
1759 setOut->insert(static_cast<GLuint>(1 << bit));
1760 }
1761 }
1762
GetMaxSampleCount(VkSampleCountFlags sampleCounts)1763 GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
1764 {
1765 GLuint maxCount = 0;
1766 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1767 {
1768 maxCount = static_cast<GLuint>(1 << bit);
1769 }
1770 return maxCount;
1771 }
1772
GetSampleCount(VkSampleCountFlags supportedCounts,GLuint requestedCount)1773 GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
1774 {
1775 for (size_t bit : angle::BitSet32<32>(supportedCounts & kSupportedSampleCounts))
1776 {
1777 GLuint sampleCount = static_cast<GLuint>(1 << bit);
1778 if (sampleCount >= requestedCount)
1779 {
1780 return sampleCount;
1781 }
1782 }
1783
1784 UNREACHABLE();
1785 return 0;
1786 }
1787
GetLevelIndex(vk::LevelIndex levelVk,gl::LevelIndex baseLevel)1788 gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel)
1789 {
1790 return gl::LevelIndex(levelVk.get() + baseLevel.get());
1791 }
1792
ConvertVkFixedRateToGLFixedRate(const VkImageCompressionFixedRateFlagsEXT vkCompressionRate)1793 GLenum ConvertVkFixedRateToGLFixedRate(const VkImageCompressionFixedRateFlagsEXT vkCompressionRate)
1794 {
1795 switch (vkCompressionRate)
1796 {
1797 case VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT:
1798 return GL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT;
1799 case VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT:
1800 return GL_SURFACE_COMPRESSION_FIXED_RATE_1BPC_EXT;
1801 case VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT:
1802 return GL_SURFACE_COMPRESSION_FIXED_RATE_2BPC_EXT;
1803 case VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT:
1804 return GL_SURFACE_COMPRESSION_FIXED_RATE_3BPC_EXT;
1805 case VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT:
1806 return GL_SURFACE_COMPRESSION_FIXED_RATE_4BPC_EXT;
1807 case VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT:
1808 return GL_SURFACE_COMPRESSION_FIXED_RATE_5BPC_EXT;
1809 case VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT:
1810 return GL_SURFACE_COMPRESSION_FIXED_RATE_6BPC_EXT;
1811 case VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT:
1812 return GL_SURFACE_COMPRESSION_FIXED_RATE_7BPC_EXT;
1813 case VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT:
1814 return GL_SURFACE_COMPRESSION_FIXED_RATE_8BPC_EXT;
1815 case VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT:
1816 return GL_SURFACE_COMPRESSION_FIXED_RATE_9BPC_EXT;
1817 case VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT:
1818 return GL_SURFACE_COMPRESSION_FIXED_RATE_10BPC_EXT;
1819 case VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT:
1820 return GL_SURFACE_COMPRESSION_FIXED_RATE_11BPC_EXT;
1821 case VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT:
1822 return GL_SURFACE_COMPRESSION_FIXED_RATE_12BPC_EXT;
1823 default:
1824 UNREACHABLE();
1825 return GL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT;
1826 }
1827 }
1828
ConvertCompressionFlagsToGLFixedRates(VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags,GLint bufSize,GLint * rates)1829 GLint ConvertCompressionFlagsToGLFixedRates(
1830 VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags,
1831 GLint bufSize,
1832 GLint *rates)
1833 {
1834 if (imageCompressionFixedRateFlags == VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT)
1835 {
1836 if (nullptr != rates)
1837 {
1838 rates[0] = GL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT;
1839 }
1840 return 0;
1841 }
1842 VkImageCompressionFixedRateFlagsEXT tmpFlags = imageCompressionFixedRateFlags;
1843 uint8_t bitCount = 0;
1844 angle::FastVector<GLint, 4> GLRates;
1845
1846 while (tmpFlags > 0)
1847 {
1848 if ((tmpFlags & 1) == true)
1849 {
1850 GLRates.push_back(ConvertVkFixedRateToGLFixedRate(1 << bitCount));
1851 }
1852 bitCount += 1;
1853 tmpFlags >>= 1;
1854 }
1855
1856 GLint size = static_cast<GLint>(GLRates.size());
1857 // rates could be nullprt, as user only want get the size(count) of rates
1858 if (nullptr != rates && size <= bufSize)
1859 {
1860 std::copy(GLRates.begin(), GLRates.end(), rates);
1861 }
1862 return size;
1863 }
1864
ConvertVkFixedRateToEGLFixedRate(const VkImageCompressionFixedRateFlagsEXT vkCompressionRate)1865 EGLenum ConvertVkFixedRateToEGLFixedRate(
1866 const VkImageCompressionFixedRateFlagsEXT vkCompressionRate)
1867 {
1868 switch (vkCompressionRate)
1869 {
1870 case VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT:
1871 return EGL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT;
1872 case VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT:
1873 return EGL_SURFACE_COMPRESSION_FIXED_RATE_1BPC_EXT;
1874 case VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT:
1875 return EGL_SURFACE_COMPRESSION_FIXED_RATE_2BPC_EXT;
1876 case VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT:
1877 return EGL_SURFACE_COMPRESSION_FIXED_RATE_3BPC_EXT;
1878 case VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT:
1879 return EGL_SURFACE_COMPRESSION_FIXED_RATE_4BPC_EXT;
1880 case VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT:
1881 return EGL_SURFACE_COMPRESSION_FIXED_RATE_5BPC_EXT;
1882 case VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT:
1883 return EGL_SURFACE_COMPRESSION_FIXED_RATE_6BPC_EXT;
1884 case VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT:
1885 return EGL_SURFACE_COMPRESSION_FIXED_RATE_7BPC_EXT;
1886 case VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT:
1887 return EGL_SURFACE_COMPRESSION_FIXED_RATE_8BPC_EXT;
1888 case VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT:
1889 return EGL_SURFACE_COMPRESSION_FIXED_RATE_9BPC_EXT;
1890 case VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT:
1891 return EGL_SURFACE_COMPRESSION_FIXED_RATE_10BPC_EXT;
1892 case VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT:
1893 return EGL_SURFACE_COMPRESSION_FIXED_RATE_11BPC_EXT;
1894 case VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT:
1895 return EGL_SURFACE_COMPRESSION_FIXED_RATE_12BPC_EXT;
1896 default:
1897 UNREACHABLE();
1898 return EGL_SURFACE_COMPRESSION_FIXED_RATE_NONE_EXT;
1899 }
1900 }
1901
ConvertCompressionFlagsToEGLFixedRate(VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags,size_t rateSize)1902 std::vector<EGLint> ConvertCompressionFlagsToEGLFixedRate(
1903 VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags,
1904 size_t rateSize)
1905 {
1906 std::vector<EGLint> EGLRates;
1907
1908 for (size_t bit : angle::BitSet<32>(imageCompressionFixedRateFlags))
1909 {
1910 if (EGLRates.size() >= rateSize)
1911 {
1912 break;
1913 }
1914
1915 EGLRates.push_back(ConvertVkFixedRateToEGLFixedRate(angle::Bit<uint32_t>(bit)));
1916 }
1917
1918 return EGLRates;
1919 }
1920 } // namespace vk_gl
1921 } // namespace rx
1922