1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 // Helper functions for the Vulkan Renderer.
8 //
9
10 #include "libANGLE/renderer/vulkan/vk_utils.h"
11
12 #include "libANGLE/Context.h"
13 #include "libANGLE/Display.h"
14 #include "libANGLE/renderer/vulkan/BufferVk.h"
15 #include "libANGLE/renderer/vulkan/ContextVk.h"
16 #include "libANGLE/renderer/vulkan/DisplayVk.h"
17 #include "libANGLE/renderer/vulkan/RendererVk.h"
18 #include "libANGLE/renderer/vulkan/ResourceVk.h"
19 #include "libANGLE/renderer/vulkan/android/vk_android_utils.h"
20 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
21
22 namespace angle
23 {
24 egl::Error ToEGL(Result result, EGLint errorCode)
25 {
26 if (result != angle::Result::Continue)
27 {
28 egl::Error error = std::move(*egl::Display::GetCurrentThreadErrorScratchSpace());
29 error.setCode(errorCode);
30 return error;
31 }
32 else
33 {
34 return egl::NoError();
35 }
36 }
37 } // namespace angle
38
39 namespace rx
40 {
41 namespace
42 {
43 // Pick an arbitrary byte value to initialize non-zero memory for sanitization. Note that the
44 // resulting pattern 0x3F3F3F3F, interpreted as a float, is about 0.75.
45 constexpr int kNonZeroInitValue = 0x3F;
46
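// Maps a staging-buffer usage to the transfer usage bits used when creating its VkBuffer.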
47 VkBufferUsageFlags GetStagingBufferUsageFlags(vk::StagingUsage usage)
48 {
49 switch (usage)
50 {
51 case vk::StagingUsage::Read:
52 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
53 case vk::StagingUsage::Write:
54 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
55 case vk::StagingUsage::Both:
56 return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
57 default:
58 UNREACHABLE();
59 return 0;
60 }
61 }
62
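// Returns the first memory type allowed by |memoryRequirements| whose property flags contain all
// of |requestedMemoryPropertyFlags|, or false if no such type exists.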
63 bool FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties &memoryProperties,
64 const VkMemoryRequirements &memoryRequirements,
65 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
66 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
67 uint32_t *typeIndexOut)
68 {
69 for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
70 {
71 ASSERT(memoryIndex < memoryProperties.memoryTypeCount);
72
73 if ((memoryProperties.memoryTypes[memoryIndex].propertyFlags &
74 requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
75 {
76 *memoryPropertyFlagsOut = memoryProperties.memoryTypes[memoryIndex].propertyFlags;
77 *typeIndexOut = static_cast<uint32_t>(memoryIndex);
78 return true;
79 }
80 }
81
82 return false;
83 }
84
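// Finds a compatible memory type and allocates device memory from it. If the initial allocation
// fails, pending command batches are finished and their garbage freed before retrying.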
85 angle::Result FindAndAllocateCompatibleMemory(vk::Context *context,
86 vk::MemoryAllocationType memoryAllocationType,
87 const vk::MemoryProperties &memoryProperties,
88 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
89 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
90 const VkMemoryRequirements &memoryRequirements,
91 const void *extraAllocationInfo,
92 uint32_t *memoryTypeIndexOut,
93 vk::DeviceMemory *deviceMemoryOut)
94 {
95 VkDevice device = context->getDevice();
96
97 ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(
98 context, memoryRequirements, requestedMemoryPropertyFlags, (extraAllocationInfo != nullptr),
99 memoryPropertyFlagsOut, memoryTypeIndexOut));
100
101 VkMemoryAllocateInfo allocInfo = {};
102 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
103 allocInfo.pNext = extraAllocationInfo;
104 allocInfo.memoryTypeIndex = *memoryTypeIndexOut;
105 allocInfo.allocationSize = memoryRequirements.size;
106
107 // Add the new allocation for tracking.
108 RendererVk *renderer = context->getRenderer();
109 renderer->getMemoryAllocationTracker()->setPendingMemoryAlloc(
110 memoryAllocationType, allocInfo.allocationSize, *memoryTypeIndexOut);
111
112 // If the initial allocation fails, it is possible to retry the allocation after cleaning the
113 // garbage.
114 VkResult result;
115 bool anyBatchCleaned = false;
116 uint32_t batchesWaitedAndCleaned = 0;
117
118 do
119 {
120 result = deviceMemoryOut->allocate(device, allocInfo);
121 if (result != VK_SUCCESS)
122 {
123 ANGLE_TRY(renderer->finishOneCommandBatchAndCleanup(context, &anyBatchCleaned));
124
125 if (anyBatchCleaned)
126 {
127 batchesWaitedAndCleaned++;
128 }
129 }
130 } while (result != VK_SUCCESS && anyBatchCleaned);
131
132 if (batchesWaitedAndCleaned > 0)
133 {
134 INFO() << "Initial allocation failed. Waited for " << batchesWaitedAndCleaned
135 << " commands to finish and free garbage | Allocation result: "
136 << ((result == VK_SUCCESS) ? "SUCCESS" : "FAIL");
137 }
138
139 ANGLE_VK_CHECK(context, result == VK_SUCCESS, result);
140
141 renderer->onMemoryAlloc(memoryAllocationType, allocInfo.allocationSize, *memoryTypeIndexOut,
142 deviceMemoryOut->getHandle());
143
144 return angle::Result::Continue;
145 }
146
147 template <typename T>
148 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
149 vk::MemoryAllocationType memoryAllocationType,
150 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
151 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
152 const VkMemoryRequirements &memoryRequirements,
153 const void *extraAllocationInfo,
154 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
155 T *bufferOrImage,
156 uint32_t *memoryTypeIndexOut,
157 vk::DeviceMemory *deviceMemoryOut);
158
159 template <>
160 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
161 vk::MemoryAllocationType memoryAllocationType,
162 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
163 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
164 const VkMemoryRequirements &memoryRequirements,
165 const void *extraAllocationInfo,
166 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
167 vk::Image *image,
168 uint32_t *memoryTypeIndexOut,
169 vk::DeviceMemory *deviceMemoryOut)
170 {
171 const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
172
173 ANGLE_TRY(FindAndAllocateCompatibleMemory(context, memoryAllocationType, memoryProperties,
174 requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
175 memoryRequirements, extraAllocationInfo,
176 memoryTypeIndexOut, deviceMemoryOut));
177
178 if (extraBindInfo)
179 {
180 VkBindImageMemoryInfoKHR bindInfo = {};
181 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
182 bindInfo.pNext = extraBindInfo;
183 bindInfo.image = image->getHandle();
184 bindInfo.memory = deviceMemoryOut->getHandle();
185 bindInfo.memoryOffset = 0;
186
187 ANGLE_VK_TRY(context, image->bindMemory2(context->getDevice(), bindInfo));
188 }
189 else
190 {
191 ANGLE_VK_TRY(context, image->bindMemory(context->getDevice(), *deviceMemoryOut));
192 }
193
194 return angle::Result::Continue;
195 }
196
197 template <>
198 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
199 vk::MemoryAllocationType memoryAllocationType,
200 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
201 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
202 const VkMemoryRequirements &memoryRequirements,
203 const void *extraAllocationInfo,
204 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
205 vk::Buffer *buffer,
206 uint32_t *memoryTypeIndexOut,
207 vk::DeviceMemory *deviceMemoryOut)
208 {
209 ASSERT(extraBindInfo == nullptr);
210
211 const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
212
213 ANGLE_TRY(FindAndAllocateCompatibleMemory(context, memoryAllocationType, memoryProperties,
214 requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
215 memoryRequirements, extraAllocationInfo,
216 memoryTypeIndexOut, deviceMemoryOut));
217 ANGLE_VK_TRY(context, buffer->bindMemory(context->getDevice(), *deviceMemoryOut, 0));
218 return angle::Result::Continue;
219 }
220
221 template <typename T>
222 angle::Result AllocateBufferOrImageMemory(vk::Context *context,
223 vk::MemoryAllocationType memoryAllocationType,
224 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
225 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
226 const void *extraAllocationInfo,
227 T *bufferOrImage,
228 uint32_t *memoryTypeIndexOut,
229 vk::DeviceMemory *deviceMemoryOut,
230 VkDeviceSize *sizeOut)
231 {
232 // Call driver to determine memory requirements.
233 VkMemoryRequirements memoryRequirements;
234 bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);
235
236 ANGLE_TRY(AllocateAndBindBufferOrImageMemory(
237 context, memoryAllocationType, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
238 memoryRequirements, extraAllocationInfo, nullptr, bufferOrImage, memoryTypeIndexOut,
239 deviceMemoryOut));
240
241 *sizeOut = memoryRequirements.size;
242
243 return angle::Result::Continue;
244 }
245
246 // Unified layer that includes full validation layer stack
247 constexpr char kVkKhronosValidationLayerName[] = "VK_LAYER_KHRONOS_validation";
248 constexpr char kVkStandardValidationLayerName[] = "VK_LAYER_LUNARG_standard_validation";
249 const char *kVkValidationLayerNames[] = {
250 "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
251 "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
252 "VK_LAYER_GOOGLE_unique_objects"};
253
254 } // anonymous namespace
255
256 const char *VulkanResultString(VkResult result)
257 {
258 switch (result)
259 {
260 case VK_SUCCESS:
261 return "Command successfully completed";
262 case VK_NOT_READY:
263 return "A fence or query has not yet completed";
264 case VK_TIMEOUT:
265 return "A wait operation has not completed in the specified time";
266 case VK_EVENT_SET:
267 return "An event is signaled";
268 case VK_EVENT_RESET:
269 return "An event is unsignaled";
270 case VK_INCOMPLETE:
271 return "A return array was too small for the result";
272 case VK_SUBOPTIMAL_KHR:
273 return "A swapchain no longer matches the surface properties exactly, but can still be "
274 "used to present to the surface successfully";
275 case VK_ERROR_OUT_OF_HOST_MEMORY:
276 return "A host memory allocation has failed";
277 case VK_ERROR_OUT_OF_DEVICE_MEMORY:
278 return "A device memory allocation has failed";
279 case VK_ERROR_INITIALIZATION_FAILED:
280 return "Initialization of an object could not be completed for implementation-specific "
281 "reasons";
282 case VK_ERROR_DEVICE_LOST:
283 return "The logical or physical device has been lost";
284 case VK_ERROR_MEMORY_MAP_FAILED:
285 return "Mapping of a memory object has failed";
286 case VK_ERROR_LAYER_NOT_PRESENT:
287 return "A requested layer is not present or could not be loaded";
288 case VK_ERROR_EXTENSION_NOT_PRESENT:
289 return "A requested extension is not supported";
290 case VK_ERROR_FEATURE_NOT_PRESENT:
291 return "A requested feature is not supported";
292 case VK_ERROR_INCOMPATIBLE_DRIVER:
293 return "The requested version of Vulkan is not supported by the driver or is otherwise "
294 "incompatible for implementation-specific reasons";
295 case VK_ERROR_TOO_MANY_OBJECTS:
296 return "Too many objects of the type have already been created";
297 case VK_ERROR_FORMAT_NOT_SUPPORTED:
298 return "A requested format is not supported on this device";
299 case VK_ERROR_SURFACE_LOST_KHR:
300 return "A surface is no longer available";
301 case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
302 return "The requested window is already connected to a VkSurfaceKHR, or to some other "
303 "non-Vulkan API";
304 case VK_ERROR_OUT_OF_DATE_KHR:
305 return "A surface has changed in such a way that it is no longer compatible with the "
306 "swapchain";
307 case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
308 return "The display used by a swapchain does not use the same presentable image "
309 "layout, or is incompatible in a way that prevents sharing an image";
310 case VK_ERROR_VALIDATION_FAILED_EXT:
311 return "The validation layers detected invalid API usage";
312 case VK_ERROR_INVALID_SHADER_NV:
313 return "Invalid Vulkan shader was generated";
314 case VK_ERROR_OUT_OF_POOL_MEMORY:
315 return "A pool memory allocation has failed";
316 case VK_ERROR_FRAGMENTED_POOL:
317 return "A pool allocation has failed due to fragmentation of the pool's memory";
318 case VK_ERROR_INVALID_EXTERNAL_HANDLE:
319 return "An external handle is not a valid handle of the specified type";
320 default:
321 return "Unknown vulkan error code";
322 }
323 }
324
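// Enables whichever validation layers are present, preferring the unified Khronos layer; returns
// false if none are available.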
325 bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
326 bool mustHaveLayers,
327 VulkanLayerVector *enabledLayerNames)
328 {
329
330 ASSERT(enabledLayerNames);
331 for (const auto &layerProp : layerProps)
332 {
333 std::string layerPropLayerName = std::string(layerProp.layerName);
334
335 // Favor the unified Khronos layer, but fall back to standard validation
336 if (layerPropLayerName == kVkKhronosValidationLayerName)
337 {
338 enabledLayerNames->push_back(kVkKhronosValidationLayerName);
339 continue;
340 }
341 else if (layerPropLayerName == kVkStandardValidationLayerName)
342 {
343 enabledLayerNames->push_back(kVkStandardValidationLayerName);
344 continue;
345 }
346
347 for (const char *validationLayerName : kVkValidationLayerNames)
348 {
349 if (layerPropLayerName == validationLayerName)
350 {
351 enabledLayerNames->push_back(validationLayerName);
352 break;
353 }
354 }
355 }
356
357 if (enabledLayerNames->size() == 0)
358 {
359 // Generate an error if the layers were explicitly requested, warning otherwise.
360 if (mustHaveLayers)
361 {
362 ERR() << "Vulkan validation layers are missing.";
363 }
364 else
365 {
366 WARN() << "Vulkan validation layers are missing.";
367 }
368
369 return false;
370 }
371
372 return true;
373 }
374
375 namespace vk
376 {
377 const char *gLoaderLayersPathEnv = "VK_LAYER_PATH";
378 const char *gLoaderICDFilenamesEnv = "VK_ICD_FILENAMES";
379
380 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
381 {
382 return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
383 (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
384 }
385
386 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
387 {
388 VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
389 // If the image is not depth/stencil, assume the color aspect. Note that detecting color
390 // formats is less trivial than detecting depth/stencil ones; block formats, for example,
391 // don't indicate any bits for the RGBA channels.
392 return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
393 }
394
395 // Context implementation.
396 Context::Context(RendererVk *renderer) : mRenderer(renderer), mPerfCounters{} {}
397
398 Context::~Context() {}
399
400 VkDevice Context::getDevice() const
401 {
402 return mRenderer->getDevice();
403 }
404
405 const angle::FeaturesVk &Context::getFeatures() const
406 {
407 return mRenderer->getFeatures();
408 }
409
410 // MemoryProperties implementation.
411 MemoryProperties::MemoryProperties() : mMemoryProperties{} {}
412
413 void MemoryProperties::init(VkPhysicalDevice physicalDevice)
414 {
415 ASSERT(mMemoryProperties.memoryTypeCount == 0);
416 vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
417 ASSERT(mMemoryProperties.memoryTypeCount > 0);
418 }
419
420 void MemoryProperties::destroy()
421 {
422 mMemoryProperties = {};
423 }
424
425 bool MemoryProperties::hasLazilyAllocatedMemory() const
426 {
427 for (uint32_t typeIndex = 0; typeIndex < mMemoryProperties.memoryTypeCount; ++typeIndex)
428 {
429 const VkMemoryType &memoryType = mMemoryProperties.memoryTypes[typeIndex];
430 if ((memoryType.propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
431 {
432 return true;
433 }
434 }
435 return false;
436 }
437
438 angle::Result MemoryProperties::findCompatibleMemoryIndex(
439 Context *context,
440 const VkMemoryRequirements &memoryRequirements,
441 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
442 bool isExternalMemory,
443 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
444 uint32_t *typeIndexOut) const
445 {
446 ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);
447
448 // Find a compatible memory pool index. If the index doesn't change, we could cache it.
449 // Not finding a valid memory pool means an out-of-spec driver, or internal error.
450 // TODO(jmadill): Determine if it is possible to cache indexes.
451 // TODO(jmadill): More efficient memory allocation.
452 if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, requestedMemoryPropertyFlags,
453 memoryPropertyFlagsOut, typeIndexOut))
454 {
455 return angle::Result::Continue;
456 }
457
458 // We did not find a compatible memory type. If the caller requested host-visible memory,
459 // fall back to the memory flags that the spec guarantees to be available.
460 if (requestedMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
461 {
462 // The Vulkan spec says the following -
463 // There must be at least one memory type with both the
464 // VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT and VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
465 // bits set in its propertyFlags
466 constexpr VkMemoryPropertyFlags fallbackMemoryPropertyFlags =
467 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
468
469 if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, fallbackMemoryPropertyFlags,
470 memoryPropertyFlagsOut, typeIndexOut))
471 {
472 return angle::Result::Continue;
473 }
474 }
475
476 // We did not find a compatible memory type. When importing external memory, there may be
477 // additional restrictions on the memory type. Find the first available memory type that the
478 // Vulkan driver considers compatible with the external memory being imported.
479 if (isExternalMemory)
480 {
481 if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, 0, memoryPropertyFlagsOut,
482 typeIndexOut))
483 {
484 return angle::Result::Continue;
485 }
486 }
487
488 // TODO(jmadill): Add error message to error.
489 context->handleError(VK_ERROR_INCOMPATIBLE_DRIVER, __FILE__, ANGLE_FUNCTION, __LINE__);
490 return angle::Result::Stop;
491 }
492
493 // StagingBuffer implementation.
494 StagingBuffer::StagingBuffer() : mSize(0) {}
495
496 void StagingBuffer::destroy(RendererVk *renderer)
497 {
498 VkDevice device = renderer->getDevice();
499 mBuffer.destroy(device);
500 mAllocation.destroy(renderer->getAllocator());
501 mSize = 0;
502 }
503
504 angle::Result StagingBuffer::init(Context *context, VkDeviceSize size, StagingUsage usage)
505 {
506 VkBufferCreateInfo createInfo = {};
507 createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
508 createInfo.flags = 0;
509 createInfo.size = size;
510 createInfo.usage = GetStagingBufferUsageFlags(usage);
511 createInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
512 createInfo.queueFamilyIndexCount = 0;
513 createInfo.pQueueFamilyIndices = nullptr;
514
515 VkMemoryPropertyFlags preferredFlags = 0;
516 VkMemoryPropertyFlags requiredFlags =
517 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
518
519 RendererVk *renderer = context->getRenderer();
520 const Allocator &allocator = renderer->getAllocator();
521
522 uint32_t memoryTypeIndex = 0;
523 ANGLE_VK_TRY(context,
524 allocator.createBuffer(createInfo, requiredFlags, preferredFlags,
525 renderer->getFeatures().persistentlyMappedBuffers.enabled,
526 &memoryTypeIndex, &mBuffer, &mAllocation));
527 mSize = static_cast<size_t>(size);
528
529 // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
530 // invalid value ensures our testing doesn't assume zero-initialized memory.
531 if (renderer->getFeatures().allocateNonZeroMemory.enabled)
532 {
533 ANGLE_TRY(InitMappableAllocation(context, allocator, &mAllocation, size, kNonZeroInitValue,
534 requiredFlags));
535 }
536
537 return angle::Result::Continue;
538 }
539
540 void StagingBuffer::release(ContextVk *contextVk)
541 {
542 contextVk->addGarbage(&mBuffer);
543 contextVk->addGarbage(&mAllocation);
544 }
545
546 void StagingBuffer::collectGarbage(RendererVk *renderer, const QueueSerial &queueSerial)
547 {
548 GarbageList garbageList;
549 garbageList.emplace_back(GetGarbage(&mBuffer));
550 garbageList.emplace_back(GetGarbage(&mAllocation));
551
552 ResourceUse use(queueSerial);
553 renderer->collectGarbage(use, std::move(garbageList));
554 }
555
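// Maps |allocation|, fills it with |value|, flushes the range if the memory is not host-coherent,
// and unmaps it again.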
556 angle::Result InitMappableAllocation(Context *context,
557 const Allocator &allocator,
558 Allocation *allocation,
559 VkDeviceSize size,
560 int value,
561 VkMemoryPropertyFlags memoryPropertyFlags)
562 {
563 uint8_t *mapPointer;
564 ANGLE_VK_TRY(context, allocation->map(allocator, &mapPointer));
565 memset(mapPointer, value, static_cast<size_t>(size));
566
567 if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
568 {
569 allocation->flush(allocator, 0, size);
570 }
571
572 allocation->unmap(allocator);
573
574 return angle::Result::Continue;
575 }
576
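// Convenience wrappers around AllocateBufferOrImageMemory for buffers and images respectively.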
577 angle::Result AllocateBufferMemory(Context *context,
578 vk::MemoryAllocationType memoryAllocationType,
579 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
580 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
581 const void *extraAllocationInfo,
582 Buffer *buffer,
583 uint32_t *memoryTypeIndexOut,
584 DeviceMemory *deviceMemoryOut,
585 VkDeviceSize *sizeOut)
586 {
587 return AllocateBufferOrImageMemory(context, memoryAllocationType, requestedMemoryPropertyFlags,
588 memoryPropertyFlagsOut, extraAllocationInfo, buffer,
589 memoryTypeIndexOut, deviceMemoryOut, sizeOut);
590 }
591
592 angle::Result AllocateImageMemory(Context *context,
593 vk::MemoryAllocationType memoryAllocationType,
594 VkMemoryPropertyFlags memoryPropertyFlags,
595 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
596 const void *extraAllocationInfo,
597 Image *image,
598 uint32_t *memoryTypeIndexOut,
599 DeviceMemory *deviceMemoryOut,
600 VkDeviceSize *sizeOut)
601 {
602 return AllocateBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
603 memoryPropertyFlagsOut, extraAllocationInfo, image,
604 memoryTypeIndexOut, deviceMemoryOut, sizeOut);
605 }
606
607 angle::Result AllocateImageMemoryWithRequirements(
608 Context *context,
609 vk::MemoryAllocationType memoryAllocationType,
610 VkMemoryPropertyFlags memoryPropertyFlags,
611 const VkMemoryRequirements &memoryRequirements,
612 const void *extraAllocationInfo,
613 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
614 Image *image,
615 uint32_t *memoryTypeIndexOut,
616 DeviceMemory *deviceMemoryOut)
617 {
618 VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
619 return AllocateAndBindBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
620 &memoryPropertyFlagsOut, memoryRequirements,
621 extraAllocationInfo, extraBindInfo, image,
622 memoryTypeIndexOut, deviceMemoryOut);
623 }
624
625 angle::Result AllocateBufferMemoryWithRequirements(Context *context,
626 MemoryAllocationType memoryAllocationType,
627 VkMemoryPropertyFlags memoryPropertyFlags,
628 const VkMemoryRequirements &memoryRequirements,
629 const void *extraAllocationInfo,
630 Buffer *buffer,
631 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
632 uint32_t *memoryTypeIndexOut,
633 DeviceMemory *deviceMemoryOut)
634 {
635 return AllocateAndBindBufferOrImageMemory(context, memoryAllocationType, memoryPropertyFlags,
636 memoryPropertyFlagsOut, memoryRequirements,
637 extraAllocationInfo, nullptr, buffer,
638 memoryTypeIndexOut, deviceMemoryOut);
639 }
640
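// Creates a shader module from the given SPIR-V blob.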
641 angle::Result InitShaderModule(Context *context,
642 ShaderModule *shaderModule,
643 const uint32_t *shaderCode,
644 size_t shaderCodeSize)
645 {
646 VkShaderModuleCreateInfo createInfo = {};
647 createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
648 createInfo.flags = 0;
649 createInfo.codeSize = shaderCodeSize;
650 createInfo.pCode = shaderCode;
651
652 ANGLE_VK_TRY(context, shaderModule->init(context->getDevice(), createInfo));
653 return angle::Result::Continue;
654 }
655
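// Returns the GL 2D texture type that matches the given layer and sample counts.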
656 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
657 {
658 if (layerCount > 1)
659 {
660 if (samples > 1)
661 {
662 return gl::TextureType::_2DMultisampleArray;
663 }
664 else
665 {
666 return gl::TextureType::_2DArray;
667 }
668 }
669 else
670 {
671 if (samples > 1)
672 {
673 return gl::TextureType::_2DMultisample;
674 }
675 else
676 {
677 return gl::TextureType::_2D;
678 }
679 }
680 }
681
682 GarbageObject::GarbageObject() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE) {}
683
684 GarbageObject::GarbageObject(HandleType handleType, GarbageHandle handle)
685 : mHandleType(handleType), mHandle(handle)
686 {}
687
688 GarbageObject::GarbageObject(GarbageObject &&other) : GarbageObject()
689 {
690 *this = std::move(other);
691 }
692
693 GarbageObject &GarbageObject::operator=(GarbageObject &&rhs)
694 {
695 std::swap(mHandle, rhs.mHandle);
696 std::swap(mHandleType, rhs.mHandleType);
697 return *this;
698 }
699
700 // GarbageObject implementation.
701 // C-style casts are used here to avoid conditional compilation for 32-bit MSVC, which fails to
702 // compile with reinterpret_cast and requires static_cast instead.
703 void GarbageObject::destroy(RendererVk *renderer)
704 {
705 ANGLE_TRACE_EVENT0("gpu.angle", "GarbageObject::destroy");
706 VkDevice device = renderer->getDevice();
707 switch (mHandleType)
708 {
709 case HandleType::Semaphore:
710 vkDestroySemaphore(device, (VkSemaphore)mHandle, nullptr);
711 break;
712 case HandleType::CommandBuffer:
713 // Command buffers are pool allocated.
714 UNREACHABLE();
715 break;
716 case HandleType::Event:
717 vkDestroyEvent(device, (VkEvent)mHandle, nullptr);
718 break;
719 case HandleType::Fence:
720 vkDestroyFence(device, (VkFence)mHandle, nullptr);
721 break;
722 case HandleType::DeviceMemory:
723 vkFreeMemory(device, (VkDeviceMemory)mHandle, nullptr);
724 break;
725 case HandleType::Buffer:
726 vkDestroyBuffer(device, (VkBuffer)mHandle, nullptr);
727 break;
728 case HandleType::BufferView:
729 vkDestroyBufferView(device, (VkBufferView)mHandle, nullptr);
730 break;
731 case HandleType::Image:
732 vkDestroyImage(device, (VkImage)mHandle, nullptr);
733 break;
734 case HandleType::ImageView:
735 vkDestroyImageView(device, (VkImageView)mHandle, nullptr);
736 break;
737 case HandleType::ShaderModule:
738 vkDestroyShaderModule(device, (VkShaderModule)mHandle, nullptr);
739 break;
740 case HandleType::PipelineLayout:
741 vkDestroyPipelineLayout(device, (VkPipelineLayout)mHandle, nullptr);
742 break;
743 case HandleType::RenderPass:
744 vkDestroyRenderPass(device, (VkRenderPass)mHandle, nullptr);
745 break;
746 case HandleType::Pipeline:
747 vkDestroyPipeline(device, (VkPipeline)mHandle, nullptr);
748 break;
749 case HandleType::DescriptorSetLayout:
750 vkDestroyDescriptorSetLayout(device, (VkDescriptorSetLayout)mHandle, nullptr);
751 break;
752 case HandleType::Sampler:
753 vkDestroySampler(device, (VkSampler)mHandle, nullptr);
754 break;
755 case HandleType::DescriptorPool:
756 vkDestroyDescriptorPool(device, (VkDescriptorPool)mHandle, nullptr);
757 break;
758 case HandleType::Framebuffer:
759 vkDestroyFramebuffer(device, (VkFramebuffer)mHandle, nullptr);
760 break;
761 case HandleType::CommandPool:
762 vkDestroyCommandPool(device, (VkCommandPool)mHandle, nullptr);
763 break;
764 case HandleType::QueryPool:
765 vkDestroyQueryPool(device, (VkQueryPool)mHandle, nullptr);
766 break;
767 case HandleType::Allocation:
768 vma::FreeMemory(renderer->getAllocator().getHandle(), (VmaAllocation)mHandle);
769 break;
770 default:
771 UNREACHABLE();
772 break;
773 }
774
775 renderer->onDeallocateHandle(mHandleType);
776 }
777
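// Fills in a VkDebugUtilsLabelEXT for the given GL debug source, using a per-source color.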
778 void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label)
779 {
780 static constexpr angle::ColorF kLabelColors[6] = {
781 angle::ColorF(1.0f, 0.5f, 0.5f, 1.0f), // DEBUG_SOURCE_API
782 angle::ColorF(0.5f, 1.0f, 0.5f, 1.0f), // DEBUG_SOURCE_WINDOW_SYSTEM
783 angle::ColorF(0.5f, 0.5f, 1.0f, 1.0f), // DEBUG_SOURCE_SHADER_COMPILER
784 angle::ColorF(0.7f, 0.7f, 0.7f, 1.0f), // DEBUG_SOURCE_THIRD_PARTY
785 angle::ColorF(0.5f, 0.8f, 0.9f, 1.0f), // DEBUG_SOURCE_APPLICATION
786 angle::ColorF(0.9f, 0.8f, 0.5f, 1.0f), // DEBUG_SOURCE_OTHER
787 };
788
789 int colorIndex = source - GL_DEBUG_SOURCE_API;
790 ASSERT(colorIndex >= 0 && static_cast<size_t>(colorIndex) < ArraySize(kLabelColors));
791
792 label->sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
793 label->pNext = nullptr;
794 label->pLabelName = marker;
795 kLabelColors[colorIndex].writeData(label->color);
796 }
797
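// Assigns a debug name to a Vulkan object via VK_EXT_debug_utils; a no-op if the extension entry
// point was not loaded.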
798 angle::Result SetDebugUtilsObjectName(ContextVk *contextVk,
799 VkObjectType objectType,
800 uint64_t handle,
801 const std::string &label)
802 {
803 RendererVk *renderer = contextVk->getRenderer();
804
805 VkDebugUtilsObjectNameInfoEXT objectNameInfo = {};
806 objectNameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
807 objectNameInfo.objectType = objectType;
808 objectNameInfo.objectHandle = handle;
809 objectNameInfo.pObjectName = label.c_str();
810
811 if (vkSetDebugUtilsObjectNameEXT)
812 {
813 ANGLE_VK_TRY(contextVk,
814 vkSetDebugUtilsObjectNameEXT(renderer->getDevice(), &objectNameInfo));
815 }
816 return angle::Result::Continue;
817 }
818
819 // ClearValuesArray implementation.
820 ClearValuesArray::ClearValuesArray() : mValues{}, mEnabled{} {}
821
822 ClearValuesArray::~ClearValuesArray() = default;
823
824 ClearValuesArray::ClearValuesArray(const ClearValuesArray &other) = default;
825
826 ClearValuesArray &ClearValuesArray::operator=(const ClearValuesArray &rhs) = default;
827
828 void ClearValuesArray::store(uint32_t index,
829 VkImageAspectFlags aspectFlags,
830 const VkClearValue &clearValue)
831 {
832 ASSERT(aspectFlags != 0);
833
834 // We use this double "if" to handle the packed depth-stencil case.
835 if ((aspectFlags & VK_IMAGE_ASPECT_STENCIL_BIT) != 0)
836 {
837 // Ensure for packed DS we're writing to the depth index.
838 ASSERT(index == kUnpackedDepthIndex ||
839 (index == kUnpackedStencilIndex && aspectFlags == VK_IMAGE_ASPECT_STENCIL_BIT));
840
841 storeNoDepthStencil(kUnpackedStencilIndex, clearValue);
842 }
843
844 if (aspectFlags != VK_IMAGE_ASPECT_STENCIL_BIT)
845 {
846 storeNoDepthStencil(index, clearValue);
847 }
848 }
849
850 void ClearValuesArray::storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue)
851 {
852 mValues[index] = clearValue;
853 mEnabled.set(index);
854 }
855
856 gl::DrawBufferMask ClearValuesArray::getColorMask() const
857 {
858 return gl::DrawBufferMask(mEnabled.bits() & kUnpackedColorBuffersMask);
859 }
860
861 // ResourceSerialFactory implementation.
862 ResourceSerialFactory::ResourceSerialFactory() : mCurrentUniqueSerial(1) {}
863
864 ResourceSerialFactory::~ResourceSerialFactory() {}
865
866 uint32_t ResourceSerialFactory::issueSerial()
867 {
868 uint32_t newSerial = ++mCurrentUniqueSerial;
869 // Make sure the serial does not wrap around.
870 ASSERT(newSerial > 0);
871 return newSerial;
872 }
873
874 #define ANGLE_DEFINE_GEN_VK_SERIAL(Type) \
875 Type##Serial ResourceSerialFactory::generate##Type##Serial() \
876 { \
877 return Type##Serial(issueSerial()); \
878 }
879
880 ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_GEN_VK_SERIAL)
881
882 void ClampViewport(VkViewport *viewport)
883 {
884 // 0-sized viewports are invalid in Vulkan.
885 ASSERT(viewport);
886 if (viewport->width == 0.0f)
887 {
888 viewport->width = 1.0f;
889 }
890 if (viewport->height == 0.0f)
891 {
892 viewport->height = 1.0f;
893 }
894 }
895
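// Updates the pipeline creation cache hit/miss perf counters based on the given creation feedback.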
896 void ApplyPipelineCreationFeedback(Context *context, const VkPipelineCreationFeedback &feedback)
897 {
898 const bool cacheHit =
899 (feedback.flags & VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT) != 0;
900
901 angle::VulkanPerfCounters &perfCounters = context->getPerfCounters();
902
903 if (cacheHit)
904 {
905 ++perfCounters.pipelineCreationCacheHits;
906 perfCounters.pipelineCreationTotalCacheHitsDurationNs += feedback.duration;
907 }
908 else
909 {
910 ++perfCounters.pipelineCreationCacheMisses;
911 perfCounters.pipelineCreationTotalCacheMissesDurationNs += feedback.duration;
912 }
913 }
914
915 size_t MemoryAllocInfoMapKey::hash() const
916 {
917 return angle::ComputeGenericHash(*this);
918 }
919 } // namespace vk
920
921 #if !defined(ANGLE_SHARED_LIBVULKAN)
922 // VK_EXT_debug_utils
923 PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = nullptr;
924 PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = nullptr;
925 PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = nullptr;
926 PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = nullptr;
927 PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = nullptr;
928 PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = nullptr;
929
930 // VK_KHR_get_physical_device_properties2
931 PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = nullptr;
932 PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = nullptr;
933 PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = nullptr;
934
935 // VK_KHR_external_semaphore_fd
936 PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = nullptr;
937
938 // VK_EXT_host_query_reset
939 PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = nullptr;
940
941 // VK_EXT_transform_feedback
942 PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = nullptr;
943 PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = nullptr;
944 PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = nullptr;
945 PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = nullptr;
946 PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = nullptr;
947 PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = nullptr;
948
949 // VK_KHR_get_memory_requirements2
950 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = nullptr;
951 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = nullptr;
952
953 // VK_KHR_bind_memory2
954 PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = nullptr;
955 PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = nullptr;
956
957 // VK_KHR_external_fence_capabilities
958 PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR =
959 nullptr;
960
961 // VK_KHR_external_fence_fd
962 PFN_vkGetFenceFdKHR vkGetFenceFdKHR = nullptr;
963 PFN_vkImportFenceFdKHR vkImportFenceFdKHR = nullptr;
964
965 // VK_KHR_external_semaphore_capabilities
966 PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR
967 vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr;
968
969 // VK_KHR_sampler_ycbcr_conversion
970 PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = nullptr;
971 PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = nullptr;
972
973 // VK_KHR_create_renderpass2
974 PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = nullptr;
975
976 # if defined(ANGLE_PLATFORM_FUCHSIA)
977 // VK_FUCHSIA_imagepipe_surface
978 PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = nullptr;
979 # endif
980
981 # if defined(ANGLE_PLATFORM_ANDROID)
982 PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID =
983 nullptr;
984 PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = nullptr;
985 # endif
986
987 # if defined(ANGLE_PLATFORM_GGP)
988 PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = nullptr;
989 # endif
990
991 # define GET_INSTANCE_FUNC(vkName) \
992 do \
993 { \
994 vkName = reinterpret_cast<PFN_##vkName>(vkGetInstanceProcAddr(instance, #vkName)); \
995 ASSERT(vkName); \
996 } while (0)
997
998 # define GET_DEVICE_FUNC(vkName) \
999 do \
1000 { \
1001 vkName = reinterpret_cast<PFN_##vkName>(vkGetDeviceProcAddr(device, #vkName)); \
1002 ASSERT(vkName); \
1003 } while (0)
1004
1005 // VK_KHR_shared_presentable_image
1006 PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = nullptr;
1007
1008 // VK_EXT_extended_dynamic_state
1009 PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = nullptr;
1010 PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = nullptr;
1011 PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = nullptr;
1012 PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = nullptr;
1013 PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = nullptr;
1014 PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = nullptr;
1015 PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = nullptr;
1016 PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = nullptr;
1017 PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = nullptr;
1018 PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = nullptr;
1019 PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = nullptr;
1020 PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = nullptr;
1021
1022 // VK_EXT_extended_dynamic_state2
1023 PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = nullptr;
1024 PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = nullptr;
1025 PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = nullptr;
1026 PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = nullptr;
1027 PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = nullptr;
1028
1029 // VK_KHR_fragment_shading_rate
1030 PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = nullptr;
1031 PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = nullptr;
1032
1033 // VK_GOOGLE_display_timing
1034 PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = nullptr;
1035
1036 void InitDebugUtilsEXTFunctions(VkInstance instance)
1037 {
1038 GET_INSTANCE_FUNC(vkCreateDebugUtilsMessengerEXT);
1039 GET_INSTANCE_FUNC(vkDestroyDebugUtilsMessengerEXT);
1040 GET_INSTANCE_FUNC(vkCmdBeginDebugUtilsLabelEXT);
1041 GET_INSTANCE_FUNC(vkCmdEndDebugUtilsLabelEXT);
1042 GET_INSTANCE_FUNC(vkCmdInsertDebugUtilsLabelEXT);
1043 GET_INSTANCE_FUNC(vkSetDebugUtilsObjectNameEXT);
1044 }
1045
1046 void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance)
1047 {
1048 GET_INSTANCE_FUNC(vkGetPhysicalDeviceProperties2KHR);
1049 GET_INSTANCE_FUNC(vkGetPhysicalDeviceFeatures2KHR);
1050 GET_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR);
1051 }
1052
1053 void InitTransformFeedbackEXTFunctions(VkDevice device)
1054 {
1055 GET_DEVICE_FUNC(vkCmdBindTransformFeedbackBuffersEXT);
1056 GET_DEVICE_FUNC(vkCmdBeginTransformFeedbackEXT);
1057 GET_DEVICE_FUNC(vkCmdEndTransformFeedbackEXT);
1058 GET_DEVICE_FUNC(vkCmdBeginQueryIndexedEXT);
1059 GET_DEVICE_FUNC(vkCmdEndQueryIndexedEXT);
1060 GET_DEVICE_FUNC(vkCmdDrawIndirectByteCountEXT);
1061 }
1062
1063 // VK_KHR_sampler_ycbcr_conversion
1064 void InitSamplerYcbcrKHRFunctions(VkDevice device)
1065 {
1066 GET_DEVICE_FUNC(vkCreateSamplerYcbcrConversionKHR);
1067 GET_DEVICE_FUNC(vkDestroySamplerYcbcrConversionKHR);
1068 }
1069
1070 // VK_KHR_create_renderpass2
1071 void InitRenderPass2KHRFunctions(VkDevice device)
1072 {
1073 GET_DEVICE_FUNC(vkCreateRenderPass2KHR);
1074 }
1075
1076 # if defined(ANGLE_PLATFORM_FUCHSIA)
1077 void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance)
1078 {
1079 GET_INSTANCE_FUNC(vkCreateImagePipeSurfaceFUCHSIA);
1080 }
1081 # endif
1082
1083 # if defined(ANGLE_PLATFORM_ANDROID)
1084 void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance)
1085 {
1086 GET_INSTANCE_FUNC(vkGetAndroidHardwareBufferPropertiesANDROID);
1087 GET_INSTANCE_FUNC(vkGetMemoryAndroidHardwareBufferANDROID);
1088 }
1089 # endif
1090
1091 # if defined(ANGLE_PLATFORM_GGP)
1092 void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance)
1093 {
1094 GET_INSTANCE_FUNC(vkCreateStreamDescriptorSurfaceGGP);
1095 }
1096 # endif // defined(ANGLE_PLATFORM_GGP)
1097
1098 void InitExternalSemaphoreFdFunctions(VkInstance instance)
1099 {
1100 GET_INSTANCE_FUNC(vkImportSemaphoreFdKHR);
1101 }
1102
1103 void InitHostQueryResetFunctions(VkDevice device)
1104 {
1105 GET_DEVICE_FUNC(vkResetQueryPoolEXT);
1106 }
1107
1108 // VK_KHR_get_memory_requirements2
1109 void InitGetMemoryRequirements2KHRFunctions(VkDevice device)
1110 {
1111 GET_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR);
1112 GET_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR);
1113 }
1114
1115 // VK_KHR_bind_memory2
1116 void InitBindMemory2KHRFunctions(VkDevice device)
1117 {
1118 GET_DEVICE_FUNC(vkBindBufferMemory2KHR);
1119 GET_DEVICE_FUNC(vkBindImageMemory2KHR);
1120 }
1121
1122 // VK_KHR_external_fence_capabilities
1123 void InitExternalFenceCapabilitiesFunctions(VkInstance instance)
1124 {
1125 GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalFencePropertiesKHR);
1126 }
1127
1128 // VK_KHR_external_fence_fd
1129 void InitExternalFenceFdFunctions(VkInstance instance)
1130 {
1131 GET_INSTANCE_FUNC(vkGetFenceFdKHR);
1132 GET_INSTANCE_FUNC(vkImportFenceFdKHR);
1133 }
1134
1135 // VK_KHR_external_semaphore_capabilities
1136 void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance)
1137 {
1138 GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalSemaphorePropertiesKHR);
1139 }
1140
1141 // VK_KHR_shared_presentable_image
1142 void InitGetSwapchainStatusKHRFunctions(VkDevice device)
1143 {
1144 GET_DEVICE_FUNC(vkGetSwapchainStatusKHR);
1145 }
1146
1147 // VK_EXT_extended_dynamic_state
1148 void InitExtendedDynamicStateEXTFunctions(VkDevice device)
1149 {
1150 GET_DEVICE_FUNC(vkCmdBindVertexBuffers2EXT);
1151 GET_DEVICE_FUNC(vkCmdSetCullModeEXT);
1152 GET_DEVICE_FUNC(vkCmdSetDepthBoundsTestEnableEXT);
1153 GET_DEVICE_FUNC(vkCmdSetDepthCompareOpEXT);
1154 GET_DEVICE_FUNC(vkCmdSetDepthTestEnableEXT);
1155 GET_DEVICE_FUNC(vkCmdSetDepthWriteEnableEXT);
1156 GET_DEVICE_FUNC(vkCmdSetFrontFaceEXT);
1157 GET_DEVICE_FUNC(vkCmdSetPrimitiveTopologyEXT);
1158 GET_DEVICE_FUNC(vkCmdSetScissorWithCountEXT);
1159 GET_DEVICE_FUNC(vkCmdSetStencilOpEXT);
1160 GET_DEVICE_FUNC(vkCmdSetStencilTestEnableEXT);
1161 GET_DEVICE_FUNC(vkCmdSetViewportWithCountEXT);
1162 }
1163
1164 // VK_EXT_extended_dynamic_state2
1165 void InitExtendedDynamicState2EXTFunctions(VkDevice device)
1166 {
1167 GET_DEVICE_FUNC(vkCmdSetDepthBiasEnableEXT);
1168 GET_DEVICE_FUNC(vkCmdSetLogicOpEXT);
1169 GET_DEVICE_FUNC(vkCmdSetPatchControlPointsEXT);
1170 GET_DEVICE_FUNC(vkCmdSetPrimitiveRestartEnableEXT);
1171 GET_DEVICE_FUNC(vkCmdSetRasterizerDiscardEnableEXT);
1172 }
1173
1174 // VK_KHR_fragment_shading_rate
1175 void InitFragmentShadingRateKHRInstanceFunction(VkInstance instance)
1176 {
1177 GET_INSTANCE_FUNC(vkGetPhysicalDeviceFragmentShadingRatesKHR);
1178 }
1179
1180 void InitFragmentShadingRateKHRDeviceFunction(VkDevice device)
1181 {
1182 GET_DEVICE_FUNC(vkCmdSetFragmentShadingRateKHR);
1183 }
1184
1185 // VK_GOOGLE_display_timing
1186 void InitGetPastPresentationTimingGoogleFunction(VkDevice device)
1187 {
1188 GET_DEVICE_FUNC(vkGetPastPresentationTimingGOOGLE);
1189 }
1190
1191 # undef GET_INSTANCE_FUNC
1192 # undef GET_DEVICE_FUNC
1193
1194 #endif // !defined(ANGLE_SHARED_LIBVULKAN)
1195
1196 #define ASSIGN_FROM_CORE(vkName, EXT) \
1197 do \
1198 { \
1199 /* The core entry point must be present */ \
1200 ASSERT(vkName != nullptr); \
1201 vkName##EXT = vkName; \
1202 } while (0)
1203
1204 void InitGetPhysicalDeviceProperties2KHRFunctionsFromCore()
1205 {
1206 ASSIGN_FROM_CORE(vkGetPhysicalDeviceProperties2, KHR);
1207 ASSIGN_FROM_CORE(vkGetPhysicalDeviceFeatures2, KHR);
1208 ASSIGN_FROM_CORE(vkGetPhysicalDeviceMemoryProperties2, KHR);
1209 }
1210
1211 void InitExternalFenceCapabilitiesFunctionsFromCore()
1212 {
1213 ASSIGN_FROM_CORE(vkGetPhysicalDeviceExternalFenceProperties, KHR);
1214 }
1215
1216 void InitExternalSemaphoreCapabilitiesFunctionsFromCore()
1217 {
1218 ASSIGN_FROM_CORE(vkGetPhysicalDeviceExternalSemaphoreProperties, KHR);
1219 }
1220
1221 void InitSamplerYcbcrKHRFunctionsFromCore()
1222 {
1223 ASSIGN_FROM_CORE(vkCreateSamplerYcbcrConversion, KHR);
1224 ASSIGN_FROM_CORE(vkDestroySamplerYcbcrConversion, KHR);
1225 }
1226
1227 void InitGetMemoryRequirements2KHRFunctionsFromCore()
1228 {
1229 ASSIGN_FROM_CORE(vkGetBufferMemoryRequirements2, KHR);
1230 ASSIGN_FROM_CORE(vkGetImageMemoryRequirements2, KHR);
1231 }
1232
1233 void InitBindMemory2KHRFunctionsFromCore()
1234 {
1235 ASSIGN_FROM_CORE(vkBindBufferMemory2, KHR);
1236 ASSIGN_FROM_CORE(vkBindImageMemory2, KHR);
1237 }
1238
1239 #undef ASSIGN_FROM_CORE
1240
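// Chooses GL_LINEAR for mipmap generation when the format supports linear filtering and the app
// has not hinted GL_FASTEST; otherwise GL_NEAREST.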
1241 GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, angle::FormatID formatID)
1242 {
1243 const bool formatSupportsLinearFiltering = contextVk->getRenderer()->hasImageFormatFeatureBits(
1244 formatID, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT);
1245 const bool hintFastest = contextVk->getState().getGenerateMipmapHint() == GL_FASTEST;
1246
1247 return formatSupportsLinearFiltering && !hintFastest ? GL_LINEAR : GL_NEAREST;
1248 }
1249
1250 // Return the base-2 log of the sample count. Assumes |sampleCount| is a power of 2. The result
1251 // can be used to index an array based on sample count. See for example TextureVk::PerSampleCountArray.
1252 size_t PackSampleCount(GLint sampleCount)
1253 {
1254 if (sampleCount == 0)
1255 {
1256 sampleCount = 1;
1257 }
1258
1259 // We currently only support up to 16xMSAA.
1260 ASSERT(sampleCount <= VK_SAMPLE_COUNT_16_BIT);
1261 ASSERT(gl::isPow2(sampleCount));
1262 return gl::ScanForward(static_cast<uint32_t>(sampleCount));
1263 }
1264
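// gl_vk contains helpers that translate GL enums and state into their Vulkan equivalents.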
1265 namespace gl_vk
1266 {
1267
1268 VkFilter GetFilter(const GLenum filter)
1269 {
1270 switch (filter)
1271 {
1272 case GL_LINEAR_MIPMAP_LINEAR:
1273 case GL_LINEAR_MIPMAP_NEAREST:
1274 case GL_LINEAR:
1275 return VK_FILTER_LINEAR;
1276 case GL_NEAREST_MIPMAP_LINEAR:
1277 case GL_NEAREST_MIPMAP_NEAREST:
1278 case GL_NEAREST:
1279 return VK_FILTER_NEAREST;
1280 default:
1281 UNIMPLEMENTED();
1282 return VK_FILTER_MAX_ENUM;
1283 }
1284 }
1285
1286 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
1287 {
1288 switch (filter)
1289 {
1290 case GL_LINEAR_MIPMAP_LINEAR:
1291 case GL_NEAREST_MIPMAP_LINEAR:
1292 return VK_SAMPLER_MIPMAP_MODE_LINEAR;
1293 case GL_LINEAR:
1294 case GL_NEAREST:
1295 case GL_NEAREST_MIPMAP_NEAREST:
1296 case GL_LINEAR_MIPMAP_NEAREST:
1297 return VK_SAMPLER_MIPMAP_MODE_NEAREST;
1298 default:
1299 UNIMPLEMENTED();
1300 return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
1301 }
1302 }
1303
1304 VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
1305 {
1306 switch (wrap)
1307 {
1308 case GL_REPEAT:
1309 return VK_SAMPLER_ADDRESS_MODE_REPEAT;
1310 case GL_MIRRORED_REPEAT:
1311 return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
1312 case GL_CLAMP_TO_BORDER:
1313 return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
1314 case GL_CLAMP_TO_EDGE:
1315 return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
1316 case GL_MIRROR_CLAMP_TO_EDGE_EXT:
1317 return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
1318 default:
1319 UNIMPLEMENTED();
1320 return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
1321 }
1322 }
1323
1324 VkRect2D GetRect(const gl::Rectangle &source)
1325 {
1326 return {{source.x, source.y},
1327 {static_cast<uint32_t>(source.width), static_cast<uint32_t>(source.height)}};
1328 }
1329
1330 VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
1331 {
1332 switch (mode)
1333 {
1334 case gl::PrimitiveMode::Triangles:
1335 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
1336 case gl::PrimitiveMode::Points:
1337 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
1338 case gl::PrimitiveMode::Lines:
1339 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
1340 case gl::PrimitiveMode::LineStrip:
1341 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
1342 case gl::PrimitiveMode::TriangleFan:
1343 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
1344 case gl::PrimitiveMode::TriangleStrip:
1345 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
1346 case gl::PrimitiveMode::LineLoop:
1347 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
1348 case gl::PrimitiveMode::LinesAdjacency:
1349 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY;
1350 case gl::PrimitiveMode::LineStripAdjacency:
1351 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY;
1352 case gl::PrimitiveMode::TrianglesAdjacency:
1353 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY;
1354 case gl::PrimitiveMode::TriangleStripAdjacency:
1355 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY;
1356 case gl::PrimitiveMode::Patches:
1357 return VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
1358 default:
1359 UNREACHABLE();
1360 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
1361 }
1362 }
1363
1364 VkPolygonMode GetPolygonMode(const gl::PolygonMode polygonMode)
1365 {
1366 switch (polygonMode)
1367 {
1368 case gl::PolygonMode::Point:
1369 return VK_POLYGON_MODE_POINT;
1370 case gl::PolygonMode::Line:
1371 return VK_POLYGON_MODE_LINE;
1372 case gl::PolygonMode::Fill:
1373 return VK_POLYGON_MODE_FILL;
1374 default:
1375 UNREACHABLE();
1376 return VK_POLYGON_MODE_FILL;
1377 }
1378 }
1379
1380 VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
1381 {
1382 if (!rasterState.cullFace)
1383 {
1384 return VK_CULL_MODE_NONE;
1385 }
1386
1387 switch (rasterState.cullMode)
1388 {
1389 case gl::CullFaceMode::Front:
1390 return VK_CULL_MODE_FRONT_BIT;
1391 case gl::CullFaceMode::Back:
1392 return VK_CULL_MODE_BACK_BIT;
1393 case gl::CullFaceMode::FrontAndBack:
1394 return VK_CULL_MODE_FRONT_AND_BACK;
1395 default:
1396 UNREACHABLE();
1397 return VK_CULL_MODE_NONE;
1398 }
1399 }
1400
1401 VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
1402 {
1403 // Invert CW and CCW to have the same behavior as OpenGL.
1404 switch (frontFace)
1405 {
1406 case GL_CW:
1407 return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
1408 case GL_CCW:
1409 return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
1410 default:
1411 UNREACHABLE();
1412 return VK_FRONT_FACE_CLOCKWISE;
1413 }
1414 }
1415
1416 VkSampleCountFlagBits GetSamples(GLint sampleCount, bool limitSampleCountTo2)
1417 {
1418 if (limitSampleCountTo2)
1419 {
1420 // Limiting samples to 2 allows multisampling to work while reducing
1421 // how much graphics memory is required. This makes ANGLE nonconformant
1422 // (GLES 3.0+ requires 4 samples minimum) but gives low memory systems a
1423 // better chance of running applications.
1424 sampleCount = std::min(sampleCount, 2);
1425 }
1426
1427 switch (sampleCount)
1428 {
1429 case 0:
1430 UNREACHABLE();
1431 return VK_SAMPLE_COUNT_1_BIT;
1432 case 1:
1433 return VK_SAMPLE_COUNT_1_BIT;
1434 case 2:
1435 return VK_SAMPLE_COUNT_2_BIT;
1436 case 4:
1437 return VK_SAMPLE_COUNT_4_BIT;
1438 case 8:
1439 return VK_SAMPLE_COUNT_8_BIT;
1440 case 16:
1441 return VK_SAMPLE_COUNT_16_BIT;
1442 case 32:
1443 return VK_SAMPLE_COUNT_32_BIT;
1444 default:
1445 UNREACHABLE();
1446 return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
1447 }
1448 }
1449
GetSwizzle(const GLenum swizzle)1450 VkComponentSwizzle GetSwizzle(const GLenum swizzle)
1451 {
1452 switch (swizzle)
1453 {
1454 case GL_ALPHA:
1455 return VK_COMPONENT_SWIZZLE_A;
1456 case GL_RED:
1457 return VK_COMPONENT_SWIZZLE_R;
1458 case GL_GREEN:
1459 return VK_COMPONENT_SWIZZLE_G;
1460 case GL_BLUE:
1461 return VK_COMPONENT_SWIZZLE_B;
1462 case GL_ZERO:
1463 return VK_COMPONENT_SWIZZLE_ZERO;
1464 case GL_ONE:
1465 return VK_COMPONENT_SWIZZLE_ONE;
1466 default:
1467 UNREACHABLE();
1468 return VK_COMPONENT_SWIZZLE_IDENTITY;
1469 }
1470 }
1471
GetCompareOp(const GLenum compareFunc)1472 VkCompareOp GetCompareOp(const GLenum compareFunc)
1473 {
1474 switch (compareFunc)
1475 {
1476 case GL_NEVER:
1477 return VK_COMPARE_OP_NEVER;
1478 case GL_LESS:
1479 return VK_COMPARE_OP_LESS;
1480 case GL_EQUAL:
1481 return VK_COMPARE_OP_EQUAL;
1482 case GL_LEQUAL:
1483 return VK_COMPARE_OP_LESS_OR_EQUAL;
1484 case GL_GREATER:
1485 return VK_COMPARE_OP_GREATER;
1486 case GL_NOTEQUAL:
1487 return VK_COMPARE_OP_NOT_EQUAL;
1488 case GL_GEQUAL:
1489 return VK_COMPARE_OP_GREATER_OR_EQUAL;
1490 case GL_ALWAYS:
1491 return VK_COMPARE_OP_ALWAYS;
1492 default:
1493 UNREACHABLE();
1494 return VK_COMPARE_OP_ALWAYS;
1495 }
1496 }
1497
GetStencilOp(GLenum compareOp)1498 VkStencilOp GetStencilOp(GLenum compareOp)
1499 {
1500 switch (compareOp)
1501 {
1502 case GL_KEEP:
1503 return VK_STENCIL_OP_KEEP;
1504 case GL_ZERO:
1505 return VK_STENCIL_OP_ZERO;
1506 case GL_REPLACE:
1507 return VK_STENCIL_OP_REPLACE;
1508 case GL_INCR:
1509 return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
1510 case GL_DECR:
1511 return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
1512 case GL_INCR_WRAP:
1513 return VK_STENCIL_OP_INCREMENT_AND_WRAP;
1514 case GL_DECR_WRAP:
1515 return VK_STENCIL_OP_DECREMENT_AND_WRAP;
1516 case GL_INVERT:
1517 return VK_STENCIL_OP_INVERT;
1518 default:
1519 UNREACHABLE();
1520 return VK_STENCIL_OP_KEEP;
1521 }
1522 }
1523
GetLogicOp(const GLenum logicOp)1524 VkLogicOp GetLogicOp(const GLenum logicOp)
1525 {
1526 // GL's logic op values are 0x1500 + op, where op is the same value as Vulkan's VkLogicOp.
1527 return static_cast<VkLogicOp>(logicOp - GL_CLEAR);
1528 }
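
// Illustrative compile-time spot check of the mapping GetLogicOp relies on; it assumes the
// standard GL (0x1500-based) and Vulkan logic op token values and only uses tokens already
// referenced in this file.
static_assert(static_cast<VkLogicOp>(GL_CLEAR - GL_CLEAR) == VK_LOGIC_OP_CLEAR,
              "GL and Vulkan logic op values are expected to line up");
static_assert(static_cast<VkLogicOp>(GL_INVERT - GL_CLEAR) == VK_LOGIC_OP_INVERT,
              "GL and Vulkan logic op values are expected to line up");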

void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
{
    vkOffset->x = glOffset.x;
    vkOffset->y = glOffset.y;
    vkOffset->z = glOffset.z;
}

void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
{
    vkExtent->width  = glExtent.width;
    vkExtent->height = glExtent.height;
    vkExtent->depth  = glExtent.depth;
}

VkImageType GetImageType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::_2DMultisampleArray:
        case gl::TextureType::CubeMap:
        case gl::TextureType::CubeMapArray:
        case gl::TextureType::External:
            return VK_IMAGE_TYPE_2D;
        case gl::TextureType::_3D:
            return VK_IMAGE_TYPE_3D;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_TYPE_MAX_ENUM;
    }
}

VkImageViewType GetImageViewType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::External:
            return VK_IMAGE_VIEW_TYPE_2D;
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisampleArray:
            return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        case gl::TextureType::_3D:
            return VK_IMAGE_VIEW_TYPE_3D;
        case gl::TextureType::CubeMap:
            return VK_IMAGE_VIEW_TYPE_CUBE;
        case gl::TextureType::CubeMapArray:
            return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
    }
}

VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
{
    return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
           (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
}

VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
{
    VkShaderStageFlags flags = 0;
    for (const gl::ShaderType shaderType : activeShaders)
    {
        flags |= kShaderStageMap[shaderType];
    }
    return flags;
}

void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 bool clipSpaceOriginUpperLeft,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut)
{
    viewportOut->x        = static_cast<float>(viewport.x);
    viewportOut->y        = static_cast<float>(viewport.y);
    viewportOut->width    = static_cast<float>(viewport.width);
    viewportOut->height   = static_cast<float>(viewport.height);
    viewportOut->minDepth = gl::clamp01(nearPlane);
    viewportOut->maxDepth = gl::clamp01(farPlane);

    // Say an application intends to draw a primitive (shown as 'O' below); it can choose to use a
    // different clip space origin.  When the clip space origin (shown as 'C' below) is switched
    // from lower-left to upper-left, primitives are rendered with their y-coordinate flipped.

    // Rendered content will also differ based on whether the target is the default framebuffer or
    // a user-defined framebuffer.  We modify the viewport's 'y' and 'h' accordingly.

    // clip space origin is lower-left
    //   Expected draw in GLES           default framebuffer         user defined framebuffer
    // (0,H)                           (0,0)                       (0,0)
    //   +                               +-----------+ (W,0)         +-----------+ (W,0)
    //   |                               |                           |  C----+
    //   |                               |                           |  |    | (h)
    //   |  +----+                       |  +----+                   |  | O  |
    //   |  | O  |                       |  | O  | (-h)              |  +----+
    //   |  |    |                       |  |    |                   |
    //   |  C----+                       |  C----+                   |
    //   +-----------+ (W,0)             +                           +
    // (0,0)                           (0,H)                       (0,H)
    //                                   y' = H - h                  y' = y

    // clip space origin is upper-left
    //   Expected draw in GLES           default framebuffer         user defined framebuffer
    // (0,H)                           (0,0)                       (0,0)
    //   +                               +-----------+ (W,0)         +-----------+ (W,0)
    //   |                               |                           |  +----+
    //   |                               |                           |  | O  | (-h)
    //   |  C----+                       |  C----+                   |  |    |
    //   |  |    |                       |  |    | (h)               |  C----+
    //   |  | O  |                       |  | O  |                   |
    //   |  +----+                       |  +----+                   |
    //   +-----------+ (W,0)             +                           +
    // (0,0)                           (0,H)                       (0,H)
    //                                   y' = H - (y + h)            y' = y + h

    if (clipSpaceOriginUpperLeft)
    {
        if (invertViewport)
        {
            viewportOut->y = static_cast<float>(renderAreaHeight - (viewport.height + viewport.y));
        }
        else
        {
            viewportOut->y      = static_cast<float>(viewport.height + viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
    else
    {
        if (invertViewport)
        {
            viewportOut->y      = static_cast<float>(renderAreaHeight - viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
}
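
// Worked example for GetViewport above (illustrative, arbitrary values): with
// renderAreaHeight H = 100 and viewport (x=10, y=20, width=30, height=40):
//  - lower-left origin, invertViewport=true:   y' = 100 - 20 = 80,        height' = -40
//  - lower-left origin, invertViewport=false:  y' = 20,                   height' =  40
//  - upper-left origin, invertViewport=true:   y' = 100 - (20 + 40) = 40, height' =  40
//  - upper-left origin, invertViewport=false:  y' = 20 + 40 = 60,         height' = -40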

void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut)
{
    extentsOut->width  = extents.width;
    extentsOut->height = extents.height;

    switch (textureType)
    {
        case gl::TextureType::CubeMap:
            extentsOut->depth = 1;
            *layerCountOut    = gl::kCubeFaceCount;
            break;

        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisampleArray:
        case gl::TextureType::CubeMapArray:
            extentsOut->depth = 1;
            *layerCountOut    = extents.depth;
            break;

        default:
            extentsOut->depth = extents.depth;
            *layerCountOut    = 1;
            break;
    }
}
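
// For example (illustrative): a CubeMap with extents (64, 64, 1) yields a VkExtent3D of
// {64, 64, 1} and a layer count of gl::kCubeFaceCount (6); a _2DArray with extents (64, 64, 10)
// yields {64, 64, 1} with 10 layers; a _3D texture keeps its depth in the extent and uses a
// single layer.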

vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel)
{
    ASSERT(baseLevel <= levelGL);
    return vk::LevelIndex(levelGL.get() - baseLevel.get());
}
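
// For example (illustrative): with baseLevel 2, GL level 5 maps to vk::LevelIndex(3).  The
// inverse conversion is vk_gl::GetLevelIndex below.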

}  // namespace gl_vk

namespace vk_gl
{
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
{
    // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b.  At the time of this
    // writing, b is in [0, 6]; however, we test all 32 bits in case the enum is extended.
    for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
    {
        setOut->insert(static_cast<GLuint>(1 << bit));
    }
}

GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
{
    GLuint maxCount = 0;
    for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
    {
        maxCount = static_cast<GLuint>(1 << bit);
    }
    return maxCount;
}

GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
{
    for (size_t bit : angle::BitSet32<32>(supportedCounts & kSupportedSampleCounts))
    {
        GLuint sampleCount = static_cast<GLuint>(1 << bit);
        if (sampleCount >= requestedCount)
        {
            return sampleCount;
        }
    }

    UNREACHABLE();
    return 0;
}
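
// For example (illustrative, assuming kSupportedSampleCounts includes these bits): if
// supportedCounts advertises VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT and
// VK_SAMPLE_COUNT_8_BIT, a requestedCount of 2 is rounded up to 4, and a requestedCount of 8
// returns 8.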

gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel)
{
    return gl::LevelIndex(levelVk.get() + baseLevel.get());
}
}  // namespace vk_gl
}  // namespace rx