1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 // Helper functions for the Vulkan Renderer.
8 //
9
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <cstring>

#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/ResourceVk.h"
#include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
19
20 namespace angle
21 {
ToEGL(Result result,rx::DisplayVk * displayVk,EGLint errorCode)22 egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode)
23 {
24 if (result != angle::Result::Continue)
25 {
26 return displayVk->getEGLError(errorCode);
27 }
28 else
29 {
30 return egl::NoError();
31 }
32 }
33 } // namespace angle
34
35 namespace rx
36 {
37 namespace
38 {
GetStagingBufferUsageFlags(vk::StagingUsage usage)39 VkImageUsageFlags GetStagingBufferUsageFlags(vk::StagingUsage usage)
40 {
41 switch (usage)
42 {
43 case vk::StagingUsage::Read:
44 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
45 case vk::StagingUsage::Write:
46 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
47 case vk::StagingUsage::Both:
48 return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
49 default:
50 UNREACHABLE();
51 return 0;
52 }
53 }
54
// Allocates a VkDeviceMemory block that satisfies |memoryRequirements| with properties as close
// as possible to |requestedMemoryPropertyFlags|.  On success, the actual property flags of the
// chosen memory type are returned via |memoryPropertyFlagsOut| and the allocation via
// |deviceMemoryOut|.  |extraAllocationInfo| is chained onto VkMemoryAllocateInfo::pNext and may
// be nullptr.
angle::Result FindAndAllocateCompatibleMemory(vk::Context *context,
                                              const vk::MemoryProperties &memoryProperties,
                                              VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                              VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                              const VkMemoryRequirements &memoryRequirements,
                                              const void *extraAllocationInfo,
                                              vk::DeviceMemory *deviceMemoryOut)
{
    // Pick an arbitrary value to initialize non-zero memory for sanitization.
    constexpr int kNonZeroInitValue = 55;

    VkDevice device = context->getDevice();

    uint32_t memoryTypeIndex = 0;
    ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(context, memoryRequirements,
                                                         requestedMemoryPropertyFlags,
                                                         memoryPropertyFlagsOut, &memoryTypeIndex));

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = extraAllocationInfo;
    allocInfo.memoryTypeIndex = memoryTypeIndex;
    // Allocate the driver-required size, which may exceed what the caller originally asked for.
    allocInfo.allocationSize = memoryRequirements.size;

    ANGLE_VK_TRY(context, deviceMemoryOut->allocate(device, allocInfo));

    // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
    // invalid values ensures our testing doesn't assume zero-initialized memory.
    RendererVk *renderer = context->getRenderer();
    if (renderer->getFeatures().allocateNonZeroMemory.enabled)
    {
        if ((*memoryPropertyFlagsOut & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            // Can map the memory.  Non-host-visible memory cannot be wiped from the CPU here.
            ANGLE_TRY(vk::InitMappableDeviceMemory(context, deviceMemoryOut,
                                                   memoryRequirements.size, kNonZeroInitValue,
                                                   *memoryPropertyFlagsOut));
        }
    }

    return angle::Result::Continue;
}
97
98 template <typename T>
AllocateAndBindBufferOrImageMemory(vk::Context * context,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,const VkMemoryRequirements & memoryRequirements,const void * extraAllocationInfo,T * bufferOrImage,vk::DeviceMemory * deviceMemoryOut)99 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
100 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
101 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
102 const VkMemoryRequirements &memoryRequirements,
103 const void *extraAllocationInfo,
104 T *bufferOrImage,
105 vk::DeviceMemory *deviceMemoryOut)
106 {
107 const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
108
109 ANGLE_TRY(FindAndAllocateCompatibleMemory(
110 context, memoryProperties, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
111 memoryRequirements, extraAllocationInfo, deviceMemoryOut));
112 ANGLE_VK_TRY(context, bufferOrImage->bindMemory(context->getDevice(), *deviceMemoryOut));
113 return angle::Result::Continue;
114 }
115
116 template <typename T>
AllocateBufferOrImageMemory(vk::Context * context,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,const void * extraAllocationInfo,T * bufferOrImage,vk::DeviceMemory * deviceMemoryOut,VkDeviceSize * sizeOut)117 angle::Result AllocateBufferOrImageMemory(vk::Context *context,
118 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
119 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
120 const void *extraAllocationInfo,
121 T *bufferOrImage,
122 vk::DeviceMemory *deviceMemoryOut,
123 VkDeviceSize *sizeOut)
124 {
125 // Call driver to determine memory requirements.
126 VkMemoryRequirements memoryRequirements;
127 bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);
128
129 ANGLE_TRY(AllocateAndBindBufferOrImageMemory(
130 context, requestedMemoryPropertyFlags, memoryPropertyFlagsOut, memoryRequirements,
131 extraAllocationInfo, bufferOrImage, deviceMemoryOut));
132
133 *sizeOut = memoryRequirements.size;
134
135 return angle::Result::Continue;
136 }
137
// Unified layer that includes full validation layer stack
constexpr char kVkKhronosValidationLayerName[] = "VK_LAYER_KHRONOS_validation";
// Older LunarG meta-layer, used as a fallback when the unified Khronos layer is unavailable.
constexpr char kVkStandardValidationLayerName[] = "VK_LAYER_LUNARG_standard_validation";
// Individual layers of the legacy validation stack, used when neither meta-layer is available.
const char *kVkValidationLayerNames[] = {
    "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
    "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
    "VK_LAYER_GOOGLE_unique_objects"};
145
HasValidationLayer(const std::vector<VkLayerProperties> & layerProps,const char * layerName)146 bool HasValidationLayer(const std::vector<VkLayerProperties> &layerProps, const char *layerName)
147 {
148 for (const auto &layerProp : layerProps)
149 {
150 if (std::string(layerProp.layerName) == layerName)
151 {
152 return true;
153 }
154 }
155
156 return false;
157 }
158
// Returns true if the unified Khronos validation layer is available.
bool HasKhronosValidationLayer(const std::vector<VkLayerProperties> &layerProps)
{
    return HasValidationLayer(layerProps, kVkKhronosValidationLayerName);
}

// Returns true if the older LunarG standard validation meta-layer is available.
bool HasStandardValidationLayer(const std::vector<VkLayerProperties> &layerProps)
{
    return HasValidationLayer(layerProps, kVkStandardValidationLayerName);
}
168
HasValidationLayers(const std::vector<VkLayerProperties> & layerProps)169 bool HasValidationLayers(const std::vector<VkLayerProperties> &layerProps)
170 {
171 for (const char *layerName : kVkValidationLayerNames)
172 {
173 if (!HasValidationLayer(layerProps, layerName))
174 {
175 return false;
176 }
177 }
178
179 return true;
180 }
181 } // anonymous namespace
182
// Returns a human-readable description for a VkResult, suitable for error messages and logs.
// Descriptions follow the wording of the Vulkan specification; unknown codes map to a generic
// string rather than crashing.
const char *VulkanResultString(VkResult result)
{
    switch (result)
    {
        case VK_SUCCESS:
            return "Command successfully completed";
        case VK_NOT_READY:
            return "A fence or query has not yet completed";
        case VK_TIMEOUT:
            return "A wait operation has not completed in the specified time";
        case VK_EVENT_SET:
            return "An event is signaled";
        case VK_EVENT_RESET:
            return "An event is unsignaled";
        case VK_INCOMPLETE:
            return "A return array was too small for the result";
        case VK_SUBOPTIMAL_KHR:
            return "A swapchain no longer matches the surface properties exactly, but can still be "
                   "used to present to the surface successfully";
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            return "A host memory allocation has failed";
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
            return "A device memory allocation has failed";
        case VK_ERROR_INITIALIZATION_FAILED:
            return "Initialization of an object could not be completed for implementation-specific "
                   "reasons";
        case VK_ERROR_DEVICE_LOST:
            return "The logical or physical device has been lost";
        case VK_ERROR_MEMORY_MAP_FAILED:
            return "Mapping of a memory object has failed";
        case VK_ERROR_LAYER_NOT_PRESENT:
            return "A requested layer is not present or could not be loaded";
        case VK_ERROR_EXTENSION_NOT_PRESENT:
            return "A requested extension is not supported";
        case VK_ERROR_FEATURE_NOT_PRESENT:
            return "A requested feature is not supported";
        case VK_ERROR_INCOMPATIBLE_DRIVER:
            return "The requested version of Vulkan is not supported by the driver or is otherwise "
                   "incompatible for implementation-specific reasons";
        case VK_ERROR_TOO_MANY_OBJECTS:
            return "Too many objects of the type have already been created";
        case VK_ERROR_FORMAT_NOT_SUPPORTED:
            return "A requested format is not supported on this device";
        case VK_ERROR_SURFACE_LOST_KHR:
            return "A surface is no longer available";
        case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
            return "The requested window is already connected to a VkSurfaceKHR, or to some other "
                   "non-Vulkan API";
        case VK_ERROR_OUT_OF_DATE_KHR:
            return "A surface has changed in such a way that it is no longer compatible with the "
                   "swapchain";
        case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
            return "The display used by a swapchain does not use the same presentable image "
                   "layout, or is incompatible in a way that prevents sharing an image";
        case VK_ERROR_VALIDATION_FAILED_EXT:
            return "The validation layers detected invalid API usage";
        case VK_ERROR_INVALID_SHADER_NV:
            return "Invalid Vulkan shader was generated";
        case VK_ERROR_OUT_OF_POOL_MEMORY:
            return "A pool memory allocation has failed";
        case VK_ERROR_FRAGMENTED_POOL:
            return "A pool allocation has failed due to fragmentation of the pool's memory";
        case VK_ERROR_INVALID_EXTERNAL_HANDLE:
            return "An external handle is not a valid handle of the specified type";
        default:
            return "Unknown vulkan error code";
    }
}
251
GetAvailableValidationLayers(const std::vector<VkLayerProperties> & layerProps,bool mustHaveLayers,VulkanLayerVector * enabledLayerNames)252 bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
253 bool mustHaveLayers,
254 VulkanLayerVector *enabledLayerNames)
255 {
256 // Favor unified Khronos layer, but fallback to standard validation
257 if (HasKhronosValidationLayer(layerProps))
258 {
259 enabledLayerNames->push_back(kVkKhronosValidationLayerName);
260 }
261 else if (HasStandardValidationLayer(layerProps))
262 {
263 enabledLayerNames->push_back(kVkStandardValidationLayerName);
264 }
265 else if (HasValidationLayers(layerProps))
266 {
267 for (const char *layerName : kVkValidationLayerNames)
268 {
269 enabledLayerNames->push_back(layerName);
270 }
271 }
272 else
273 {
274 // Generate an error if the layers were explicitly requested, warning otherwise.
275 if (mustHaveLayers)
276 {
277 ERR() << "Vulkan validation layers are missing.";
278 }
279 else
280 {
281 WARN() << "Vulkan validation layers are missing.";
282 }
283
284 return false;
285 }
286
287 return true;
288 }
289
290 namespace vk
291 {
// Names of environment variables read elsewhere to configure the Vulkan loader (layer and ICD
// discovery) and to select a preferred physical device.
const char *gLoaderLayersPathEnv   = "VK_LAYER_PATH";
const char *gLoaderICDFilenamesEnv = "VK_ICD_FILENAMES";
const char *gANGLEPreferredDevice  = "ANGLE_PREFERRED_DEVICE";
295
GetDepthStencilAspectFlags(const angle::Format & format)296 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
297 {
298 return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
299 (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
300 }
301
GetFormatAspectFlags(const angle::Format & format)302 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
303 {
304 VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
305 // If the image is not depth stencil, assume color aspect. Note that detecting color formats
306 // is less trivial than depth/stencil, e.g. as block formats don't indicate any bits for RGBA
307 // channels.
308 return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
309 }
310
// Context implementation.
// A Context is a thin, non-owning view over the renderer used for error reporting and device
// access; the RendererVk must outlive it.
Context::Context(RendererVk *renderer) : mRenderer(renderer) {}

Context::~Context() {}

// Convenience accessor for the VkDevice owned by the renderer.
VkDevice Context::getDevice() const
{
    return mRenderer->getDevice();
}
320
// MemoryProperties implementation.
MemoryProperties::MemoryProperties() : mMemoryProperties{} {}

// Caches the physical device's memory type/heap table.  Must be called before any lookup; a
// non-zero memoryTypeCount doubles as the "already initialized" marker.
void MemoryProperties::init(VkPhysicalDevice physicalDevice)
{
    ASSERT(mMemoryProperties.memoryTypeCount == 0);
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
    ASSERT(mMemoryProperties.memoryTypeCount > 0);
}

// Clears the cached table, allowing init() to be called again.
void MemoryProperties::destroy()
{
    mMemoryProperties = {};
}
335
findCompatibleMemoryIndex(Context * context,const VkMemoryRequirements & memoryRequirements,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,uint32_t * typeIndexOut) const336 angle::Result MemoryProperties::findCompatibleMemoryIndex(
337 Context *context,
338 const VkMemoryRequirements &memoryRequirements,
339 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
340 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
341 uint32_t *typeIndexOut) const
342 {
343 ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);
344
345 // Find a compatible memory pool index. If the index doesn't change, we could cache it.
346 // Not finding a valid memory pool means an out-of-spec driver, or internal error.
347 // TODO(jmadill): Determine if it is possible to cache indexes.
348 // TODO(jmadill): More efficient memory allocation.
349 for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
350 {
351 ASSERT(memoryIndex < mMemoryProperties.memoryTypeCount);
352
353 if ((mMemoryProperties.memoryTypes[memoryIndex].propertyFlags &
354 requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
355 {
356 *memoryPropertyFlagsOut = mMemoryProperties.memoryTypes[memoryIndex].propertyFlags;
357 *typeIndexOut = static_cast<uint32_t>(memoryIndex);
358 return angle::Result::Continue;
359 }
360 }
361
362 // We did not find a compatible memory type, the Vulkan spec says the following -
363 // There must be at least one memory type with both the
364 // VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT and VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
365 // bits set in its propertyFlags
366 constexpr VkMemoryPropertyFlags fallbackMemoryPropertyFlags =
367 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
368
369 // If the caller wanted a host visible memory, just return the memory index
370 // with the fallback memory flags.
371 if (requestedMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
372 {
373 for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
374 {
375 if ((mMemoryProperties.memoryTypes[memoryIndex].propertyFlags &
376 fallbackMemoryPropertyFlags) == fallbackMemoryPropertyFlags)
377 {
378 *memoryPropertyFlagsOut = mMemoryProperties.memoryTypes[memoryIndex].propertyFlags;
379 *typeIndexOut = static_cast<uint32_t>(memoryIndex);
380 return angle::Result::Continue;
381 }
382 }
383 }
384
385 // TODO(jmadill): Add error message to error.
386 context->handleError(VK_ERROR_INCOMPATIBLE_DRIVER, __FILE__, ANGLE_FUNCTION, __LINE__);
387 return angle::Result::Stop;
388 }
389
// StagingBuffer implementation.
StagingBuffer::StagingBuffer() : mSize(0) {}

// Immediately destroys the buffer and frees its allocation.  Only safe when the GPU is done
// with the buffer; otherwise use release()/collectGarbage() for deferred destruction.
void StagingBuffer::destroy(RendererVk *renderer)
{
    VkDevice device = renderer->getDevice();
    mBuffer.destroy(device);
    mAllocation.destroy(renderer->getAllocator());
    mSize = 0;
}
400
init(Context * context,VkDeviceSize size,StagingUsage usage)401 angle::Result StagingBuffer::init(Context *context, VkDeviceSize size, StagingUsage usage)
402 {
403 VkBufferCreateInfo createInfo = {};
404 createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
405 createInfo.flags = 0;
406 createInfo.size = size;
407 createInfo.usage = GetStagingBufferUsageFlags(usage);
408 createInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
409 createInfo.queueFamilyIndexCount = 0;
410 createInfo.pQueueFamilyIndices = nullptr;
411
412 VkMemoryPropertyFlags preferredFlags = 0;
413 VkMemoryPropertyFlags requiredFlags =
414 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
415
416 RendererVk *renderer = context->getRenderer();
417 const vk::Allocator &allocator = renderer->getAllocator();
418
419 uint32_t memoryTypeIndex = 0;
420 ANGLE_VK_TRY(context,
421 allocator.createBuffer(createInfo, requiredFlags, preferredFlags,
422 renderer->getFeatures().persistentlyMappedBuffers.enabled,
423 &memoryTypeIndex, &mBuffer, &mAllocation));
424 mSize = static_cast<size_t>(size);
425 return angle::Result::Continue;
426 }
427
// Hands the buffer and its allocation to the context's garbage collector for deferred
// destruction once the GPU no longer uses them.
void StagingBuffer::release(ContextVk *contextVk)
{
    contextVk->addGarbage(&mBuffer);
    contextVk->addGarbage(&mAllocation);
}

// Queues the buffer and its allocation for destruction once |serial| has been completed by the
// GPU, using a one-off shared-use tracker.
void StagingBuffer::collectGarbage(RendererVk *renderer, Serial serial)
{
    vk::GarbageList garbageList;
    garbageList.emplace_back(vk::GetGarbage(&mBuffer));
    garbageList.emplace_back(vk::GetGarbage(&mAllocation));

    vk::SharedResourceUse sharedUse;
    sharedUse.init();
    sharedUse.updateSerialOneOff(serial);
    renderer->collectGarbage(std::move(sharedUse), std::move(garbageList));
}
445
InitMappableAllocation(const vk::Allocator & allocator,Allocation * allocation,VkDeviceSize size,int value,VkMemoryPropertyFlags memoryPropertyFlags)446 angle::Result InitMappableAllocation(const vk::Allocator &allocator,
447 Allocation *allocation,
448 VkDeviceSize size,
449 int value,
450 VkMemoryPropertyFlags memoryPropertyFlags)
451 {
452 uint8_t *mapPointer;
453 allocation->map(allocator, &mapPointer);
454 memset(mapPointer, value, static_cast<size_t>(size));
455
456 if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
457 {
458 allocation->flush(allocator, 0, size);
459 }
460
461 allocation->unmap(allocator);
462
463 return angle::Result::Continue;
464 }
465
InitMappableDeviceMemory(Context * context,DeviceMemory * deviceMemory,VkDeviceSize size,int value,VkMemoryPropertyFlags memoryPropertyFlags)466 angle::Result InitMappableDeviceMemory(Context *context,
467 DeviceMemory *deviceMemory,
468 VkDeviceSize size,
469 int value,
470 VkMemoryPropertyFlags memoryPropertyFlags)
471 {
472 VkDevice device = context->getDevice();
473
474 uint8_t *mapPointer;
475 ANGLE_VK_TRY(context, deviceMemory->map(device, 0, VK_WHOLE_SIZE, 0, &mapPointer));
476 memset(mapPointer, value, static_cast<size_t>(size));
477
478 // if the memory type is not host coherent, we perform an explicit flush
479 if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
480 {
481 VkMappedMemoryRange mappedRange = {};
482 mappedRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
483 mappedRange.memory = deviceMemory->getHandle();
484 mappedRange.size = VK_WHOLE_SIZE;
485 ANGLE_VK_TRY(context, vkFlushMappedMemoryRanges(device, 1, &mappedRange));
486 }
487
488 deviceMemory->unmap(device);
489
490 return angle::Result::Continue;
491 }
492
// Allocates and binds memory for |buffer|, querying the driver for requirements.  Thin wrapper
// over the templated AllocateBufferOrImageMemory helper.
angle::Result AllocateBufferMemory(vk::Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, requestedMemoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, buffer,
                                       deviceMemoryOut, sizeOut);
}

// Allocates and binds memory for |image|.  Unlike the buffer variant, the actual property flags
// of the chosen memory type are not reported back to the caller.
angle::Result AllocateImageMemory(vk::Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
                                       extraAllocationInfo, image, deviceMemoryOut, sizeOut);
}

// Allocates and binds memory for |image| using caller-supplied |memoryRequirements| instead of
// querying the driver (e.g. for external/imported memory with known requirements).
angle::Result AllocateImageMemoryWithRequirements(vk::Context *context,
                                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                                  const VkMemoryRequirements &memoryRequirements,
                                                  const void *extraAllocationInfo,
                                                  Image *image,
                                                  DeviceMemory *deviceMemoryOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
                                              memoryRequirements, extraAllocationInfo, image,
                                              deviceMemoryOut);
}
530
InitShaderAndSerial(Context * context,ShaderAndSerial * shaderAndSerial,const uint32_t * shaderCode,size_t shaderCodeSize)531 angle::Result InitShaderAndSerial(Context *context,
532 ShaderAndSerial *shaderAndSerial,
533 const uint32_t *shaderCode,
534 size_t shaderCodeSize)
535 {
536 VkShaderModuleCreateInfo createInfo = {};
537 createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
538 createInfo.flags = 0;
539 createInfo.codeSize = shaderCodeSize;
540 createInfo.pCode = shaderCode;
541
542 ANGLE_VK_TRY(context, shaderAndSerial->get().init(context->getDevice(), createInfo));
543 shaderAndSerial->updateSerial(context->getRenderer()->issueShaderSerial());
544 return angle::Result::Continue;
545 }
546
Get2DTextureType(uint32_t layerCount,GLint samples)547 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
548 {
549 if (layerCount > 1)
550 {
551 if (samples > 1)
552 {
553 return gl::TextureType::_2DMultisampleArray;
554 }
555 else
556 {
557 return gl::TextureType::_2DArray;
558 }
559 }
560 else
561 {
562 if (samples > 1)
563 {
564 return gl::TextureType::_2DMultisample;
565 }
566 else
567 {
568 return gl::TextureType::_2D;
569 }
570 }
571 }
572
// Default state: no handle, Invalid type.
GarbageObject::GarbageObject() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE) {}

// Takes ownership of an untyped handle; |handleType| records how destroy() must dispose of it.
GarbageObject::GarbageObject(HandleType handleType, GarbageHandle handle)
    : mHandleType(handleType), mHandle(handle)
{}

// Move construction delegates to move assignment via a default-constructed object.
GarbageObject::GarbageObject(GarbageObject &&other) : GarbageObject()
{
    *this = std::move(other);
}

// Swap-based move assignment: |rhs| receives this object's previous (possibly empty) state.
GarbageObject &GarbageObject::operator=(GarbageObject &&rhs)
{
    std::swap(mHandle, rhs.mHandle);
    std::swap(mHandleType, rhs.mHandleType);
    return *this;
}
590
// GarbageObject implementation
// Using c-style casts here to avoid conditional compile for MSVC 32-bit
// which fails to compile with reinterpret_cast, requiring static_cast.
//
// Destroys the wrapped handle with the vkDestroy*/vkFree* call matching its recorded type, then
// updates the renderer's active-handle accounting.
void GarbageObject::destroy(RendererVk *renderer)
{
    VkDevice device = renderer->getDevice();
    switch (mHandleType)
    {
        case HandleType::Semaphore:
            vkDestroySemaphore(device, (VkSemaphore)mHandle, nullptr);
            break;
        case HandleType::CommandBuffer:
            // Command buffers are pool allocated.
            UNREACHABLE();
            break;
        case HandleType::Event:
            vkDestroyEvent(device, (VkEvent)mHandle, nullptr);
            break;
        case HandleType::Fence:
            vkDestroyFence(device, (VkFence)mHandle, nullptr);
            break;
        case HandleType::DeviceMemory:
            vkFreeMemory(device, (VkDeviceMemory)mHandle, nullptr);
            break;
        case HandleType::Buffer:
            vkDestroyBuffer(device, (VkBuffer)mHandle, nullptr);
            break;
        case HandleType::BufferView:
            vkDestroyBufferView(device, (VkBufferView)mHandle, nullptr);
            break;
        case HandleType::Image:
            vkDestroyImage(device, (VkImage)mHandle, nullptr);
            break;
        case HandleType::ImageView:
            vkDestroyImageView(device, (VkImageView)mHandle, nullptr);
            break;
        case HandleType::ShaderModule:
            vkDestroyShaderModule(device, (VkShaderModule)mHandle, nullptr);
            break;
        case HandleType::PipelineLayout:
            vkDestroyPipelineLayout(device, (VkPipelineLayout)mHandle, nullptr);
            break;
        case HandleType::RenderPass:
            vkDestroyRenderPass(device, (VkRenderPass)mHandle, nullptr);
            break;
        case HandleType::Pipeline:
            vkDestroyPipeline(device, (VkPipeline)mHandle, nullptr);
            break;
        case HandleType::DescriptorSetLayout:
            vkDestroyDescriptorSetLayout(device, (VkDescriptorSetLayout)mHandle, nullptr);
            break;
        case HandleType::Sampler:
            vkDestroySampler(device, (VkSampler)mHandle, nullptr);
            break;
        case HandleType::DescriptorPool:
            vkDestroyDescriptorPool(device, (VkDescriptorPool)mHandle, nullptr);
            break;
        case HandleType::Framebuffer:
            vkDestroyFramebuffer(device, (VkFramebuffer)mHandle, nullptr);
            break;
        case HandleType::CommandPool:
            vkDestroyCommandPool(device, (VkCommandPool)mHandle, nullptr);
            break;
        case HandleType::QueryPool:
            vkDestroyQueryPool(device, (VkQueryPool)mHandle, nullptr);
            break;
        case HandleType::Allocation:
            // VMA allocations are freed through the allocator, not the device.
            vma::FreeMemory(renderer->getAllocator().getHandle(), (VmaAllocation)mHandle);
            break;
        default:
            UNREACHABLE();
            break;
    }

    renderer->getActiveHandleCounts().onDeallocate(mHandleType);
}
667
MakeDebugUtilsLabel(GLenum source,const char * marker,VkDebugUtilsLabelEXT * label)668 void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label)
669 {
670 static constexpr angle::ColorF kLabelColors[6] = {
671 angle::ColorF(1.0f, 0.5f, 0.5f, 1.0f), // DEBUG_SOURCE_API
672 angle::ColorF(0.5f, 1.0f, 0.5f, 1.0f), // DEBUG_SOURCE_WINDOW_SYSTEM
673 angle::ColorF(0.5f, 0.5f, 1.0f, 1.0f), // DEBUG_SOURCE_SHADER_COMPILER
674 angle::ColorF(0.7f, 0.7f, 0.7f, 1.0f), // DEBUG_SOURCE_THIRD_PARTY
675 angle::ColorF(0.5f, 0.8f, 0.9f, 1.0f), // DEBUG_SOURCE_APPLICATION
676 angle::ColorF(0.9f, 0.8f, 0.5f, 1.0f), // DEBUG_SOURCE_OTHER
677 };
678
679 int colorIndex = source - GL_DEBUG_SOURCE_API;
680 ASSERT(colorIndex >= 0 && static_cast<size_t>(colorIndex) < ArraySize(kLabelColors));
681
682 label->sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
683 label->pNext = nullptr;
684 label->pLabelName = marker;
685 kLabelColors[colorIndex].writeData(label->color);
686 }
687
// ClearValuesArray implementation.
// Starts with all clear values zeroed and no attachment marked as having a pending clear.
ClearValuesArray::ClearValuesArray() : mValues{}, mEnabled{} {}

ClearValuesArray::~ClearValuesArray() = default;

// Copying is value-wise; the type holds only plain data.
ClearValuesArray::ClearValuesArray(const ClearValuesArray &other) = default;

ClearValuesArray &ClearValuesArray::operator=(const ClearValuesArray &rhs) = default;
696
// Records a pending clear value for the attachment at |index|.  Stencil clears are redirected
// to the dedicated stencil slot so that a packed depth-stencil attachment can carry separate
// depth and stencil clear values.
void ClearValuesArray::store(uint32_t index,
                             VkImageAspectFlags aspectFlags,
                             const VkClearValue &clearValue)
{
    ASSERT(aspectFlags != 0);

    // We do this double if to handle the packed depth-stencil case.
    if ((aspectFlags & VK_IMAGE_ASPECT_STENCIL_BIT) != 0)
    {
        // Ensure for packed DS we're writing to the depth index.
        ASSERT(index == kClearValueDepthIndex ||
               (index == kClearValueStencilIndex && aspectFlags == VK_IMAGE_ASPECT_STENCIL_BIT));
        mValues[kClearValueStencilIndex] = clearValue;
        mEnabled.set(kClearValueStencilIndex);
    }

    // For any non-pure-stencil clear (color, depth, or packed depth-stencil), also store at the
    // caller's index.
    if (aspectFlags != VK_IMAGE_ASPECT_STENCIL_BIT)
    {
        mValues[index] = clearValue;
        mEnabled.set(index);
    }
}
719 } // namespace vk
720
#if !defined(ANGLE_SHARED_LIBVULKAN)
// Definitions of extension entry points loaded at runtime when not linking against a shared
// libvulkan.  Each pointer starts null and is resolved by the Init* helpers below via
// vkGetInstanceProcAddr/vkGetDeviceProcAddr.

// VK_EXT_debug_utils
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT   = nullptr;
PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = nullptr;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT       = nullptr;
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT           = nullptr;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT     = nullptr;

// VK_EXT_debug_report
PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT   = nullptr;
PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;

// VK_KHR_get_physical_device_properties2
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = nullptr;
PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR     = nullptr;

// VK_KHR_external_semaphore_fd
PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = nullptr;

// VK_EXT_external_memory_host
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = nullptr;

// VK_EXT_transform_feedback
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = nullptr;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT             = nullptr;
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT                 = nullptr;
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT                       = nullptr;
PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT                           = nullptr;
PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT               = nullptr;

PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = nullptr;
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR   = nullptr;

// VK_KHR_external_fence_capabilities
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR =
    nullptr;

// VK_KHR_external_fence_fd
PFN_vkGetFenceFdKHR vkGetFenceFdKHR       = nullptr;
PFN_vkImportFenceFdKHR vkImportFenceFdKHR = nullptr;

// VK_KHR_external_semaphore_capabilities
PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR
    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr;

#    if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = nullptr;
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID =
    nullptr;
PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = nullptr;
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = nullptr;
#    endif

// Resolves an instance-level entry point into the matching global pointer above; asserts that
// the loader actually returned a function.
#    define GET_INSTANCE_FUNC(vkName)                                                          \
        do                                                                                     \
        {                                                                                      \
            vkName = reinterpret_cast<PFN_##vkName>(vkGetInstanceProcAddr(instance, #vkName)); \
            ASSERT(vkName);                                                                    \
        } while (0)

// Resolves a device-level entry point; device-level dispatch avoids loader trampolines.
#    define GET_DEVICE_FUNC(vkName)                                                        \
        do                                                                                 \
        {                                                                                  \
            vkName = reinterpret_cast<PFN_##vkName>(vkGetDeviceProcAddr(device, #vkName)); \
            ASSERT(vkName);                                                                \
        } while (0)
794
// Resolves the VK_EXT_debug_utils entry points (debug messenger creation and
// destruction plus command-buffer debug labels) from the instance.
void InitDebugUtilsEXTFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkDestroyDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkCmdBeginDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdEndDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdInsertDebugUtilsLabelEXT);
}
803
// Resolves the VK_EXT_debug_report callback entry points from the instance.
void InitDebugReportEXTFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateDebugReportCallbackEXT);
    GET_INSTANCE_FUNC(vkDestroyDebugReportCallbackEXT);
}
809
// Resolves the VK_KHR_get_physical_device_properties2 query entry points.
void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceProperties2KHR);
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceFeatures2KHR);
}
815
// Resolves the VK_EXT_transform_feedback command entry points.  Unlike the
// other Init* helpers in this file, these are device-level functions.
void InitTransformFeedbackEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBindTransformFeedbackBuffersEXT);
    GET_DEVICE_FUNC(vkCmdBeginTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdEndTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdBeginQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdEndQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdDrawIndirectByteCountEXT);
}
825
#    if defined(ANGLE_PLATFORM_FUCHSIA)
// Resolves the VK_FUCHSIA_imagepipe_surface creation entry point.
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateImagePipeSurfaceFUCHSIA);
}
#    endif
832
#    if defined(ANGLE_PLATFORM_ANDROID)
// Resolves the Android hardware-buffer external-memory entry points.
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetAndroidHardwareBufferPropertiesANDROID);
    GET_INSTANCE_FUNC(vkGetMemoryAndroidHardwareBufferANDROID);
}
#    endif
840
#    if defined(ANGLE_PLATFORM_GGP)
// Resolves the VK_GGP_stream_descriptor_surface creation entry point.
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateStreamDescriptorSurfaceGGP);
}
#    endif  // defined(ANGLE_PLATFORM_GGP)
847
// Resolves the VK_KHR_external_semaphore_fd import entry point.
void InitExternalSemaphoreFdFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkImportSemaphoreFdKHR);
}
852
// Resolves the VK_EXT_external_memory_host properties-query entry point.
void InitExternalMemoryHostFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetMemoryHostPointerPropertiesEXT);
}
857
// VK_KHR_external_fence_capabilities
// Resolves the physical-device external-fence-properties query.
void InitExternalFenceCapabilitiesFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalFencePropertiesKHR);
}
863
// VK_KHR_external_fence_fd
// Resolves the fence file-descriptor export/import entry points.
void InitExternalFenceFdFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetFenceFdKHR);
    GET_INSTANCE_FUNC(vkImportFenceFdKHR);
}
870
// VK_KHR_external_semaphore_capabilities
// Resolves the physical-device external-semaphore-properties query.
void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalSemaphorePropertiesKHR);
}
876
877 # undef GET_INSTANCE_FUNC
878 # undef GET_DEVICE_FUNC
879
880 #endif // !defined(ANGLE_SHARED_LIBVULKAN)
881
882 namespace gl_vk
883 {
884
GetFilter(const GLenum filter)885 VkFilter GetFilter(const GLenum filter)
886 {
887 switch (filter)
888 {
889 case GL_LINEAR_MIPMAP_LINEAR:
890 case GL_LINEAR_MIPMAP_NEAREST:
891 case GL_LINEAR:
892 return VK_FILTER_LINEAR;
893 case GL_NEAREST_MIPMAP_LINEAR:
894 case GL_NEAREST_MIPMAP_NEAREST:
895 case GL_NEAREST:
896 return VK_FILTER_NEAREST;
897 default:
898 UNIMPLEMENTED();
899 return VK_FILTER_MAX_ENUM;
900 }
901 }
902
GetSamplerMipmapMode(const GLenum filter)903 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
904 {
905 switch (filter)
906 {
907 case GL_LINEAR_MIPMAP_LINEAR:
908 case GL_NEAREST_MIPMAP_LINEAR:
909 return VK_SAMPLER_MIPMAP_MODE_LINEAR;
910 case GL_LINEAR:
911 case GL_NEAREST:
912 case GL_NEAREST_MIPMAP_NEAREST:
913 case GL_LINEAR_MIPMAP_NEAREST:
914 return VK_SAMPLER_MIPMAP_MODE_NEAREST;
915 default:
916 UNIMPLEMENTED();
917 return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
918 }
919 }
920
GetSamplerAddressMode(const GLenum wrap)921 VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
922 {
923 switch (wrap)
924 {
925 case GL_REPEAT:
926 return VK_SAMPLER_ADDRESS_MODE_REPEAT;
927 case GL_MIRRORED_REPEAT:
928 return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
929 case GL_CLAMP_TO_BORDER:
930 return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
931 case GL_CLAMP_TO_EDGE:
932 return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
933 default:
934 UNIMPLEMENTED();
935 return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
936 }
937 }
938
GetRect(const gl::Rectangle & source)939 VkRect2D GetRect(const gl::Rectangle &source)
940 {
941 return {{source.x, source.y},
942 {static_cast<uint32_t>(source.width), static_cast<uint32_t>(source.height)}};
943 }
944
GetPrimitiveTopology(gl::PrimitiveMode mode)945 VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
946 {
947 switch (mode)
948 {
949 case gl::PrimitiveMode::Triangles:
950 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
951 case gl::PrimitiveMode::Points:
952 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
953 case gl::PrimitiveMode::Lines:
954 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
955 case gl::PrimitiveMode::LineStrip:
956 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
957 case gl::PrimitiveMode::TriangleFan:
958 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
959 case gl::PrimitiveMode::TriangleStrip:
960 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
961 case gl::PrimitiveMode::LineLoop:
962 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
963 default:
964 UNREACHABLE();
965 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
966 }
967 }
968
GetCullMode(const gl::RasterizerState & rasterState)969 VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
970 {
971 if (!rasterState.cullFace)
972 {
973 return VK_CULL_MODE_NONE;
974 }
975
976 switch (rasterState.cullMode)
977 {
978 case gl::CullFaceMode::Front:
979 return VK_CULL_MODE_FRONT_BIT;
980 case gl::CullFaceMode::Back:
981 return VK_CULL_MODE_BACK_BIT;
982 case gl::CullFaceMode::FrontAndBack:
983 return VK_CULL_MODE_FRONT_AND_BACK;
984 default:
985 UNREACHABLE();
986 return VK_CULL_MODE_NONE;
987 }
988 }
989
GetFrontFace(GLenum frontFace,bool invertCullFace)990 VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
991 {
992 // Invert CW and CCW to have the same behavior as OpenGL.
993 switch (frontFace)
994 {
995 case GL_CW:
996 return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
997 case GL_CCW:
998 return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
999 default:
1000 UNREACHABLE();
1001 return VK_FRONT_FACE_CLOCKWISE;
1002 }
1003 }
1004
GetSamples(GLint sampleCount)1005 VkSampleCountFlagBits GetSamples(GLint sampleCount)
1006 {
1007 switch (sampleCount)
1008 {
1009 case 0:
1010 case 1:
1011 return VK_SAMPLE_COUNT_1_BIT;
1012 case 2:
1013 return VK_SAMPLE_COUNT_2_BIT;
1014 case 4:
1015 return VK_SAMPLE_COUNT_4_BIT;
1016 case 8:
1017 return VK_SAMPLE_COUNT_8_BIT;
1018 case 16:
1019 return VK_SAMPLE_COUNT_16_BIT;
1020 case 32:
1021 return VK_SAMPLE_COUNT_32_BIT;
1022 default:
1023 UNREACHABLE();
1024 return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
1025 }
1026 }
1027
GetSwizzle(const GLenum swizzle)1028 VkComponentSwizzle GetSwizzle(const GLenum swizzle)
1029 {
1030 switch (swizzle)
1031 {
1032 case GL_ALPHA:
1033 return VK_COMPONENT_SWIZZLE_A;
1034 case GL_RED:
1035 return VK_COMPONENT_SWIZZLE_R;
1036 case GL_GREEN:
1037 return VK_COMPONENT_SWIZZLE_G;
1038 case GL_BLUE:
1039 return VK_COMPONENT_SWIZZLE_B;
1040 case GL_ZERO:
1041 return VK_COMPONENT_SWIZZLE_ZERO;
1042 case GL_ONE:
1043 return VK_COMPONENT_SWIZZLE_ONE;
1044 default:
1045 UNREACHABLE();
1046 return VK_COMPONENT_SWIZZLE_IDENTITY;
1047 }
1048 }
1049
GetCompareOp(const GLenum compareFunc)1050 VkCompareOp GetCompareOp(const GLenum compareFunc)
1051 {
1052 switch (compareFunc)
1053 {
1054 case GL_NEVER:
1055 return VK_COMPARE_OP_NEVER;
1056 case GL_LESS:
1057 return VK_COMPARE_OP_LESS;
1058 case GL_EQUAL:
1059 return VK_COMPARE_OP_EQUAL;
1060 case GL_LEQUAL:
1061 return VK_COMPARE_OP_LESS_OR_EQUAL;
1062 case GL_GREATER:
1063 return VK_COMPARE_OP_GREATER;
1064 case GL_NOTEQUAL:
1065 return VK_COMPARE_OP_NOT_EQUAL;
1066 case GL_GEQUAL:
1067 return VK_COMPARE_OP_GREATER_OR_EQUAL;
1068 case GL_ALWAYS:
1069 return VK_COMPARE_OP_ALWAYS;
1070 default:
1071 UNREACHABLE();
1072 return VK_COMPARE_OP_ALWAYS;
1073 }
1074 }
1075
GetOffset(const gl::Offset & glOffset,VkOffset3D * vkOffset)1076 void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
1077 {
1078 vkOffset->x = glOffset.x;
1079 vkOffset->y = glOffset.y;
1080 vkOffset->z = glOffset.z;
1081 }
1082
GetExtent(const gl::Extents & glExtent,VkExtent3D * vkExtent)1083 void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
1084 {
1085 vkExtent->width = glExtent.width;
1086 vkExtent->height = glExtent.height;
1087 vkExtent->depth = glExtent.depth;
1088 }
1089
GetImageType(gl::TextureType textureType)1090 VkImageType GetImageType(gl::TextureType textureType)
1091 {
1092 switch (textureType)
1093 {
1094 case gl::TextureType::_2D:
1095 case gl::TextureType::_2DArray:
1096 case gl::TextureType::_2DMultisample:
1097 case gl::TextureType::_2DMultisampleArray:
1098 case gl::TextureType::CubeMap:
1099 case gl::TextureType::External:
1100 return VK_IMAGE_TYPE_2D;
1101 case gl::TextureType::_3D:
1102 return VK_IMAGE_TYPE_3D;
1103 default:
1104 // We will need to implement all the texture types for ES3+.
1105 UNIMPLEMENTED();
1106 return VK_IMAGE_TYPE_MAX_ENUM;
1107 }
1108 }
1109
GetImageViewType(gl::TextureType textureType)1110 VkImageViewType GetImageViewType(gl::TextureType textureType)
1111 {
1112 switch (textureType)
1113 {
1114 case gl::TextureType::_2D:
1115 case gl::TextureType::_2DMultisample:
1116 case gl::TextureType::External:
1117 return VK_IMAGE_VIEW_TYPE_2D;
1118 case gl::TextureType::_2DArray:
1119 case gl::TextureType::_2DMultisampleArray:
1120 return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
1121 case gl::TextureType::_3D:
1122 return VK_IMAGE_VIEW_TYPE_3D;
1123 case gl::TextureType::CubeMap:
1124 return VK_IMAGE_VIEW_TYPE_CUBE;
1125 default:
1126 // We will need to implement all the texture types for ES3+.
1127 UNIMPLEMENTED();
1128 return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
1129 }
1130 }
1131
GetColorComponentFlags(bool red,bool green,bool blue,bool alpha)1132 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
1133 {
1134 return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
1135 (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
1136 }
1137
GetShaderStageFlags(gl::ShaderBitSet activeShaders)1138 VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
1139 {
1140 VkShaderStageFlags flags = 0;
1141 for (const gl::ShaderType shaderType : activeShaders)
1142 {
1143 flags |= kShaderStageMap[shaderType];
1144 }
1145 return flags;
1146 }
1147
GetViewport(const gl::Rectangle & viewport,float nearPlane,float farPlane,bool invertViewport,GLint renderAreaHeight,VkViewport * viewportOut)1148 void GetViewport(const gl::Rectangle &viewport,
1149 float nearPlane,
1150 float farPlane,
1151 bool invertViewport,
1152 GLint renderAreaHeight,
1153 VkViewport *viewportOut)
1154 {
1155 viewportOut->x = static_cast<float>(viewport.x);
1156 viewportOut->y = static_cast<float>(viewport.y);
1157 viewportOut->width = static_cast<float>(viewport.width);
1158 viewportOut->height = static_cast<float>(viewport.height);
1159 viewportOut->minDepth = gl::clamp01(nearPlane);
1160 viewportOut->maxDepth = gl::clamp01(farPlane);
1161
1162 if (invertViewport)
1163 {
1164 viewportOut->y = static_cast<float>(renderAreaHeight - viewport.y);
1165 viewportOut->height = -viewportOut->height;
1166 }
1167 }
1168
GetExtentsAndLayerCount(gl::TextureType textureType,const gl::Extents & extents,VkExtent3D * extentsOut,uint32_t * layerCountOut)1169 void GetExtentsAndLayerCount(gl::TextureType textureType,
1170 const gl::Extents &extents,
1171 VkExtent3D *extentsOut,
1172 uint32_t *layerCountOut)
1173 {
1174 extentsOut->width = extents.width;
1175 extentsOut->height = extents.height;
1176
1177 switch (textureType)
1178 {
1179 case gl::TextureType::CubeMap:
1180 extentsOut->depth = 1;
1181 *layerCountOut = gl::kCubeFaceCount;
1182 break;
1183
1184 case gl::TextureType::_2DArray:
1185 case gl::TextureType::_2DMultisampleArray:
1186 extentsOut->depth = 1;
1187 *layerCountOut = extents.depth;
1188 break;
1189
1190 default:
1191 extentsOut->depth = extents.depth;
1192 *layerCountOut = 1;
1193 break;
1194 }
1195 }
1196 } // namespace gl_vk
1197
1198 namespace vk_gl
1199 {
AddSampleCounts(VkSampleCountFlags sampleCounts,gl::SupportedSampleSet * setOut)1200 void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
1201 {
1202 // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b. At the time of this
1203 // writing, b is in [0, 6], however, we test all 32 bits in case the enum is extended.
1204 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1205 {
1206 setOut->insert(static_cast<GLuint>(1 << bit));
1207 }
1208 }
1209
GetMaxSampleCount(VkSampleCountFlags sampleCounts)1210 GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
1211 {
1212 GLuint maxCount = 0;
1213 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1214 {
1215 maxCount = static_cast<GLuint>(1 << bit);
1216 }
1217 return maxCount;
1218 }
1219
GetSampleCount(VkSampleCountFlags supportedCounts,GLuint requestedCount)1220 GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
1221 {
1222 for (size_t bit : angle::BitSet32<32>(supportedCounts & kSupportedSampleCounts))
1223 {
1224 GLuint sampleCount = static_cast<GLuint>(1 << bit);
1225 if (sampleCount >= requestedCount)
1226 {
1227 return sampleCount;
1228 }
1229 }
1230
1231 UNREACHABLE();
1232 return 0;
1233 }
1234 } // namespace vk_gl
1235 } // namespace rx
1236