1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 // Helper functions for the Vulkan Renderer.
8 //
9
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <cstring>

#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/ResourceVk.h"
#include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
19
20 namespace angle
21 {
ToEGL(Result result,rx::DisplayVk * displayVk,EGLint errorCode)22 egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode)
23 {
24 if (result != angle::Result::Continue)
25 {
26 return displayVk->getEGLError(errorCode);
27 }
28 else
29 {
30 return egl::NoError();
31 }
32 }
33 } // namespace angle
34
35 namespace rx
36 {
37 namespace
38 {
GetStagingBufferUsageFlags(vk::StagingUsage usage)39 VkImageUsageFlags GetStagingBufferUsageFlags(vk::StagingUsage usage)
40 {
41 switch (usage)
42 {
43 case vk::StagingUsage::Read:
44 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
45 case vk::StagingUsage::Write:
46 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
47 case vk::StagingUsage::Both:
48 return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
49 default:
50 UNREACHABLE();
51 return 0;
52 }
53 }
54
// Finds a memory type compatible with |memoryRequirements| and the requested property
// flags, then allocates |memoryRequirements.size| bytes from it.  On success,
// |memoryPropertyFlagsOut| holds the actual property flags of the chosen memory type
// (possibly a superset of what was requested) and |deviceMemoryOut| owns the allocation.
// |extraAllocationInfo| is chained onto VkMemoryAllocateInfo::pNext (may be nullptr).
angle::Result FindAndAllocateCompatibleMemory(vk::Context *context,
                                              const vk::MemoryProperties &memoryProperties,
                                              VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                              VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                              const VkMemoryRequirements &memoryRequirements,
                                              const void *extraAllocationInfo,
                                              vk::DeviceMemory *deviceMemoryOut)
{
    // Pick an arbitrary value to initialize non-zero memory for sanitization.
    constexpr int kNonZeroInitValue = 55;

    VkDevice device = context->getDevice();

    uint32_t memoryTypeIndex = 0;
    ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(context, memoryRequirements,
                                                         requestedMemoryPropertyFlags,
                                                         memoryPropertyFlagsOut, &memoryTypeIndex));

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = extraAllocationInfo;
    allocInfo.memoryTypeIndex = memoryTypeIndex;
    allocInfo.allocationSize = memoryRequirements.size;

    ANGLE_VK_TRY(context, deviceMemoryOut->allocate(device, allocInfo));

    // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
    // invalid values ensures our testing doesn't assume zero-initialized memory.
    RendererVk *renderer = context->getRenderer();
    if (renderer->getFeatures().allocateNonZeroMemory.enabled)
    {
        if ((*memoryPropertyFlagsOut & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            // Can map the memory.  Non-host-visible memory is left untouched here; filling
            // it would require a staging copy.
            ANGLE_TRY(vk::InitMappableDeviceMemory(context, deviceMemoryOut,
                                                   memoryRequirements.size, kNonZeroInitValue,
                                                   *memoryPropertyFlagsOut));
        }
    }

    return angle::Result::Continue;
}
97
// Allocates memory compatible with |memoryRequirements| and binds it to
// |bufferOrImage| (a vk::Buffer or vk::Image — any type with bindMemory()).
// The allocated memory is returned in |deviceMemoryOut|.
template <typename T>
angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
                                                 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                                 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                 const VkMemoryRequirements &memoryRequirements,
                                                 const void *extraAllocationInfo,
                                                 T *bufferOrImage,
                                                 vk::DeviceMemory *deviceMemoryOut)
{
    const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();

    ANGLE_TRY(FindAndAllocateCompatibleMemory(
        context, memoryProperties, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
        memoryRequirements, extraAllocationInfo, deviceMemoryOut));
    ANGLE_VK_TRY(context, bufferOrImage->bindMemory(context->getDevice(), *deviceMemoryOut));
    return angle::Result::Continue;
}
115
// Queries the driver for |bufferOrImage|'s memory requirements, then allocates and
// binds matching memory.  |sizeOut| receives the driver-reported allocation size,
// which may exceed the size the object was created with (alignment/padding).
template <typename T>
angle::Result AllocateBufferOrImageMemory(vk::Context *context,
                                          VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                          VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                          const void *extraAllocationInfo,
                                          T *bufferOrImage,
                                          vk::DeviceMemory *deviceMemoryOut,
                                          VkDeviceSize *sizeOut)
{
    // Call driver to determine memory requirements.
    VkMemoryRequirements memoryRequirements;
    bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);

    ANGLE_TRY(AllocateAndBindBufferOrImageMemory(
        context, requestedMemoryPropertyFlags, memoryPropertyFlagsOut, memoryRequirements,
        extraAllocationInfo, bufferOrImage, deviceMemoryOut));

    *sizeOut = memoryRequirements.size;

    return angle::Result::Continue;
}
137
// Unified layer that includes full validation layer stack
constexpr char kVkKhronosValidationLayerName[] = "VK_LAYER_KHRONOS_validation";
// Older meta-layer that predates the unified Khronos layer.
constexpr char kVkStandardValidationLayerName[] = "VK_LAYER_LUNARG_standard_validation";
// Last-resort fallback: the individual component layers, all of which must be present.
const char *kVkValidationLayerNames[] = {
    "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
    "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
    "VK_LAYER_GOOGLE_unique_objects"};
145
HasValidationLayer(const std::vector<VkLayerProperties> & layerProps,const char * layerName)146 bool HasValidationLayer(const std::vector<VkLayerProperties> &layerProps, const char *layerName)
147 {
148 for (const auto &layerProp : layerProps)
149 {
150 if (std::string(layerProp.layerName) == layerName)
151 {
152 return true;
153 }
154 }
155
156 return false;
157 }
158
HasKhronosValidationLayer(const std::vector<VkLayerProperties> & layerProps)159 bool HasKhronosValidationLayer(const std::vector<VkLayerProperties> &layerProps)
160 {
161 return HasValidationLayer(layerProps, kVkKhronosValidationLayerName);
162 }
163
HasStandardValidationLayer(const std::vector<VkLayerProperties> & layerProps)164 bool HasStandardValidationLayer(const std::vector<VkLayerProperties> &layerProps)
165 {
166 return HasValidationLayer(layerProps, kVkStandardValidationLayerName);
167 }
168
HasValidationLayers(const std::vector<VkLayerProperties> & layerProps)169 bool HasValidationLayers(const std::vector<VkLayerProperties> &layerProps)
170 {
171 for (const char *layerName : kVkValidationLayerNames)
172 {
173 if (!HasValidationLayer(layerProps, layerName))
174 {
175 return false;
176 }
177 }
178
179 return true;
180 }
181 } // anonymous namespace
182
// Returns a human-readable description of a VkResult, suitable for error messages
// and logs.  Descriptions follow the wording of the Vulkan specification; unknown
// codes fall through to a generic message.
const char *VulkanResultString(VkResult result)
{
    switch (result)
    {
        case VK_SUCCESS:
            return "Command successfully completed.";
        case VK_NOT_READY:
            return "A fence or query has not yet completed.";
        case VK_TIMEOUT:
            return "A wait operation has not completed in the specified time.";
        case VK_EVENT_SET:
            return "An event is signaled.";
        case VK_EVENT_RESET:
            return "An event is unsignaled.";
        case VK_INCOMPLETE:
            return "A return array was too small for the result.";
        case VK_SUBOPTIMAL_KHR:
            return "A swapchain no longer matches the surface properties exactly, but can still be "
                   "used to present to the surface successfully.";
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            return "A host memory allocation has failed.";
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
            return "A device memory allocation has failed.";
        case VK_ERROR_INITIALIZATION_FAILED:
            return "Initialization of an object could not be completed for implementation-specific "
                   "reasons.";
        case VK_ERROR_DEVICE_LOST:
            return "The logical or physical device has been lost.";
        case VK_ERROR_MEMORY_MAP_FAILED:
            return "Mapping of a memory object has failed.";
        case VK_ERROR_LAYER_NOT_PRESENT:
            return "A requested layer is not present or could not be loaded.";
        case VK_ERROR_EXTENSION_NOT_PRESENT:
            return "A requested extension is not supported.";
        case VK_ERROR_FEATURE_NOT_PRESENT:
            return "A requested feature is not supported.";
        case VK_ERROR_INCOMPATIBLE_DRIVER:
            return "The requested version of Vulkan is not supported by the driver or is otherwise "
                   "incompatible for implementation-specific reasons.";
        case VK_ERROR_TOO_MANY_OBJECTS:
            return "Too many objects of the type have already been created.";
        case VK_ERROR_FORMAT_NOT_SUPPORTED:
            return "A requested format is not supported on this device.";
        case VK_ERROR_SURFACE_LOST_KHR:
            return "A surface is no longer available.";
        case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
            return "The requested window is already connected to a VkSurfaceKHR, or to some other "
                   "non-Vulkan API.";
        case VK_ERROR_OUT_OF_DATE_KHR:
            return "A surface has changed in such a way that it is no longer compatible with the "
                   "swapchain.";
        case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
            return "The display used by a swapchain does not use the same presentable image "
                   "layout, or is incompatible in a way that prevents sharing an image.";
        case VK_ERROR_VALIDATION_FAILED_EXT:
            return "The validation layers detected invalid API usage.";
        case VK_ERROR_INVALID_SHADER_NV:
            return "Invalid Vulkan shader was generated.";
        default:
            return "Unknown vulkan error code.";
    }
}
245
// Selects which validation layers to enable, appending their names to
// |enabledLayerNames|.  Preference order: the unified Khronos layer, then the
// legacy LunarG standard-validation meta-layer, then the individual component
// layers.  Returns false (with an error or warning depending on |mustHaveLayers|)
// if none of the options is fully available; |enabledLayerNames| is untouched then.
bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
                                  bool mustHaveLayers,
                                  VulkanLayerVector *enabledLayerNames)
{
    // Favor unified Khronos layer, but fallback to standard validation
    if (HasKhronosValidationLayer(layerProps))
    {
        enabledLayerNames->push_back(kVkKhronosValidationLayerName);
    }
    else if (HasStandardValidationLayer(layerProps))
    {
        enabledLayerNames->push_back(kVkStandardValidationLayerName);
    }
    else if (HasValidationLayers(layerProps))
    {
        for (const char *layerName : kVkValidationLayerNames)
        {
            enabledLayerNames->push_back(layerName);
        }
    }
    else
    {
        // Generate an error if the layers were explicitly requested, warning otherwise.
        if (mustHaveLayers)
        {
            ERR() << "Vulkan validation layers are missing.";
        }
        else
        {
            WARN() << "Vulkan validation layers are missing.";
        }

        return false;
    }

    return true;
}
283
284 namespace vk
285 {
// Environment variable consumed by the Vulkan loader: search path for layer manifests.
const char *gLoaderLayersPathEnv = "VK_LAYER_PATH";
// Environment variable consumed by the Vulkan loader: explicit ICD manifest list.
const char *gLoaderICDFilenamesEnv = "VK_ICD_FILENAMES";
// ANGLE-specific: lets users/tests pick a physical device by (partial) name.
const char *gANGLEPreferredDevice = "ANGLE_PREFERRED_DEVICE";
289
GetDepthStencilAspectFlags(const angle::Format & format)290 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
291 {
292 return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
293 (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
294 }
295
GetFormatAspectFlags(const angle::Format & format)296 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
297 {
298 VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
299 // If the image is not depth stencil, assume color aspect. Note that detecting color formats
300 // is less trivial than depth/stencil, e.g. as block formats don't indicate any bits for RGBA
301 // channels.
302 return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
303 }
304
// Context implementation.
// A thin base that gives Vulkan helper code access to the renderer and device.
Context::Context(RendererVk *renderer) : mRenderer(renderer) {}

Context::~Context() {}

// Convenience accessor for the renderer's VkDevice.
VkDevice Context::getDevice() const
{
    return mRenderer->getDevice();
}
314
// MemoryProperties implementation.
// Caches VkPhysicalDeviceMemoryProperties for memory-type lookups.
MemoryProperties::MemoryProperties() : mMemoryProperties{} {}

// Queries and caches the physical device's memory properties.  Must be called
// exactly once before any lookup (the asserts enforce init-once and a non-empty
// result).
void MemoryProperties::init(VkPhysicalDevice physicalDevice)
{
    ASSERT(mMemoryProperties.memoryTypeCount == 0);
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
    ASSERT(mMemoryProperties.memoryTypeCount > 0);
}

// Resets the cached properties; init() may be called again afterwards.
void MemoryProperties::destroy()
{
    mMemoryProperties = {};
}
329
// Finds a memory type index allowed by |memoryRequirements.memoryTypeBits| whose
// property flags contain all of |requestedMemoryPropertyFlags|.  On success,
// returns the chosen index in |typeIndexOut| and its full property flags in
// |memoryPropertyFlagsOut|.  If no exact match exists but the caller asked for
// host-visible memory, falls back to any HOST_VISIBLE|HOST_COHERENT type (which
// the spec guarantees exists).  Fails with VK_ERROR_INCOMPATIBLE_DRIVER otherwise.
angle::Result MemoryProperties::findCompatibleMemoryIndex(
    Context *context,
    const VkMemoryRequirements &memoryRequirements,
    VkMemoryPropertyFlags requestedMemoryPropertyFlags,
    VkMemoryPropertyFlags *memoryPropertyFlagsOut,
    uint32_t *typeIndexOut) const
{
    // BitSet32<32> below can only represent 32 memory types, matching the spec's maximum.
    ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);

    // Find a compatible memory pool index. If the index doesn't change, we could cache it.
    // Not finding a valid memory pool means an out-of-spec driver, or internal error.
    // TODO(jmadill): Determine if it is possible to cache indexes.
    // TODO(jmadill): More efficient memory allocation.
    for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
    {
        ASSERT(memoryIndex < mMemoryProperties.memoryTypeCount);

        if ((mMemoryProperties.memoryTypes[memoryIndex].propertyFlags &
             requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
        {
            *memoryPropertyFlagsOut = mMemoryProperties.memoryTypes[memoryIndex].propertyFlags;
            *typeIndexOut = static_cast<uint32_t>(memoryIndex);
            return angle::Result::Continue;
        }
    }

    // We did not find a compatible memory type, the Vulkan spec says the following -
    // There must be at least one memory type with both the
    // VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT and VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
    // bits set in its propertyFlags
    constexpr VkMemoryPropertyFlags fallbackMemoryPropertyFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    // If the caller wanted a host visible memory, just return the memory index
    // with the fallback memory flags.
    if (requestedMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
    {
        for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
        {
            if ((mMemoryProperties.memoryTypes[memoryIndex].propertyFlags &
                 fallbackMemoryPropertyFlags) == fallbackMemoryPropertyFlags)
            {
                *memoryPropertyFlagsOut = mMemoryProperties.memoryTypes[memoryIndex].propertyFlags;
                *typeIndexOut = static_cast<uint32_t>(memoryIndex);
                return angle::Result::Continue;
            }
        }
    }

    // TODO(jmadill): Add error message to error.
    context->handleError(VK_ERROR_INCOMPATIBLE_DRIVER, __FILE__, ANGLE_FUNCTION, __LINE__);
    return angle::Result::Stop;
}
383
// StagingBuffer implementation.
// A host-visible buffer used to shuttle data between the CPU and device-local resources.
StagingBuffer::StagingBuffer() : mSize(0) {}

// Immediately destroys the buffer and frees its allocation.  Only safe once the
// GPU is done with the buffer; for deferred destruction use release()/collectGarbage().
void StagingBuffer::destroy(RendererVk *renderer)
{
    VkDevice device = renderer->getDevice();
    mBuffer.destroy(device);
    mAllocation.destroy(renderer->getAllocator());
    mSize = 0;
}
394
init(Context * context,VkDeviceSize size,StagingUsage usage)395 angle::Result StagingBuffer::init(Context *context, VkDeviceSize size, StagingUsage usage)
396 {
397 VkBufferCreateInfo createInfo = {};
398 createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
399 createInfo.flags = 0;
400 createInfo.size = size;
401 createInfo.usage = GetStagingBufferUsageFlags(usage);
402 createInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
403 createInfo.queueFamilyIndexCount = 0;
404 createInfo.pQueueFamilyIndices = nullptr;
405
406 VkMemoryPropertyFlags memoryPropertyOutFlags;
407 VkMemoryPropertyFlags preferredFlags = 0;
408 VkMemoryPropertyFlags requiredFlags =
409 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
410
411 mAllocation.createBufferAndMemory(
412 context->getRenderer()->getAllocator(), &createInfo, requiredFlags, preferredFlags,
413 context->getRenderer()->getFeatures().persistentlyMappedBuffers.enabled, &mBuffer,
414 &memoryPropertyOutFlags);
415
416 mSize = static_cast<size_t>(size);
417 return angle::Result::Continue;
418 }
419
// Hands the buffer and its allocation to the context's garbage system for
// deferred destruction once the GPU has finished with them.
void StagingBuffer::release(ContextVk *contextVk)
{
    contextVk->addGarbage(&mBuffer);
    contextVk->addGarbage(&mAllocation);
}
425
// Queues the buffer and allocation as renderer-level garbage, tagged with
// |serial| so they are destroyed only after that serial has completed.
void StagingBuffer::collectGarbage(RendererVk *renderer, Serial serial)
{
    vk::GarbageList garbageList;
    garbageList.emplace_back(vk::GetGarbage(&mBuffer));
    garbageList.emplace_back(vk::GetGarbage(&mAllocation));

    // A one-off resource-use tracker so the renderer knows when destruction is safe.
    vk::SharedResourceUse sharedUse;
    sharedUse.init();
    sharedUse.updateSerialOneOff(serial);
    renderer->collectGarbage(std::move(sharedUse), std::move(garbageList));
}
437
// Fills the first |size| bytes of a host-mappable VMA allocation with |value|,
// flushing explicitly when the memory is not host-coherent.
// NOTE(review): the result of allocation->map() is not checked here — presumably
// because there is no vk::Context to report errors through; confirm whether a
// failed map can reach the memset below.
angle::Result InitMappableAllocation(VmaAllocator allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags)
{
    uint8_t *mapPointer;
    allocation->map(allocator, &mapPointer);
    memset(mapPointer, value, static_cast<size_t>(size));

    // Non-coherent memory requires an explicit flush to make host writes visible.
    if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
    {
        allocation->flush(allocator, 0, size);
    }

    allocation->unmap(allocator);

    return angle::Result::Continue;
}
457
// Fills the first |size| bytes of host-mappable |deviceMemory| with |value|,
// flushing explicitly when the memory type is not host-coherent.  Used to
// poison allocations under the allocateNonZeroMemory feature.
angle::Result InitMappableDeviceMemory(Context *context,
                                       DeviceMemory *deviceMemory,
                                       VkDeviceSize size,
                                       int value,
                                       VkMemoryPropertyFlags memoryPropertyFlags)
{
    VkDevice device = context->getDevice();

    uint8_t *mapPointer;
    ANGLE_VK_TRY(context, deviceMemory->map(device, 0, VK_WHOLE_SIZE, 0, &mapPointer));
    memset(mapPointer, value, static_cast<size_t>(size));

    // if the memory type is not host coherent, we perform an explicit flush
    if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
    {
        // offset defaults to 0 via zero-initialization; VK_WHOLE_SIZE covers the mapping.
        VkMappedMemoryRange mappedRange = {};
        mappedRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        mappedRange.memory = deviceMemory->getHandle();
        mappedRange.size = VK_WHOLE_SIZE;
        ANGLE_VK_TRY(context, vkFlushMappedMemoryRanges(device, 1, &mappedRange));
    }

    deviceMemory->unmap(device);

    return angle::Result::Continue;
}
484
// Allocates and binds memory for |buffer|, instantiating the shared
// buffer-or-image helper for vk::Buffer.
angle::Result AllocateBufferMemory(vk::Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, requestedMemoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, buffer,
                                       deviceMemoryOut, sizeOut);
}
497
// Allocates and binds memory for |image|.  Unlike AllocateBufferMemory, callers
// don't need the resulting property flags, so they are discarded locally.
angle::Result AllocateImageMemory(vk::Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
                                       extraAllocationInfo, image, deviceMemoryOut, sizeOut);
}
509
// Like AllocateImageMemory, but for callers that already hold the image's
// VkMemoryRequirements (e.g. external-memory paths), skipping the driver query.
angle::Result AllocateImageMemoryWithRequirements(vk::Context *context,
                                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                                  const VkMemoryRequirements &memoryRequirements,
                                                  const void *extraAllocationInfo,
                                                  Image *image,
                                                  DeviceMemory *deviceMemoryOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
                                              memoryRequirements, extraAllocationInfo, image,
                                              deviceMemoryOut);
}
522
// Creates a VkShaderModule from SPIR-V code (|shaderCodeSize| in bytes) and
// stamps it with a fresh shader serial from the renderer for cache keying.
angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize)
{
    VkShaderModuleCreateInfo createInfo = {};
    createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    createInfo.flags = 0;
    createInfo.codeSize = shaderCodeSize;
    createInfo.pCode = shaderCode;

    ANGLE_VK_TRY(context, shaderAndSerial->get().init(context->getDevice(), createInfo));
    shaderAndSerial->updateSerial(context->getRenderer()->issueShaderSerial());
    return angle::Result::Continue;
}
538
Get2DTextureType(uint32_t layerCount,GLint samples)539 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
540 {
541 if (layerCount > 1)
542 {
543 if (samples > 1)
544 {
545 return gl::TextureType::_2DMultisampleArray;
546 }
547 else
548 {
549 return gl::TextureType::_2DArray;
550 }
551 }
552 else
553 {
554 if (samples > 1)
555 {
556 return gl::TextureType::_2DMultisample;
557 }
558 else
559 {
560 return gl::TextureType::_2D;
561 }
562 }
563 }
564
// A type-erased (handle type, raw handle) pair representing a Vulkan object
// pending deferred destruction.  Move-only by convention: moves swap contents,
// leaving the source with this object's previous (possibly Invalid) state.
GarbageObject::GarbageObject() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE) {}

GarbageObject::GarbageObject(HandleType handleType, GarbageHandle handle)
    : mHandleType(handleType), mHandle(handle)
{}

GarbageObject::GarbageObject(GarbageObject &&other) : GarbageObject()
{
    *this = std::move(other);
}

GarbageObject &GarbageObject::operator=(GarbageObject &&rhs)
{
    // Swap rather than copy+reset: the moved-from object takes our old handle,
    // keeping destruction responsibility with exactly one object.
    std::swap(mHandle, rhs.mHandle);
    std::swap(mHandleType, rhs.mHandleType);
    return *this;
}
582
// GarbageObject implementation
// Using c-style casts here to avoid conditional compile for MSVC 32-bit
// which fails to compile with reinterpret_cast, requiring static_cast.
// Destroys the wrapped Vulkan object by dispatching on the stored handle type.
// Command buffers are never destroyed individually (pool-allocated) and
// Allocation handles go back through the VMA allocator rather than the device.
void GarbageObject::destroy(RendererVk *renderer)
{
    VkDevice device = renderer->getDevice();
    switch (mHandleType)
    {
        case HandleType::Semaphore:
            vkDestroySemaphore(device, (VkSemaphore)mHandle, nullptr);
            break;
        case HandleType::CommandBuffer:
            // Command buffers are pool allocated.
            UNREACHABLE();
            break;
        case HandleType::Event:
            vkDestroyEvent(device, (VkEvent)mHandle, nullptr);
            break;
        case HandleType::Fence:
            vkDestroyFence(device, (VkFence)mHandle, nullptr);
            break;
        case HandleType::DeviceMemory:
            vkFreeMemory(device, (VkDeviceMemory)mHandle, nullptr);
            break;
        case HandleType::Buffer:
            vkDestroyBuffer(device, (VkBuffer)mHandle, nullptr);
            break;
        case HandleType::BufferView:
            vkDestroyBufferView(device, (VkBufferView)mHandle, nullptr);
            break;
        case HandleType::Image:
            vkDestroyImage(device, (VkImage)mHandle, nullptr);
            break;
        case HandleType::ImageView:
            vkDestroyImageView(device, (VkImageView)mHandle, nullptr);
            break;
        case HandleType::ShaderModule:
            vkDestroyShaderModule(device, (VkShaderModule)mHandle, nullptr);
            break;
        case HandleType::PipelineLayout:
            vkDestroyPipelineLayout(device, (VkPipelineLayout)mHandle, nullptr);
            break;
        case HandleType::RenderPass:
            vkDestroyRenderPass(device, (VkRenderPass)mHandle, nullptr);
            break;
        case HandleType::Pipeline:
            vkDestroyPipeline(device, (VkPipeline)mHandle, nullptr);
            break;
        case HandleType::DescriptorSetLayout:
            vkDestroyDescriptorSetLayout(device, (VkDescriptorSetLayout)mHandle, nullptr);
            break;
        case HandleType::Sampler:
            vkDestroySampler(device, (VkSampler)mHandle, nullptr);
            break;
        case HandleType::DescriptorPool:
            vkDestroyDescriptorPool(device, (VkDescriptorPool)mHandle, nullptr);
            break;
        case HandleType::Framebuffer:
            vkDestroyFramebuffer(device, (VkFramebuffer)mHandle, nullptr);
            break;
        case HandleType::CommandPool:
            vkDestroyCommandPool(device, (VkCommandPool)mHandle, nullptr);
            break;
        case HandleType::QueryPool:
            vkDestroyQueryPool(device, (VkQueryPool)mHandle, nullptr);
            break;
        case HandleType::Allocation:
            vma::FreeMemory(renderer->getAllocator(), (VmaAllocation)mHandle);
            break;
        default:
            UNREACHABLE();
            break;
    }
}
657
// Fills |label| for VK_EXT_debug_utils from a GL debug |source| and |marker|
// string, choosing a per-source color so labels are visually distinguishable
// in graphics debuggers.  |marker| must outlive the label (pLabelName is not copied).
void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label)
{
    // One color per GL_DEBUG_SOURCE_* value, indexed by (source - GL_DEBUG_SOURCE_API).
    static constexpr angle::ColorF kLabelColors[6] = {
        angle::ColorF(1.0f, 0.5f, 0.5f, 1.0f),  // DEBUG_SOURCE_API
        angle::ColorF(0.5f, 1.0f, 0.5f, 1.0f),  // DEBUG_SOURCE_WINDOW_SYSTEM
        angle::ColorF(0.5f, 0.5f, 1.0f, 1.0f),  // DEBUG_SOURCE_SHADER_COMPILER
        angle::ColorF(0.7f, 0.7f, 0.7f, 1.0f),  // DEBUG_SOURCE_THIRD_PARTY
        angle::ColorF(0.5f, 0.8f, 0.9f, 1.0f),  // DEBUG_SOURCE_APPLICATION
        angle::ColorF(0.9f, 0.8f, 0.5f, 1.0f),  // DEBUG_SOURCE_OTHER
    };

    int colorIndex = source - GL_DEBUG_SOURCE_API;
    ASSERT(colorIndex >= 0 && static_cast<size_t>(colorIndex) < ArraySize(kLabelColors));

    label->sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
    label->pNext = nullptr;
    label->pLabelName = marker;
    kLabelColors[colorIndex].writeData(label->color);
}
677 } // namespace vk
678
679 namespace gl_vk
680 {
681
// Maps a GL texture filter to a VkFilter.  Mipmap selection is handled
// separately (see GetSamplerMipmapMode); only the min/mag base filter matters here.
VkFilter GetFilter(const GLenum filter)
{
    switch (filter)
    {
        case GL_LINEAR_MIPMAP_LINEAR:
        case GL_LINEAR_MIPMAP_NEAREST:
        case GL_LINEAR:
            return VK_FILTER_LINEAR;
        case GL_NEAREST_MIPMAP_LINEAR:
        case GL_NEAREST_MIPMAP_NEAREST:
        case GL_NEAREST:
            return VK_FILTER_NEAREST;
        default:
            UNIMPLEMENTED();
            return VK_FILTER_MAX_ENUM;
    }
}
699
// Maps a GL minification filter to a VkSamplerMipmapMode: *_MIPMAP_LINEAR
// interpolates between mip levels; everything else (including non-mipmapped
// filters) selects the nearest level.
VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
{
    switch (filter)
    {
        case GL_LINEAR_MIPMAP_LINEAR:
        case GL_NEAREST_MIPMAP_LINEAR:
            return VK_SAMPLER_MIPMAP_MODE_LINEAR;
        case GL_LINEAR:
        case GL_NEAREST:
        case GL_NEAREST_MIPMAP_NEAREST:
        case GL_LINEAR_MIPMAP_NEAREST:
            return VK_SAMPLER_MIPMAP_MODE_NEAREST;
        default:
            UNIMPLEMENTED();
            return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
    }
}
717
// Maps a GL texture wrap mode to the corresponding VkSamplerAddressMode.
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
{
    switch (wrap)
    {
        case GL_REPEAT:
            return VK_SAMPLER_ADDRESS_MODE_REPEAT;
        case GL_MIRRORED_REPEAT:
            return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
        case GL_CLAMP_TO_BORDER:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
        case GL_CLAMP_TO_EDGE:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        default:
            UNIMPLEMENTED();
            return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
    }
}
735
GetRect(const gl::Rectangle & source)736 VkRect2D GetRect(const gl::Rectangle &source)
737 {
738 return {{source.x, source.y},
739 {static_cast<uint32_t>(source.width), static_cast<uint32_t>(source.height)}};
740 }
741
// Maps a GL primitive mode to a VkPrimitiveTopology.  Note LineLoop maps to
// LINE_STRIP here since Vulkan has no loop topology — presumably the closing
// segment is emulated elsewhere in the backend (not visible in this file).
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
{
    switch (mode)
    {
        case gl::PrimitiveMode::Triangles:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
        case gl::PrimitiveMode::Points:
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        case gl::PrimitiveMode::Lines:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
        case gl::PrimitiveMode::LineStrip:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case gl::PrimitiveMode::TriangleFan:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
        case gl::PrimitiveMode::TriangleStrip:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        case gl::PrimitiveMode::LineLoop:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        default:
            UNREACHABLE();
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    }
}
765
// Derives the Vulkan cull mode from GL rasterizer state: no culling unless the
// cullFace toggle is on, otherwise the selected face set.
VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
{
    if (!rasterState.cullFace)
    {
        return VK_CULL_MODE_NONE;
    }

    switch (rasterState.cullMode)
    {
        case gl::CullFaceMode::Front:
            return VK_CULL_MODE_FRONT_BIT;
        case gl::CullFaceMode::Back:
            return VK_CULL_MODE_BACK_BIT;
        case gl::CullFaceMode::FrontAndBack:
            return VK_CULL_MODE_FRONT_AND_BACK;
        default:
            UNREACHABLE();
            return VK_CULL_MODE_NONE;
    }
}
786
// Maps GL front-face winding to Vulkan.  The mapping is intentionally inverted
// (GL_CCW -> CLOCKWISE) to compensate for the flipped Y between the two APIs;
// |invertCullFace| flips it back (e.g. for flipped render targets).
VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
{
    // Invert CW and CCW to have the same behavior as OpenGL.
    switch (frontFace)
    {
        case GL_CW:
            return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
        case GL_CCW:
            return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
        default:
            UNREACHABLE();
            return VK_FRONT_FACE_CLOCKWISE;
    }
}
801
// Maps a GL sample count to the corresponding VkSampleCountFlagBits.
// 0 (unspecified) is treated as single-sampled.
VkSampleCountFlagBits GetSamples(GLint sampleCount)
{
    switch (sampleCount)
    {
        case 0:
        case 1:
            return VK_SAMPLE_COUNT_1_BIT;
        case 2:
            return VK_SAMPLE_COUNT_2_BIT;
        case 4:
            return VK_SAMPLE_COUNT_4_BIT;
        case 8:
            return VK_SAMPLE_COUNT_8_BIT;
        case 16:
            return VK_SAMPLE_COUNT_16_BIT;
        case 32:
            return VK_SAMPLE_COUNT_32_BIT;
        default:
            UNREACHABLE();
            return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
    }
}
824
// Maps a GL texture swizzle source (GL_RED/GREEN/BLUE/ALPHA/ZERO/ONE) to the
// corresponding VkComponentSwizzle.
VkComponentSwizzle GetSwizzle(const GLenum swizzle)
{
    switch (swizzle)
    {
        case GL_ALPHA:
            return VK_COMPONENT_SWIZZLE_A;
        case GL_RED:
            return VK_COMPONENT_SWIZZLE_R;
        case GL_GREEN:
            return VK_COMPONENT_SWIZZLE_G;
        case GL_BLUE:
            return VK_COMPONENT_SWIZZLE_B;
        case GL_ZERO:
            return VK_COMPONENT_SWIZZLE_ZERO;
        case GL_ONE:
            return VK_COMPONENT_SWIZZLE_ONE;
        default:
            UNREACHABLE();
            return VK_COMPONENT_SWIZZLE_IDENTITY;
    }
}
846
// Maps a GL comparison function (depth/stencil/sampler-compare) to VkCompareOp.
VkCompareOp GetCompareOp(const GLenum compareFunc)
{
    switch (compareFunc)
    {
        case GL_NEVER:
            return VK_COMPARE_OP_NEVER;
        case GL_LESS:
            return VK_COMPARE_OP_LESS;
        case GL_EQUAL:
            return VK_COMPARE_OP_EQUAL;
        case GL_LEQUAL:
            return VK_COMPARE_OP_LESS_OR_EQUAL;
        case GL_GREATER:
            return VK_COMPARE_OP_GREATER;
        case GL_NOTEQUAL:
            return VK_COMPARE_OP_NOT_EQUAL;
        case GL_GEQUAL:
            return VK_COMPARE_OP_GREATER_OR_EQUAL;
        case GL_ALWAYS:
            return VK_COMPARE_OP_ALWAYS;
        default:
            UNREACHABLE();
            return VK_COMPARE_OP_ALWAYS;
    }
}
872
GetOffset(const gl::Offset & glOffset,VkOffset3D * vkOffset)873 void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
874 {
875 vkOffset->x = glOffset.x;
876 vkOffset->y = glOffset.y;
877 vkOffset->z = glOffset.z;
878 }
879
GetExtent(const gl::Extents & glExtent,VkExtent3D * vkExtent)880 void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
881 {
882 vkExtent->width = glExtent.width;
883 vkExtent->height = glExtent.height;
884 vkExtent->depth = glExtent.depth;
885 }
886
GetImageType(gl::TextureType textureType)887 VkImageType GetImageType(gl::TextureType textureType)
888 {
889 switch (textureType)
890 {
891 case gl::TextureType::_2D:
892 case gl::TextureType::_2DArray:
893 case gl::TextureType::_2DMultisample:
894 case gl::TextureType::_2DMultisampleArray:
895 case gl::TextureType::CubeMap:
896 case gl::TextureType::External:
897 return VK_IMAGE_TYPE_2D;
898 case gl::TextureType::_3D:
899 return VK_IMAGE_TYPE_3D;
900 default:
901 // We will need to implement all the texture types for ES3+.
902 UNIMPLEMENTED();
903 return VK_IMAGE_TYPE_MAX_ENUM;
904 }
905 }
906
GetImageViewType(gl::TextureType textureType)907 VkImageViewType GetImageViewType(gl::TextureType textureType)
908 {
909 switch (textureType)
910 {
911 case gl::TextureType::_2D:
912 case gl::TextureType::_2DMultisample:
913 case gl::TextureType::External:
914 return VK_IMAGE_VIEW_TYPE_2D;
915 case gl::TextureType::_2DArray:
916 case gl::TextureType::_2DMultisampleArray:
917 return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
918 case gl::TextureType::_3D:
919 return VK_IMAGE_VIEW_TYPE_3D;
920 case gl::TextureType::CubeMap:
921 return VK_IMAGE_VIEW_TYPE_CUBE;
922 default:
923 // We will need to implement all the texture types for ES3+.
924 UNIMPLEMENTED();
925 return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
926 }
927 }
928
GetColorComponentFlags(bool red,bool green,bool blue,bool alpha)929 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
930 {
931 return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
932 (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
933 }
934
GetShaderStageFlags(gl::ShaderBitSet activeShaders)935 VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
936 {
937 VkShaderStageFlags flags = 0;
938 for (const gl::ShaderType shaderType : activeShaders)
939 {
940 flags |= kShaderStageMap[shaderType];
941 }
942 return flags;
943 }
944
GetViewport(const gl::Rectangle & viewport,float nearPlane,float farPlane,bool invertViewport,GLint renderAreaHeight,VkViewport * viewportOut)945 void GetViewport(const gl::Rectangle &viewport,
946 float nearPlane,
947 float farPlane,
948 bool invertViewport,
949 GLint renderAreaHeight,
950 VkViewport *viewportOut)
951 {
952 viewportOut->x = static_cast<float>(viewport.x);
953 viewportOut->y = static_cast<float>(viewport.y);
954 viewportOut->width = static_cast<float>(viewport.width);
955 viewportOut->height = static_cast<float>(viewport.height);
956 viewportOut->minDepth = gl::clamp01(nearPlane);
957 viewportOut->maxDepth = gl::clamp01(farPlane);
958
959 if (invertViewport)
960 {
961 viewportOut->y = static_cast<float>(renderAreaHeight - viewport.y);
962 viewportOut->height = -viewportOut->height;
963 }
964 }
965
GetExtentsAndLayerCount(gl::TextureType textureType,const gl::Extents & extents,VkExtent3D * extentsOut,uint32_t * layerCountOut)966 void GetExtentsAndLayerCount(gl::TextureType textureType,
967 const gl::Extents &extents,
968 VkExtent3D *extentsOut,
969 uint32_t *layerCountOut)
970 {
971 extentsOut->width = extents.width;
972 extentsOut->height = extents.height;
973
974 switch (textureType)
975 {
976 case gl::TextureType::CubeMap:
977 extentsOut->depth = 1;
978 *layerCountOut = gl::kCubeFaceCount;
979 break;
980
981 case gl::TextureType::_2DArray:
982 case gl::TextureType::_2DMultisampleArray:
983 extentsOut->depth = 1;
984 *layerCountOut = extents.depth;
985 break;
986
987 default:
988 extentsOut->depth = extents.depth;
989 *layerCountOut = 1;
990 break;
991 }
992 }
993 } // namespace gl_vk
994
995 namespace vk_gl
996 {
AddSampleCounts(VkSampleCountFlags sampleCounts,gl::SupportedSampleSet * setOut)997 void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
998 {
999 // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b. At the time of this
1000 // writing, b is in [0, 6], however, we test all 32 bits in case the enum is extended.
1001 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1002 {
1003 setOut->insert(static_cast<GLuint>(1 << bit));
1004 }
1005 }
1006
GetMaxSampleCount(VkSampleCountFlags sampleCounts)1007 GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
1008 {
1009 GLuint maxCount = 0;
1010 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1011 {
1012 maxCount = static_cast<GLuint>(1 << bit);
1013 }
1014 return maxCount;
1015 }
1016
GetSampleCount(VkSampleCountFlags supportedCounts,GLuint requestedCount)1017 GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
1018 {
1019 for (size_t bit : angle::BitSet32<32>(supportedCounts & kSupportedSampleCounts))
1020 {
1021 GLuint sampleCount = static_cast<GLuint>(1 << bit);
1022 if (sampleCount >= requestedCount)
1023 {
1024 return sampleCount;
1025 }
1026 }
1027
1028 UNREACHABLE();
1029 return 0;
1030 }
1031 } // namespace vk_gl
1032 } // namespace rx
1033