1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 // Helper functions for the Vulkan Renderer.
8 //
9
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <cstring>

#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
18
19 namespace
20 {
GetStagingBufferUsageFlags(rx::vk::StagingUsage usage)21 VkImageUsageFlags GetStagingBufferUsageFlags(rx::vk::StagingUsage usage)
22 {
23 switch (usage)
24 {
25 case rx::vk::StagingUsage::Read:
26 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
27 case rx::vk::StagingUsage::Write:
28 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
29 case rx::vk::StagingUsage::Both:
30 return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
31 default:
32 UNREACHABLE();
33 return 0;
34 }
35 }
36 } // anonymous namespace
37
38 namespace angle
39 {
ToEGL(Result result,rx::DisplayVk * displayVk,EGLint errorCode)40 egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode)
41 {
42 if (result != angle::Result::Continue)
43 {
44 return displayVk->getEGLError(errorCode);
45 }
46 else
47 {
48 return egl::NoError();
49 }
50 }
51 } // namespace angle
52
53 namespace rx
54 {
// Mirrors std_validation_str in loader.c
// Name of the single meta-layer that enables the full standard validation stack.
const char *g_VkStdValidationLayerName = "VK_LAYER_LUNARG_standard_validation";
// Individual validation layers, used as a fallback when the meta-layer is unavailable.
const char *g_VkValidationLayerNames[] = {
    "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
    "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
    "VK_LAYER_GOOGLE_unique_objects"};
61
HasValidationLayer(const std::vector<VkLayerProperties> & layerProps,const char * layerName)62 bool HasValidationLayer(const std::vector<VkLayerProperties> &layerProps, const char *layerName)
63 {
64 for (const auto &layerProp : layerProps)
65 {
66 if (std::string(layerProp.layerName) == layerName)
67 {
68 return true;
69 }
70 }
71
72 return false;
73 }
74
HasStandardValidationLayer(const std::vector<VkLayerProperties> & layerProps)75 bool HasStandardValidationLayer(const std::vector<VkLayerProperties> &layerProps)
76 {
77 return HasValidationLayer(layerProps, g_VkStdValidationLayerName);
78 }
79
HasValidationLayers(const std::vector<VkLayerProperties> & layerProps)80 bool HasValidationLayers(const std::vector<VkLayerProperties> &layerProps)
81 {
82 for (const char *layerName : g_VkValidationLayerNames)
83 {
84 if (!HasValidationLayer(layerProps, layerName))
85 {
86 return false;
87 }
88 }
89
90 return true;
91 }
92
// Picks a memory type that satisfies both the driver-reported |memoryRequirements| and the
// caller's |requestedMemoryPropertyFlags|, then allocates |memoryRequirements.size| bytes
// from it.  On success |memoryPropertyFlagsOut| receives the full property flags of the
// chosen type (a superset of the requested flags) and |deviceMemoryOut| owns the allocation.
// |extraAllocationInfo| is chained into VkMemoryAllocateInfo::pNext and may be nullptr.
angle::Result FindAndAllocateCompatibleMemory(vk::Context *context,
                                              const vk::MemoryProperties &memoryProperties,
                                              VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                              VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                              const VkMemoryRequirements &memoryRequirements,
                                              const void *extraAllocationInfo,
                                              vk::DeviceMemory *deviceMemoryOut)
{
    uint32_t memoryTypeIndex = 0;
    ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(context, memoryRequirements,
                                                         requestedMemoryPropertyFlags,
                                                         memoryPropertyFlagsOut, &memoryTypeIndex));

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = extraAllocationInfo;
    allocInfo.memoryTypeIndex = memoryTypeIndex;
    allocInfo.allocationSize = memoryRequirements.size;

    ANGLE_VK_TRY(context, deviceMemoryOut->allocate(context->getDevice(), allocInfo));
    return angle::Result::Continue;
}
115
// Allocates device memory matching |memoryRequirements| and |requestedMemoryPropertyFlags|,
// then binds |bufferOrImage| (a vk::Buffer or vk::Image) to the new allocation.
// See FindAndAllocateCompatibleMemory for the meaning of the out-parameters.
template <typename T>
angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
                                                 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                                 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                 const VkMemoryRequirements &memoryRequirements,
                                                 const void *extraAllocationInfo,
                                                 T *bufferOrImage,
                                                 vk::DeviceMemory *deviceMemoryOut)
{
    const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();

    ANGLE_TRY(FindAndAllocateCompatibleMemory(
        context, memoryProperties, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
        memoryRequirements, extraAllocationInfo, deviceMemoryOut));
    ANGLE_VK_TRY(context, bufferOrImage->bindMemory(context->getDevice(), *deviceMemoryOut));
    return angle::Result::Continue;
}
133
// Convenience wrapper: queries the driver for the object's memory requirements, then
// allocates and binds memory via AllocateAndBindBufferOrImageMemory.
template <typename T>
angle::Result AllocateBufferOrImageMemory(vk::Context *context,
                                          VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                          VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                          const void *extraAllocationInfo,
                                          T *bufferOrImage,
                                          vk::DeviceMemory *deviceMemoryOut)
{
    // Call driver to determine memory requirements.
    VkMemoryRequirements memoryRequirements;
    bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);

    ANGLE_TRY(AllocateAndBindBufferOrImageMemory(
        context, requestedMemoryPropertyFlags, memoryPropertyFlagsOut, memoryRequirements,
        extraAllocationInfo, bufferOrImage, deviceMemoryOut));

    return angle::Result::Continue;
}
152
// Environment variables consumed by the Vulkan loader to locate layer and ICD manifests.
const char *g_VkLoaderLayersPathEnv = "VK_LAYER_PATH";
const char *g_VkICDPathEnv = "VK_ICD_FILENAMES";
155
// Returns a human-readable description of |result| for error reporting/logging.
// The messages follow the wording of the Vulkan specification.
const char *VulkanResultString(VkResult result)
{
    switch (result)
    {
        case VK_SUCCESS:
            return "Command successfully completed.";
        case VK_NOT_READY:
            return "A fence or query has not yet completed.";
        case VK_TIMEOUT:
            return "A wait operation has not completed in the specified time.";
        case VK_EVENT_SET:
            return "An event is signaled.";
        case VK_EVENT_RESET:
            return "An event is unsignaled.";
        case VK_INCOMPLETE:
            return "A return array was too small for the result.";
        case VK_SUBOPTIMAL_KHR:
            return "A swapchain no longer matches the surface properties exactly, but can still be "
                   "used to present to the surface successfully.";
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            return "A host memory allocation has failed.";
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
            return "A device memory allocation has failed.";
        case VK_ERROR_INITIALIZATION_FAILED:
            return "Initialization of an object could not be completed for implementation-specific "
                   "reasons.";
        case VK_ERROR_DEVICE_LOST:
            return "The logical or physical device has been lost.";
        case VK_ERROR_MEMORY_MAP_FAILED:
            return "Mapping of a memory object has failed.";
        case VK_ERROR_LAYER_NOT_PRESENT:
            return "A requested layer is not present or could not be loaded.";
        case VK_ERROR_EXTENSION_NOT_PRESENT:
            return "A requested extension is not supported.";
        case VK_ERROR_FEATURE_NOT_PRESENT:
            return "A requested feature is not supported.";
        case VK_ERROR_INCOMPATIBLE_DRIVER:
            return "The requested version of Vulkan is not supported by the driver or is otherwise "
                   "incompatible for implementation-specific reasons.";
        case VK_ERROR_TOO_MANY_OBJECTS:
            return "Too many objects of the type have already been created.";
        case VK_ERROR_FORMAT_NOT_SUPPORTED:
            return "A requested format is not supported on this device.";
        case VK_ERROR_SURFACE_LOST_KHR:
            return "A surface is no longer available.";
        case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
            return "The requested window is already connected to a VkSurfaceKHR, or to some other "
                   "non-Vulkan API.";
        case VK_ERROR_OUT_OF_DATE_KHR:
            return "A surface has changed in such a way that it is no longer compatible with the "
                   "swapchain.";
        case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
            return "The display used by a swapchain does not use the same presentable image "
                   "layout, or is incompatible in a way that prevents sharing an image.";
        case VK_ERROR_VALIDATION_FAILED_EXT:
            return "The validation layers detected invalid API usage.";
        default:
            return "Unknown vulkan error code.";
    }
}
216
GetAvailableValidationLayers(const std::vector<VkLayerProperties> & layerProps,bool mustHaveLayers,VulkanLayerVector * enabledLayerNames)217 bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
218 bool mustHaveLayers,
219 VulkanLayerVector *enabledLayerNames)
220 {
221 if (HasStandardValidationLayer(layerProps))
222 {
223 enabledLayerNames->push_back(g_VkStdValidationLayerName);
224 }
225 else if (HasValidationLayers(layerProps))
226 {
227 for (const char *layerName : g_VkValidationLayerNames)
228 {
229 enabledLayerNames->push_back(layerName);
230 }
231 }
232 else
233 {
234 // Generate an error if the layers were explicitly requested, warning otherwise.
235 if (mustHaveLayers)
236 {
237 ERR() << "Vulkan validation layers are missing.";
238 }
239 else
240 {
241 WARN() << "Vulkan validation layers are missing.";
242 }
243
244 return false;
245 }
246
247 return true;
248 }
249
250 namespace vk
251 {
GetDepthStencilAspectFlags(const angle::Format & format)252 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
253 {
254 return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
255 (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
256 }
257
GetFormatAspectFlags(const angle::Format & format)258 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
259 {
260 VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
261 // If the image is not depth stencil, assume color aspect. Note that detecting color formats
262 // is less trivial than depth/stencil, e.g. as block formats don't indicate any bits for RGBA
263 // channels.
264 return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
265 }
266
// Context implementation.
// A Context is a thin wrapper holding the RendererVk; it does not own the renderer.
Context::Context(RendererVk *renderer) : mRenderer(renderer) {}

Context::~Context() {}

// Convenience accessor for the renderer's VkDevice.
VkDevice Context::getDevice() const
{
    return mRenderer->getDevice();
}
276
// MemoryProperties implementation.
MemoryProperties::MemoryProperties() : mMemoryProperties{0} {}

// Queries and caches the physical device's memory heap/type table.  The asserts enforce
// that init() is called exactly once on an empty cache and that the driver reported at
// least one memory type.
void MemoryProperties::init(VkPhysicalDevice physicalDevice)
{
    ASSERT(mMemoryProperties.memoryTypeCount == 0);
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
    ASSERT(mMemoryProperties.memoryTypeCount > 0);
}

// Clears the cached properties (memoryTypeCount returns to 0, so init() may run again).
void MemoryProperties::destroy()
{
    mMemoryProperties = {0};
}
291
// Scans the cached memory types for the first one that is allowed by
// |memoryRequirements.memoryTypeBits| and whose property flags include all of
// |requestedMemoryPropertyFlags|.  On success writes the chosen type's full flags and index;
// on failure reports VK_ERROR_INCOMPATIBLE_DRIVER through the context and returns Stop.
angle::Result MemoryProperties::findCompatibleMemoryIndex(
    Context *context,
    const VkMemoryRequirements &memoryRequirements,
    VkMemoryPropertyFlags requestedMemoryPropertyFlags,
    VkMemoryPropertyFlags *memoryPropertyFlagsOut,
    uint32_t *typeIndexOut) const
{
    // Vulkan guarantees memoryTypeCount fits the 32-bit memoryTypeBits mask.
    ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);

    // Find a compatible memory pool index. If the index doesn't change, we could cache it.
    // Not finding a valid memory pool means an out-of-spec driver, or internal error.
    // TODO(jmadill): Determine if it is possible to cache indexes.
    // TODO(jmadill): More efficient memory allocation.
    for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
    {
        ASSERT(memoryIndex < mMemoryProperties.memoryTypeCount);

        if ((mMemoryProperties.memoryTypes[memoryIndex].propertyFlags &
             requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
        {
            *memoryPropertyFlagsOut = mMemoryProperties.memoryTypes[memoryIndex].propertyFlags;
            *typeIndexOut = static_cast<uint32_t>(memoryIndex);
            return angle::Result::Continue;
        }
    }

    // TODO(jmadill): Add error message to error.
    context->handleError(VK_ERROR_INCOMPATIBLE_DRIVER, __FILE__, ANGLE_FUNCTION, __LINE__);
    return angle::Result::Stop;
}
322
// StagingBuffer implementation.
StagingBuffer::StagingBuffer() : mSize(0) {}

// Immediately destroys the buffer and frees its backing memory, then resets the size.
// (For deferred destruction tied to GPU completion, see dumpResources.)
void StagingBuffer::destroy(VkDevice device)
{
    mBuffer.destroy(device);
    mDeviceMemory.destroy(device);
    mSize = 0;
}
332
// Creates a |size|-byte exclusive-use buffer with transfer usage derived from |usage|, and
// backs it with host-visible, host-coherent memory so the CPU can map it without explicit
// flushes.
angle::Result StagingBuffer::init(Context *context, VkDeviceSize size, StagingUsage usage)
{
    VkBufferCreateInfo createInfo = {};
    createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    createInfo.flags = 0;
    createInfo.size = size;
    createInfo.usage = GetStagingBufferUsageFlags(usage);
    createInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    createInfo.queueFamilyIndexCount = 0;
    createInfo.pQueueFamilyIndices = nullptr;

    VkMemoryPropertyFlags flags =
        (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);

    ANGLE_VK_TRY(context, mBuffer.init(context->getDevice(), createInfo));
    VkMemoryPropertyFlags flagsOut = 0;
    ANGLE_TRY(AllocateBufferMemory(context, flags, &flagsOut, nullptr, &mBuffer, &mDeviceMemory));
    mSize = static_cast<size_t>(size);
    return angle::Result::Continue;
}
353
// Hands the buffer and memory handles to |garbageQueue| tagged with |serial|, so they are
// destroyed later once that serial completes (see GarbageObject::destroyIfComplete) instead
// of being destroyed immediately.
void StagingBuffer::dumpResources(Serial serial, std::vector<vk::GarbageObject> *garbageQueue)
{
    mBuffer.dumpResources(serial, garbageQueue);
    mDeviceMemory.dumpResources(serial, garbageQueue);
}
359
// Allocates and binds memory for |buffer|; thin typed wrapper over the shared
// AllocateBufferOrImageMemory template.
angle::Result AllocateBufferMemory(vk::Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut)
{
    return AllocateBufferOrImageMemory(context, requestedMemoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, buffer,
                                       deviceMemoryOut);
}
371
// Allocates and binds memory for |image|.  The resulting property flags are discarded since
// image callers do not need them.
angle::Result AllocateImageMemory(vk::Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
                                       extraAllocationInfo, image, deviceMemoryOut);
}
382
// Like AllocateImageMemory, but uses caller-provided |memoryRequirements| instead of querying
// the driver (e.g. when requirements were obtained through an extension query).
angle::Result AllocateImageMemoryWithRequirements(vk::Context *context,
                                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                                  const VkMemoryRequirements &memoryRequirements,
                                                  const void *extraAllocationInfo,
                                                  Image *image,
                                                  DeviceMemory *deviceMemoryOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
                                              memoryRequirements, extraAllocationInfo, image,
                                              deviceMemoryOut);
}
395
// Creates a VkShaderModule from |shaderCode| (SPIR-V words; |shaderCodeSize| is in bytes)
// and stamps |shaderAndSerial| with a fresh shader serial from the renderer.
angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize)
{
    VkShaderModuleCreateInfo createInfo = {};
    createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    createInfo.flags = 0;
    createInfo.codeSize = shaderCodeSize;
    createInfo.pCode = shaderCode;

    ANGLE_VK_TRY(context, shaderAndSerial->get().init(context->getDevice(), createInfo));
    shaderAndSerial->updateSerial(context->getRenderer()->issueShaderSerial());
    return angle::Result::Continue;
}
411
Get2DTextureType(uint32_t layerCount,GLint samples)412 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
413 {
414 if (layerCount > 1)
415 {
416 if (samples > 1)
417 {
418 return gl::TextureType::_2DMultisampleArray;
419 }
420 else
421 {
422 return gl::TextureType::_2DArray;
423 }
424 }
425 else
426 {
427 if (samples > 1)
428 {
429 return gl::TextureType::_2DMultisample;
430 }
431 else
432 {
433 return gl::TextureType::_2D;
434 }
435 }
436 }
437
// A default-constructed garbage object holds no handle and an Invalid type.
GarbageObjectBase::GarbageObjectBase() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE)
{}
440
// GarbageObjectBase implementation
// Destroys the stored (type-erased) handle with the vkDestroy*/vkFree* entry point matching
// mHandleType.  Command buffers are never destroyed individually here because they are
// freed with their pool.
void GarbageObjectBase::destroy(VkDevice device)
{
    switch (mHandleType)
    {
        case HandleType::Semaphore:
            vkDestroySemaphore(device, reinterpret_cast<VkSemaphore>(mHandle), nullptr);
            break;
        case HandleType::CommandBuffer:
            // Command buffers are pool allocated.
            UNREACHABLE();
            break;
        case HandleType::Event:
            vkDestroyEvent(device, reinterpret_cast<VkEvent>(mHandle), nullptr);
            break;
        case HandleType::Fence:
            vkDestroyFence(device, reinterpret_cast<VkFence>(mHandle), nullptr);
            break;
        case HandleType::DeviceMemory:
            vkFreeMemory(device, reinterpret_cast<VkDeviceMemory>(mHandle), nullptr);
            break;
        case HandleType::Buffer:
            vkDestroyBuffer(device, reinterpret_cast<VkBuffer>(mHandle), nullptr);
            break;
        case HandleType::BufferView:
            vkDestroyBufferView(device, reinterpret_cast<VkBufferView>(mHandle), nullptr);
            break;
        case HandleType::Image:
            vkDestroyImage(device, reinterpret_cast<VkImage>(mHandle), nullptr);
            break;
        case HandleType::ImageView:
            vkDestroyImageView(device, reinterpret_cast<VkImageView>(mHandle), nullptr);
            break;
        case HandleType::ShaderModule:
            vkDestroyShaderModule(device, reinterpret_cast<VkShaderModule>(mHandle), nullptr);
            break;
        case HandleType::PipelineLayout:
            vkDestroyPipelineLayout(device, reinterpret_cast<VkPipelineLayout>(mHandle), nullptr);
            break;
        case HandleType::RenderPass:
            vkDestroyRenderPass(device, reinterpret_cast<VkRenderPass>(mHandle), nullptr);
            break;
        case HandleType::Pipeline:
            vkDestroyPipeline(device, reinterpret_cast<VkPipeline>(mHandle), nullptr);
            break;
        case HandleType::DescriptorSetLayout:
            vkDestroyDescriptorSetLayout(device, reinterpret_cast<VkDescriptorSetLayout>(mHandle),
                                         nullptr);
            break;
        case HandleType::Sampler:
            vkDestroySampler(device, reinterpret_cast<VkSampler>(mHandle), nullptr);
            break;
        case HandleType::DescriptorPool:
            vkDestroyDescriptorPool(device, reinterpret_cast<VkDescriptorPool>(mHandle), nullptr);
            break;
        case HandleType::Framebuffer:
            vkDestroyFramebuffer(device, reinterpret_cast<VkFramebuffer>(mHandle), nullptr);
            break;
        case HandleType::CommandPool:
            vkDestroyCommandPool(device, reinterpret_cast<VkCommandPool>(mHandle), nullptr);
            break;
        case HandleType::QueryPool:
            vkDestroyQueryPool(device, reinterpret_cast<VkQueryPool>(mHandle), nullptr);
            break;
        default:
            UNREACHABLE();
            break;
    }
}
510
// GarbageObject implementation.
// GarbageObject is a copyable value type: a type-erased handle plus the serial after which
// it may safely be destroyed.
GarbageObject::GarbageObject() : mSerial() {}

GarbageObject::GarbageObject(const GarbageObject &other) = default;

GarbageObject &GarbageObject::operator=(const GarbageObject &other) = default;
517
destroyIfComplete(VkDevice device,Serial completedSerial)518 bool GarbageObject::destroyIfComplete(VkDevice device, Serial completedSerial)
519 {
520 if (completedSerial >= mSerial)
521 {
522 destroy(device);
523 return true;
524 }
525
526 return false;
527 }
528
529 } // namespace vk
530
// Function pointers for instance-level extension entry points, resolved lazily by the
// Init*Functions helpers below.  All start out null and must not be called before loading.

// VK_EXT_debug_utils
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = nullptr;
PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = nullptr;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = nullptr;
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = nullptr;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = nullptr;

// VK_EXT_debug_report
PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = nullptr;
PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;

// VK_KHR_get_physical_device_properties2
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = nullptr;

// VK_KHR_external_semaphore_fd
PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = nullptr;

#if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = nullptr;
#endif

// Resolves one entry point via vkGetInstanceProcAddr into the same-named global above.
// Expects a local |instance| in the caller's scope; asserts the lookup succeeded.
#define GET_FUNC(vkName)                                                                   \
    do                                                                                     \
    {                                                                                      \
        vkName = reinterpret_cast<PFN_##vkName>(vkGetInstanceProcAddr(instance, #vkName)); \
        ASSERT(vkName);                                                                    \
    } while (0)
559
// Loads the VK_EXT_debug_utils entry points.  Call only if the extension was enabled.
void InitDebugUtilsEXTFunctions(VkInstance instance)
{
    GET_FUNC(vkCreateDebugUtilsMessengerEXT);
    GET_FUNC(vkDestroyDebugUtilsMessengerEXT);
    GET_FUNC(vkCmdBeginDebugUtilsLabelEXT);
    GET_FUNC(vkCmdEndDebugUtilsLabelEXT);
    GET_FUNC(vkCmdInsertDebugUtilsLabelEXT);
}

// Loads the VK_EXT_debug_report entry points.
void InitDebugReportEXTFunctions(VkInstance instance)
{
    GET_FUNC(vkCreateDebugReportCallbackEXT);
    GET_FUNC(vkDestroyDebugReportCallbackEXT);
}

// Loads the VK_KHR_get_physical_device_properties2 entry point.
void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance)
{
    GET_FUNC(vkGetPhysicalDeviceProperties2KHR);
}

#if defined(ANGLE_PLATFORM_FUCHSIA)
// Loads the VK_FUCHSIA_imagepipe_surface entry point.
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance)
{
    GET_FUNC(vkCreateImagePipeSurfaceFUCHSIA);
}
#endif

#if defined(ANGLE_PLATFORM_ANDROID)
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID =
    nullptr;
PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = nullptr;
// Loads the VK_ANDROID_external_memory_android_hardware_buffer entry points.
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance)
{
    GET_FUNC(vkGetAndroidHardwareBufferPropertiesANDROID);
    GET_FUNC(vkGetMemoryAndroidHardwareBufferANDROID);
}
#endif

// Loads the VK_KHR_external_semaphore_fd entry point.
void InitExternalSemaphoreFdFunctions(VkInstance instance)
{
    GET_FUNC(vkImportSemaphoreFdKHR);
}

#undef GET_FUNC
604
605 namespace gl_vk
606 {
607
GetFilter(const GLenum filter)608 VkFilter GetFilter(const GLenum filter)
609 {
610 switch (filter)
611 {
612 case GL_LINEAR_MIPMAP_LINEAR:
613 case GL_LINEAR_MIPMAP_NEAREST:
614 case GL_LINEAR:
615 return VK_FILTER_LINEAR;
616 case GL_NEAREST_MIPMAP_LINEAR:
617 case GL_NEAREST_MIPMAP_NEAREST:
618 case GL_NEAREST:
619 return VK_FILTER_NEAREST;
620 default:
621 UNIMPLEMENTED();
622 return VK_FILTER_MAX_ENUM;
623 }
624 }
625
GetSamplerMipmapMode(const GLenum filter)626 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
627 {
628 switch (filter)
629 {
630 case GL_LINEAR:
631 case GL_LINEAR_MIPMAP_LINEAR:
632 case GL_NEAREST_MIPMAP_LINEAR:
633 return VK_SAMPLER_MIPMAP_MODE_LINEAR;
634 case GL_NEAREST:
635 case GL_NEAREST_MIPMAP_NEAREST:
636 case GL_LINEAR_MIPMAP_NEAREST:
637 return VK_SAMPLER_MIPMAP_MODE_NEAREST;
638 default:
639 UNIMPLEMENTED();
640 return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
641 }
642 }
643
// Maps a GL texture wrap mode to the equivalent Vulkan sampler address mode.
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
{
    switch (wrap)
    {
        case GL_REPEAT:
            return VK_SAMPLER_ADDRESS_MODE_REPEAT;
        case GL_MIRRORED_REPEAT:
            return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
        case GL_CLAMP_TO_BORDER:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
        case GL_CLAMP_TO_EDGE:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        default:
            UNIMPLEMENTED();
            return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
    }
}
661
GetRect(const gl::Rectangle & source)662 VkRect2D GetRect(const gl::Rectangle &source)
663 {
664 return {{source.x, source.y},
665 {static_cast<uint32_t>(source.width), static_cast<uint32_t>(source.height)}};
666 }
667
// Maps a GL primitive mode to the Vulkan topology.
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
{
    switch (mode)
    {
        case gl::PrimitiveMode::Triangles:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
        case gl::PrimitiveMode::Points:
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        case gl::PrimitiveMode::Lines:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
        case gl::PrimitiveMode::LineStrip:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case gl::PrimitiveMode::TriangleFan:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
        case gl::PrimitiveMode::TriangleStrip:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        case gl::PrimitiveMode::LineLoop:
            // Vulkan has no line-loop topology; it is mapped to a line strip.
            // NOTE(review): presumably the closing segment is emulated elsewhere
            // (index-buffer rewrite) — confirm against the line loop handling code.
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        default:
            UNREACHABLE();
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    }
}
691
GetCullMode(const gl::RasterizerState & rasterState)692 VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
693 {
694 if (!rasterState.cullFace)
695 {
696 return VK_CULL_MODE_NONE;
697 }
698
699 switch (rasterState.cullMode)
700 {
701 case gl::CullFaceMode::Front:
702 return VK_CULL_MODE_FRONT_BIT;
703 case gl::CullFaceMode::Back:
704 return VK_CULL_MODE_BACK_BIT;
705 case gl::CullFaceMode::FrontAndBack:
706 return VK_CULL_MODE_FRONT_AND_BACK;
707 default:
708 UNREACHABLE();
709 return VK_CULL_MODE_NONE;
710 }
711 }
712
GetFrontFace(GLenum frontFace,bool invertCullFace)713 VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
714 {
715 // Invert CW and CCW to have the same behavior as OpenGL.
716 switch (frontFace)
717 {
718 case GL_CW:
719 return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
720 case GL_CCW:
721 return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
722 default:
723 UNREACHABLE();
724 return VK_FRONT_FACE_CLOCKWISE;
725 }
726 }
727
// Maps a GL sample count to the Vulkan sample-count bit.  A count of 0 (non-multisampled
// resource) is treated the same as 1.
VkSampleCountFlagBits GetSamples(GLint sampleCount)
{
    switch (sampleCount)
    {
        case 0:
        case 1:
            return VK_SAMPLE_COUNT_1_BIT;
        case 2:
            return VK_SAMPLE_COUNT_2_BIT;
        case 4:
            return VK_SAMPLE_COUNT_4_BIT;
        case 8:
            return VK_SAMPLE_COUNT_8_BIT;
        case 16:
            return VK_SAMPLE_COUNT_16_BIT;
        case 32:
            return VK_SAMPLE_COUNT_32_BIT;
        default:
            UNREACHABLE();
            return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
    }
}
750
GetSwizzle(const GLenum swizzle)751 VkComponentSwizzle GetSwizzle(const GLenum swizzle)
752 {
753 switch (swizzle)
754 {
755 case GL_ALPHA:
756 return VK_COMPONENT_SWIZZLE_A;
757 case GL_RED:
758 return VK_COMPONENT_SWIZZLE_R;
759 case GL_GREEN:
760 return VK_COMPONENT_SWIZZLE_G;
761 case GL_BLUE:
762 return VK_COMPONENT_SWIZZLE_B;
763 case GL_ZERO:
764 return VK_COMPONENT_SWIZZLE_ZERO;
765 case GL_ONE:
766 return VK_COMPONENT_SWIZZLE_ONE;
767 default:
768 UNREACHABLE();
769 return VK_COMPONENT_SWIZZLE_IDENTITY;
770 }
771 }
772
// Maps a GL comparison function (depth/stencil/sampler compare) to the Vulkan compare op.
VkCompareOp GetCompareOp(const GLenum compareFunc)
{
    switch (compareFunc)
    {
        case GL_NEVER:
            return VK_COMPARE_OP_NEVER;
        case GL_LESS:
            return VK_COMPARE_OP_LESS;
        case GL_EQUAL:
            return VK_COMPARE_OP_EQUAL;
        case GL_LEQUAL:
            return VK_COMPARE_OP_LESS_OR_EQUAL;
        case GL_GREATER:
            return VK_COMPARE_OP_GREATER;
        case GL_NOTEQUAL:
            return VK_COMPARE_OP_NOT_EQUAL;
        case GL_GEQUAL:
            return VK_COMPARE_OP_GREATER_OR_EQUAL;
        case GL_ALWAYS:
            return VK_COMPARE_OP_ALWAYS;
        default:
            UNREACHABLE();
            return VK_COMPARE_OP_ALWAYS;
    }
}
798
GetOffset(const gl::Offset & glOffset,VkOffset3D * vkOffset)799 void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
800 {
801 vkOffset->x = glOffset.x;
802 vkOffset->y = glOffset.y;
803 vkOffset->z = glOffset.z;
804 }
805
GetExtent(const gl::Extents & glExtent,VkExtent3D * vkExtent)806 void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
807 {
808 vkExtent->width = glExtent.width;
809 vkExtent->height = glExtent.height;
810 vkExtent->depth = glExtent.depth;
811 }
812
// Maps a GL texture type to the Vulkan image type.  Note that array, cube and external
// textures are all 2D images in Vulkan; array-ness is expressed via layers and cube-ness via
// image/view flags elsewhere.
VkImageType GetImageType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::_2DMultisampleArray:
        case gl::TextureType::CubeMap:
        case gl::TextureType::External:
            return VK_IMAGE_TYPE_2D;
        case gl::TextureType::_3D:
            return VK_IMAGE_TYPE_3D;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_TYPE_MAX_ENUM;
    }
}
832
// Maps a GL texture type to the Vulkan image view type (unlike GetImageType, view types
// distinguish arrays and cube maps).
VkImageViewType GetImageViewType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::External:
            return VK_IMAGE_VIEW_TYPE_2D;
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisampleArray:
            return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        case gl::TextureType::_3D:
            return VK_IMAGE_VIEW_TYPE_3D;
        case gl::TextureType::CubeMap:
            return VK_IMAGE_VIEW_TYPE_CUBE;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
    }
}
854
GetColorComponentFlags(bool red,bool green,bool blue,bool alpha)855 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
856 {
857 return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
858 (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
859 }
860
// Converts a set of active GL shader stages to the equivalent Vulkan stage flag mask, using
// the kShaderStageMap lookup table.
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
{
    VkShaderStageFlags flags = 0;
    for (const gl::ShaderType shaderType : activeShaders)
    {
        flags |= kShaderStageMap[shaderType];
    }
    return flags;
}
870
// Converts a GL viewport (plus depth range) to a Vulkan viewport.  Depth bounds are clamped
// to [0, 1].  When |invertViewport| is set, the viewport is flipped vertically by moving the
// origin and negating the height.  NOTE(review): negative-height viewports require
// VK_KHR_maintenance1 (or Vulkan 1.1) — confirm the renderer guarantees it on this path.
void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut)
{
    viewportOut->x = static_cast<float>(viewport.x);
    viewportOut->y = static_cast<float>(viewport.y);
    viewportOut->width = static_cast<float>(viewport.width);
    viewportOut->height = static_cast<float>(viewport.height);
    viewportOut->minDepth = gl::clamp01(nearPlane);
    viewportOut->maxDepth = gl::clamp01(farPlane);

    if (invertViewport)
    {
        viewportOut->y = static_cast<float>(renderAreaHeight - viewport.y);
        viewportOut->height = -viewportOut->height;
    }
}
891
// Splits gl::Extents into a Vulkan extent and a layer count based on the texture type:
// for array types and cube maps the GL depth (or face count) becomes the Vulkan layer count
// with depth fixed to 1; other types keep the depth and have a single layer.
void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut)
{
    extentsOut->width = extents.width;
    extentsOut->height = extents.height;

    switch (textureType)
    {
        case gl::TextureType::CubeMap:
            extentsOut->depth = 1;
            *layerCountOut = gl::kCubeFaceCount;
            break;

        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisampleArray:
            extentsOut->depth = 1;
            *layerCountOut = extents.depth;
            break;

        default:
            extentsOut->depth = extents.depth;
            *layerCountOut = 1;
            break;
    }
}
919 } // namespace gl_vk
920
921 namespace vk_gl
922 {
// Inserts each sample count present in the Vulkan |sampleCounts| bitmask into |setOut|.
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
{
    // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b. At the time of this
    // writing, b is in [0, 6], however, we test all 32 bits in case the enum is extended.
    for (size_t bit : angle::BitSet32<32>(sampleCounts))
    {
        setOut->insert(static_cast<GLuint>(1 << bit));
    }
}
932
GetMaxSampleCount(VkSampleCountFlags sampleCounts)933 GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
934 {
935 GLuint maxCount = 0;
936 for (size_t bit : angle::BitSet32<32>(sampleCounts))
937 {
938 maxCount = static_cast<GLuint>(1 << bit);
939 }
940 return maxCount;
941 }
942
// Returns the smallest supported sample count that is >= |requestedCount|, relying on
// BitSet32 iterating bits in ascending order.  Callers must ensure the request does not
// exceed the maximum supported count; otherwise this hits UNREACHABLE and returns 0.
GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
{
    for (size_t bit : angle::BitSet32<32>(supportedCounts))
    {
        GLuint sampleCount = static_cast<GLuint>(1 << bit);
        if (sampleCount >= requestedCount)
        {
            return sampleCount;
        }
    }

    UNREACHABLE();
    return 0;
}
957 } // namespace vk_gl
958 } // namespace rx
959