1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 // Helper functions for the Vulkan Renderer.
8 //
9
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <cstring>

#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/ResourceVk.h"
#include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
19
20 namespace angle
21 {
ToEGL(Result result,rx::DisplayVk * displayVk,EGLint errorCode)22 egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode)
23 {
24 if (result != angle::Result::Continue)
25 {
26 return displayVk->getEGLError(errorCode);
27 }
28 else
29 {
30 return egl::NoError();
31 }
32 }
33 } // namespace angle
34
35 namespace rx
36 {
37 namespace
38 {
39 // Pick an arbitrary value to initialize non-zero memory for sanitization. Note that 0x3F3F3F3F
40 // as float is about 0.75.
41 constexpr int kNonZeroInitValue = 0x3F;
42
GetStagingBufferUsageFlags(vk::StagingUsage usage)43 VkImageUsageFlags GetStagingBufferUsageFlags(vk::StagingUsage usage)
44 {
45 switch (usage)
46 {
47 case vk::StagingUsage::Read:
48 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
49 case vk::StagingUsage::Write:
50 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
51 case vk::StagingUsage::Both:
52 return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
53 default:
54 UNREACHABLE();
55 return 0;
56 }
57 }
58
FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties & memoryProperties,const VkMemoryRequirements & memoryRequirements,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,uint32_t * typeIndexOut)59 bool FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties &memoryProperties,
60 const VkMemoryRequirements &memoryRequirements,
61 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
62 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
63 uint32_t *typeIndexOut)
64 {
65 for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
66 {
67 ASSERT(memoryIndex < memoryProperties.memoryTypeCount);
68
69 if ((memoryProperties.memoryTypes[memoryIndex].propertyFlags &
70 requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
71 {
72 *memoryPropertyFlagsOut = memoryProperties.memoryTypes[memoryIndex].propertyFlags;
73 *typeIndexOut = static_cast<uint32_t>(memoryIndex);
74 return true;
75 }
76 }
77
78 return false;
79 }
80
// Selects a compatible memory type (with host-visible / device-local fallbacks handled inside
// findCompatibleMemoryIndex) and allocates device memory of memoryRequirements.size from it.
// |extraAllocationInfo| is chained onto VkMemoryAllocateInfo::pNext; a non-null value also marks
// the allocation as external memory for index selection. On success, *memoryPropertyFlagsOut
// holds the actual property flags of the chosen type.
angle::Result FindAndAllocateCompatibleMemory(vk::Context *context,
                                              const vk::MemoryProperties &memoryProperties,
                                              VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                              VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                              const VkMemoryRequirements &memoryRequirements,
                                              const void *extraAllocationInfo,
                                              vk::DeviceMemory *deviceMemoryOut)
{
    VkDevice device = context->getDevice();

    uint32_t memoryTypeIndex = 0;
    ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(
        context, memoryRequirements, requestedMemoryPropertyFlags, (extraAllocationInfo != nullptr),
        memoryPropertyFlagsOut, &memoryTypeIndex));

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = extraAllocationInfo;
    allocInfo.memoryTypeIndex = memoryTypeIndex;
    allocInfo.allocationSize = memoryRequirements.size;

    ANGLE_VK_TRY(context, deviceMemoryOut->allocate(device, allocInfo));

    // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
    // invalid values ensures our testing doesn't assume zero-initialized memory.
    RendererVk *renderer = context->getRenderer();
    if (renderer->getFeatures().allocateNonZeroMemory.enabled)
    {
        if ((*memoryPropertyFlagsOut & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            // Can map the memory.
            ANGLE_TRY(vk::InitMappableDeviceMemory(context, deviceMemoryOut,
                                                   memoryRequirements.size, kNonZeroInitValue,
                                                   *memoryPropertyFlagsOut));
        }
    }

    return angle::Result::Continue;
}
120
// Allocates device memory compatible with |memoryRequirements| and binds it to |bufferOrImage|,
// which may be a vk::Buffer or vk::Image (any type exposing bindMemory()).
template <typename T>
angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
                                                 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                                 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                 const VkMemoryRequirements &memoryRequirements,
                                                 const void *extraAllocationInfo,
                                                 T *bufferOrImage,
                                                 vk::DeviceMemory *deviceMemoryOut)
{
    const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();

    ANGLE_TRY(FindAndAllocateCompatibleMemory(
        context, memoryProperties, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
        memoryRequirements, extraAllocationInfo, deviceMemoryOut));
    ANGLE_VK_TRY(context, bufferOrImage->bindMemory(context->getDevice(), *deviceMemoryOut));
    return angle::Result::Continue;
}
138
// Queries the driver for |bufferOrImage|'s memory requirements, then allocates and binds
// compatible device memory. Returns the allocated size in *sizeOut.
template <typename T>
angle::Result AllocateBufferOrImageMemory(vk::Context *context,
                                          VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                          VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                          const void *extraAllocationInfo,
                                          T *bufferOrImage,
                                          vk::DeviceMemory *deviceMemoryOut,
                                          VkDeviceSize *sizeOut)
{
    // Call driver to determine memory requirements.
    VkMemoryRequirements memoryRequirements;
    bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);

    ANGLE_TRY(AllocateAndBindBufferOrImageMemory(
        context, requestedMemoryPropertyFlags, memoryPropertyFlagsOut, memoryRequirements,
        extraAllocationInfo, bufferOrImage, deviceMemoryOut));

    *sizeOut = memoryRequirements.size;

    return angle::Result::Continue;
}
160
// Unified layer that includes full validation layer stack
constexpr char kVkKhronosValidationLayerName[] = "VK_LAYER_KHRONOS_validation";
// Older LunarG meta-layer, used as a fallback when the Khronos layer is unavailable.
constexpr char kVkStandardValidationLayerName[] = "VK_LAYER_LUNARG_standard_validation";
// Last-resort fallback: the individual legacy layers that together provide validation.
const char *kVkValidationLayerNames[] = {
    "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
    "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
    "VK_LAYER_GOOGLE_unique_objects"};
168
HasValidationLayer(const std::vector<VkLayerProperties> & layerProps,const char * layerName)169 bool HasValidationLayer(const std::vector<VkLayerProperties> &layerProps, const char *layerName)
170 {
171 for (const auto &layerProp : layerProps)
172 {
173 if (std::string(layerProp.layerName) == layerName)
174 {
175 return true;
176 }
177 }
178
179 return false;
180 }
181
// Returns true if the unified Khronos validation layer is available.
bool HasKhronosValidationLayer(const std::vector<VkLayerProperties> &layerProps)
{
    return HasValidationLayer(layerProps, kVkKhronosValidationLayerName);
}

// Returns true if the older LunarG standard validation meta-layer is available.
bool HasStandardValidationLayer(const std::vector<VkLayerProperties> &layerProps)
{
    return HasValidationLayer(layerProps, kVkStandardValidationLayerName);
}
191
HasValidationLayers(const std::vector<VkLayerProperties> & layerProps)192 bool HasValidationLayers(const std::vector<VkLayerProperties> &layerProps)
193 {
194 for (const char *layerName : kVkValidationLayerNames)
195 {
196 if (!HasValidationLayer(layerProps, layerName))
197 {
198 return false;
199 }
200 }
201
202 return true;
203 }
204 } // anonymous namespace
205
// Returns a human-readable description of a VkResult for error reporting. Descriptions follow
// the Vulkan specification's wording; unknown codes map to a generic message.
const char *VulkanResultString(VkResult result)
{
    switch (result)
    {
        case VK_SUCCESS:
            return "Command successfully completed";
        case VK_NOT_READY:
            return "A fence or query has not yet completed";
        case VK_TIMEOUT:
            return "A wait operation has not completed in the specified time";
        case VK_EVENT_SET:
            return "An event is signaled";
        case VK_EVENT_RESET:
            return "An event is unsignaled";
        case VK_INCOMPLETE:
            return "A return array was too small for the result";
        case VK_SUBOPTIMAL_KHR:
            return "A swapchain no longer matches the surface properties exactly, but can still be "
                   "used to present to the surface successfully";
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            return "A host memory allocation has failed";
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
            return "A device memory allocation has failed";
        case VK_ERROR_INITIALIZATION_FAILED:
            return "Initialization of an object could not be completed for implementation-specific "
                   "reasons";
        case VK_ERROR_DEVICE_LOST:
            return "The logical or physical device has been lost";
        case VK_ERROR_MEMORY_MAP_FAILED:
            return "Mapping of a memory object has failed";
        case VK_ERROR_LAYER_NOT_PRESENT:
            return "A requested layer is not present or could not be loaded";
        case VK_ERROR_EXTENSION_NOT_PRESENT:
            return "A requested extension is not supported";
        case VK_ERROR_FEATURE_NOT_PRESENT:
            return "A requested feature is not supported";
        case VK_ERROR_INCOMPATIBLE_DRIVER:
            return "The requested version of Vulkan is not supported by the driver or is otherwise "
                   "incompatible for implementation-specific reasons";
        case VK_ERROR_TOO_MANY_OBJECTS:
            return "Too many objects of the type have already been created";
        case VK_ERROR_FORMAT_NOT_SUPPORTED:
            return "A requested format is not supported on this device";
        case VK_ERROR_SURFACE_LOST_KHR:
            return "A surface is no longer available";
        case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
            return "The requested window is already connected to a VkSurfaceKHR, or to some other "
                   "non-Vulkan API";
        case VK_ERROR_OUT_OF_DATE_KHR:
            return "A surface has changed in such a way that it is no longer compatible with the "
                   "swapchain";
        case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
            return "The display used by a swapchain does not use the same presentable image "
                   "layout, or is incompatible in a way that prevents sharing an image";
        case VK_ERROR_VALIDATION_FAILED_EXT:
            return "The validation layers detected invalid API usage";
        case VK_ERROR_INVALID_SHADER_NV:
            return "Invalid Vulkan shader was generated";
        case VK_ERROR_OUT_OF_POOL_MEMORY:
            return "A pool memory allocation has failed";
        case VK_ERROR_FRAGMENTED_POOL:
            return "A pool allocation has failed due to fragmentation of the pool's memory";
        case VK_ERROR_INVALID_EXTERNAL_HANDLE:
            return "An external handle is not a valid handle of the specified type";
        default:
            return "Unknown vulkan error code";
    }
}
274
// Appends the best available set of validation layer names to |enabledLayerNames|, preferring
// the unified Khronos layer, then the LunarG standard meta-layer, then the individual legacy
// layers. Returns false (with an error or warning depending on |mustHaveLayers|) when none of
// the fallbacks are available.
bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
                                  bool mustHaveLayers,
                                  VulkanLayerVector *enabledLayerNames)
{
    // Favor unified Khronos layer, but fallback to standard validation
    if (HasKhronosValidationLayer(layerProps))
    {
        enabledLayerNames->push_back(kVkKhronosValidationLayerName);
    }
    else if (HasStandardValidationLayer(layerProps))
    {
        enabledLayerNames->push_back(kVkStandardValidationLayerName);
    }
    else if (HasValidationLayers(layerProps))
    {
        for (const char *layerName : kVkValidationLayerNames)
        {
            enabledLayerNames->push_back(layerName);
        }
    }
    else
    {
        // Generate an error if the layers were explicitly requested, warning otherwise.
        if (mustHaveLayers)
        {
            ERR() << "Vulkan validation layers are missing.";
        }
        else
        {
            WARN() << "Vulkan validation layers are missing.";
        }

        return false;
    }

    return true;
}
312
313 namespace vk
314 {
// Names of environment variables consumed by the Vulkan loader (layer search path and ICD list).
const char *gLoaderLayersPathEnv = "VK_LAYER_PATH";
const char *gLoaderICDFilenamesEnv = "VK_ICD_FILENAMES";
// ANGLE-specific environment variable for selecting a preferred physical device; presumably
// matched against device names by the caller — confirm at the point of use.
const char *gANGLEPreferredDevice = "ANGLE_PREFERRED_DEVICE";
318
GetDepthStencilAspectFlags(const angle::Format & format)319 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
320 {
321 return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
322 (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
323 }
324
GetFormatAspectFlags(const angle::Format & format)325 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
326 {
327 VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
328 // If the image is not depth stencil, assume color aspect. Note that detecting color formats
329 // is less trivial than depth/stencil, e.g. as block formats don't indicate any bits for RGBA
330 // channels.
331 return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
332 }
333
// Context implementation.
Context::Context(RendererVk *renderer) : mRenderer(renderer) {}

Context::~Context() {}

// Convenience accessor for the renderer's VkDevice handle.
VkDevice Context::getDevice() const
{
    return mRenderer->getDevice();
}
343
// MemoryProperties implementation.
MemoryProperties::MemoryProperties() : mMemoryProperties{} {}

// Queries and caches the physical device's memory properties. Asserts that the cache is empty
// beforehand (i.e. init is called at most once per destroy) and non-empty afterwards.
void MemoryProperties::init(VkPhysicalDevice physicalDevice)
{
    ASSERT(mMemoryProperties.memoryTypeCount == 0);
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
    ASSERT(mMemoryProperties.memoryTypeCount > 0);
}

// Clears the cached properties, allowing init() to be called again.
void MemoryProperties::destroy()
{
    mMemoryProperties = {};
}
358
hasLazilyAllocatedMemory() const359 bool MemoryProperties::hasLazilyAllocatedMemory() const
360 {
361 for (uint32_t typeIndex = 0; typeIndex < mMemoryProperties.memoryTypeCount; ++typeIndex)
362 {
363 const VkMemoryType &memoryType = mMemoryProperties.memoryTypes[typeIndex];
364 if ((memoryType.propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
365 {
366 return true;
367 }
368 }
369 return false;
370 }
371
// Finds a memory type index satisfying |memoryRequirements| and |requestedMemoryPropertyFlags|.
// If no exact match exists, falls back in order to: (1) the spec-guaranteed host-visible +
// host-coherent combination when host visibility was requested, and (2) plain device-local
// memory for external-memory imports. Fails with VK_ERROR_INCOMPATIBLE_DRIVER if all fail.
angle::Result MemoryProperties::findCompatibleMemoryIndex(
    Context *context,
    const VkMemoryRequirements &memoryRequirements,
    VkMemoryPropertyFlags requestedMemoryPropertyFlags,
    bool isExternalMemory,
    VkMemoryPropertyFlags *memoryPropertyFlagsOut,
    uint32_t *typeIndexOut) const
{
    ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);

    // Find a compatible memory pool index. If the index doesn't change, we could cache it.
    // Not finding a valid memory pool means an out-of-spec driver, or internal error.
    // TODO(jmadill): Determine if it is possible to cache indexes.
    // TODO(jmadill): More efficient memory allocation.
    if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, requestedMemoryPropertyFlags,
                             memoryPropertyFlagsOut, typeIndexOut))
    {
        return angle::Result::Continue;
    }

    // We did not find a compatible memory type. If the caller wanted a host visible memory, just
    // return the memory index with fallback, guaranteed, memory flags.
    if (requestedMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
    {
        // The Vulkan spec says the following -
        // There must be at least one memory type with both the
        // VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT and VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
        // bits set in its propertyFlags
        constexpr VkMemoryPropertyFlags fallbackMemoryPropertyFlags =
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

        if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, fallbackMemoryPropertyFlags,
                                 memoryPropertyFlagsOut, typeIndexOut))
        {
            return angle::Result::Continue;
        }
    }

    // We did not find a compatible memory type. When importing external memory, there may be
    // additional restrictions on memoryType. Fallback to requesting device local memory.
    if (isExternalMemory)
    {
        // The Vulkan spec says the following -
        // There must be at least one memory type with the VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
        // bit set in its propertyFlags
        if (FindCompatibleMemory(mMemoryProperties, memoryRequirements,
                                 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, memoryPropertyFlagsOut,
                                 typeIndexOut))
        {
            return angle::Result::Continue;
        }
    }

    // TODO(jmadill): Add error message to error.
    context->handleError(VK_ERROR_INCOMPATIBLE_DRIVER, __FILE__, ANGLE_FUNCTION, __LINE__);
    return angle::Result::Stop;
}
429
// StagingBuffer implementation.
StagingBuffer::StagingBuffer() : mSize(0) {}

// Immediately destroys the buffer and frees its allocation. Only safe when the GPU is no longer
// using the buffer; otherwise use release()/collectGarbage() for deferred destruction.
void StagingBuffer::destroy(RendererVk *renderer)
{
    VkDevice device = renderer->getDevice();
    mBuffer.destroy(device);
    mAllocation.destroy(renderer->getAllocator());
    mSize = 0;
}
440
// Creates a host-visible, host-coherent staging buffer of |size| bytes via the VMA allocator,
// with transfer usage bits derived from |usage|. When the allocateNonZeroMemory feature is on,
// the new memory is filled with a non-zero pattern so tests can't rely on zero initialization.
angle::Result StagingBuffer::init(Context *context, VkDeviceSize size, StagingUsage usage)
{
    VkBufferCreateInfo createInfo = {};
    createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    createInfo.flags = 0;
    createInfo.size = size;
    createInfo.usage = GetStagingBufferUsageFlags(usage);
    createInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    createInfo.queueFamilyIndexCount = 0;
    createInfo.pQueueFamilyIndices = nullptr;

    VkMemoryPropertyFlags preferredFlags = 0;
    // Host-visible + coherent so the CPU can read/write the staging data without manual flushes.
    VkMemoryPropertyFlags requiredFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    RendererVk *renderer = context->getRenderer();
    const Allocator &allocator = renderer->getAllocator();

    uint32_t memoryTypeIndex = 0;
    ANGLE_VK_TRY(context,
                 allocator.createBuffer(createInfo, requiredFlags, preferredFlags,
                                        renderer->getFeatures().persistentlyMappedBuffers.enabled,
                                        &memoryTypeIndex, &mBuffer, &mAllocation));
    mSize = static_cast<size_t>(size);

    // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
    // invalid values ensures our testing doesn't assume zero-initialized memory.
    if (renderer->getFeatures().allocateNonZeroMemory.enabled)
    {
        ANGLE_TRY(InitMappableAllocation(context, allocator, &mAllocation, size, kNonZeroInitValue,
                                         requiredFlags));
    }

    return angle::Result::Continue;
}
476
// Hands the buffer and its allocation to the context's garbage list for deferred destruction
// once the GPU has finished with them.
void StagingBuffer::release(ContextVk *contextVk)
{
    contextVk->addGarbage(&mBuffer);
    contextVk->addGarbage(&mAllocation);
}
482
// Queues the buffer and allocation as renderer garbage tied to |serial|, so they are destroyed
// once that serial is known to have completed on the GPU.
void StagingBuffer::collectGarbage(RendererVk *renderer, Serial serial)
{
    GarbageList garbageList;
    garbageList.emplace_back(GetGarbage(&mBuffer));
    garbageList.emplace_back(GetGarbage(&mAllocation));

    // A one-off resource-use marked with the given serial gates the actual destruction.
    SharedResourceUse sharedUse;
    sharedUse.init();
    sharedUse.updateSerialOneOff(serial);
    renderer->collectGarbage(std::move(sharedUse), std::move(garbageList));
}
494
// Maps a VMA |allocation|, fills its first |size| bytes with |value|, flushes when the memory is
// not host-coherent, and unmaps. Used to wipe fresh allocations to a non-zero pattern.
angle::Result InitMappableAllocation(Context *context,
                                     const Allocator &allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags)
{
    uint8_t *mapPointer;
    ANGLE_VK_TRY(context, allocation->map(allocator, &mapPointer));
    memset(mapPointer, value, static_cast<size_t>(size));

    // Non-coherent memory requires an explicit flush to make the writes visible to the device.
    if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
    {
        allocation->flush(allocator, 0, size);
    }

    allocation->unmap(allocator);

    return angle::Result::Continue;
}
515
// Maps |deviceMemory|, fills its first |size| bytes with |value|, flushes when the memory is not
// host-coherent, and unmaps. Counterpart of InitMappableAllocation for raw VkDeviceMemory.
angle::Result InitMappableDeviceMemory(Context *context,
                                       DeviceMemory *deviceMemory,
                                       VkDeviceSize size,
                                       int value,
                                       VkMemoryPropertyFlags memoryPropertyFlags)
{
    VkDevice device = context->getDevice();

    uint8_t *mapPointer;
    ANGLE_VK_TRY(context, deviceMemory->map(device, 0, VK_WHOLE_SIZE, 0, &mapPointer));
    memset(mapPointer, value, static_cast<size_t>(size));

    // if the memory type is not host coherent, we perform an explicit flush
    if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
    {
        // Offset defaults to 0 via zero-initialization; the whole mapped range is flushed.
        VkMappedMemoryRange mappedRange = {};
        mappedRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        mappedRange.memory = deviceMemory->getHandle();
        mappedRange.size = VK_WHOLE_SIZE;
        ANGLE_VK_TRY(context, vkFlushMappedMemoryRanges(device, 1, &mappedRange));
    }

    deviceMemory->unmap(device);

    return angle::Result::Continue;
}
542
// Allocates and binds device memory for |buffer|; thin typed wrapper over
// AllocateBufferOrImageMemory.
angle::Result AllocateBufferMemory(Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, requestedMemoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, buffer,
                                       deviceMemoryOut, sizeOut);
}
555
// Allocates and binds device memory for |image|; thin typed wrapper over
// AllocateBufferOrImageMemory.
angle::Result AllocateImageMemory(Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, memoryPropertyFlags, memoryPropertyFlagsOut,
                                       extraAllocationInfo, image, deviceMemoryOut, sizeOut);
}
567
// Allocates and binds device memory for |image| using caller-provided memory requirements
// (skipping the driver query). The actual property flags of the chosen type are discarded.
angle::Result AllocateImageMemoryWithRequirements(Context *context,
                                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                                  const VkMemoryRequirements &memoryRequirements,
                                                  const void *extraAllocationInfo,
                                                  Image *image,
                                                  DeviceMemory *deviceMemoryOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
                                              memoryRequirements, extraAllocationInfo, image,
                                              deviceMemoryOut);
}
580
// Allocates and binds device memory for |buffer| using caller-provided memory requirements
// (skipping the driver query). Unlike the Image variant, the chosen type's property flags are
// returned through |memoryPropertyFlagsOut|.
angle::Result AllocateBufferMemoryWithRequirements(Context *context,
                                                   VkMemoryPropertyFlags memoryPropertyFlags,
                                                   const VkMemoryRequirements &memoryRequirements,
                                                   const void *extraAllocationInfo,
                                                   Buffer *buffer,
                                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                   DeviceMemory *deviceMemoryOut)
{
    return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, memoryPropertyFlagsOut,
                                              memoryRequirements, extraAllocationInfo, buffer,
                                              deviceMemoryOut);
}
593
// Creates a VkShaderModule from SPIR-V code (|shaderCodeSize| in bytes) and stamps the wrapper
// with a freshly issued shader serial.
angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize)
{
    VkShaderModuleCreateInfo createInfo = {};
    createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    createInfo.flags = 0;
    createInfo.codeSize = shaderCodeSize;
    createInfo.pCode = shaderCode;

    ANGLE_VK_TRY(context, shaderAndSerial->get().init(context->getDevice(), createInfo));
    shaderAndSerial->updateSerial(context->getRenderer()->issueShaderSerial());
    return angle::Result::Continue;
}
609
Get2DTextureType(uint32_t layerCount,GLint samples)610 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
611 {
612 if (layerCount > 1)
613 {
614 if (samples > 1)
615 {
616 return gl::TextureType::_2DMultisampleArray;
617 }
618 else
619 {
620 return gl::TextureType::_2DArray;
621 }
622 }
623 else
624 {
625 if (samples > 1)
626 {
627 return gl::TextureType::_2DMultisample;
628 }
629 else
630 {
631 return gl::TextureType::_2D;
632 }
633 }
634 }
635
// Default-constructs an empty (invalid) garbage entry.
GarbageObject::GarbageObject() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE) {}

GarbageObject::GarbageObject(HandleType handleType, GarbageHandle handle)
    : mHandleType(handleType), mHandle(handle)
{}

// Move construction: start invalid, then swap with the source via move-assignment.
GarbageObject::GarbageObject(GarbageObject &&other) : GarbageObject()
{
    *this = std::move(other);
}

// Move assignment via swap; the moved-from object receives this object's previous contents.
GarbageObject &GarbageObject::operator=(GarbageObject &&rhs)
{
    std::swap(mHandle, rhs.mHandle);
    std::swap(mHandleType, rhs.mHandleType);
    return *this;
}
653
// GarbageObject implementation
// Using c-style casts here to avoid conditional compile for MSVC 32-bit
// which fails to compile with reinterpret_cast, requiring static_cast.
// Destroys the wrapped Vulkan handle with the destroy/free call matching its HandleType, then
// updates the renderer's active handle counters.
void GarbageObject::destroy(RendererVk *renderer)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "GarbageObject::destroy");
    VkDevice device = renderer->getDevice();
    switch (mHandleType)
    {
        case HandleType::Semaphore:
            vkDestroySemaphore(device, (VkSemaphore)mHandle, nullptr);
            break;
        case HandleType::CommandBuffer:
            // Command buffers are pool allocated.
            UNREACHABLE();
            break;
        case HandleType::Event:
            vkDestroyEvent(device, (VkEvent)mHandle, nullptr);
            break;
        case HandleType::Fence:
            vkDestroyFence(device, (VkFence)mHandle, nullptr);
            break;
        case HandleType::DeviceMemory:
            vkFreeMemory(device, (VkDeviceMemory)mHandle, nullptr);
            break;
        case HandleType::Buffer:
            vkDestroyBuffer(device, (VkBuffer)mHandle, nullptr);
            break;
        case HandleType::BufferView:
            vkDestroyBufferView(device, (VkBufferView)mHandle, nullptr);
            break;
        case HandleType::Image:
            vkDestroyImage(device, (VkImage)mHandle, nullptr);
            break;
        case HandleType::ImageView:
            vkDestroyImageView(device, (VkImageView)mHandle, nullptr);
            break;
        case HandleType::ShaderModule:
            vkDestroyShaderModule(device, (VkShaderModule)mHandle, nullptr);
            break;
        case HandleType::PipelineLayout:
            vkDestroyPipelineLayout(device, (VkPipelineLayout)mHandle, nullptr);
            break;
        case HandleType::RenderPass:
            vkDestroyRenderPass(device, (VkRenderPass)mHandle, nullptr);
            break;
        case HandleType::Pipeline:
            vkDestroyPipeline(device, (VkPipeline)mHandle, nullptr);
            break;
        case HandleType::DescriptorSetLayout:
            vkDestroyDescriptorSetLayout(device, (VkDescriptorSetLayout)mHandle, nullptr);
            break;
        case HandleType::Sampler:
            vkDestroySampler(device, (VkSampler)mHandle, nullptr);
            break;
        case HandleType::DescriptorPool:
            vkDestroyDescriptorPool(device, (VkDescriptorPool)mHandle, nullptr);
            break;
        case HandleType::Framebuffer:
            vkDestroyFramebuffer(device, (VkFramebuffer)mHandle, nullptr);
            break;
        case HandleType::CommandPool:
            vkDestroyCommandPool(device, (VkCommandPool)mHandle, nullptr);
            break;
        case HandleType::QueryPool:
            vkDestroyQueryPool(device, (VkQueryPool)mHandle, nullptr);
            break;
        case HandleType::Allocation:
            // VMA allocations are freed through the allocator, not a vkDestroy* entry point.
            vma::FreeMemory(renderer->getAllocator().getHandle(), (VmaAllocation)mHandle);
            break;
        default:
            UNREACHABLE();
            break;
    }

    renderer->getActiveHandleCounts().onDeallocate(mHandleType);
}
731
// Fills a VkDebugUtilsLabelEXT from a GL debug |source| and |marker| string, picking a fixed
// color per source so captures/tools can distinguish label origins. |source| must be one of the
// six GL_DEBUG_SOURCE_* values (asserted below). Note: |marker| is referenced, not copied; it
// must outlive the label's use.
void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label)
{
    // One color per GL debug source, indexed by (source - GL_DEBUG_SOURCE_API).
    static constexpr angle::ColorF kLabelColors[6] = {
        angle::ColorF(1.0f, 0.5f, 0.5f, 1.0f),  // DEBUG_SOURCE_API
        angle::ColorF(0.5f, 1.0f, 0.5f, 1.0f),  // DEBUG_SOURCE_WINDOW_SYSTEM
        angle::ColorF(0.5f, 0.5f, 1.0f, 1.0f),  // DEBUG_SOURCE_SHADER_COMPILER
        angle::ColorF(0.7f, 0.7f, 0.7f, 1.0f),  // DEBUG_SOURCE_THIRD_PARTY
        angle::ColorF(0.5f, 0.8f, 0.9f, 1.0f),  // DEBUG_SOURCE_APPLICATION
        angle::ColorF(0.9f, 0.8f, 0.5f, 1.0f),  // DEBUG_SOURCE_OTHER
    };

    int colorIndex = source - GL_DEBUG_SOURCE_API;
    ASSERT(colorIndex >= 0 && static_cast<size_t>(colorIndex) < ArraySize(kLabelColors));

    label->sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
    label->pNext = nullptr;
    label->pLabelName = marker;
    kLabelColors[colorIndex].writeData(label->color);
}
751
// ClearValuesArray implementation.
// Starts with all clear values zeroed and no entries enabled.
ClearValuesArray::ClearValuesArray() : mValues{}, mEnabled{} {}

ClearValuesArray::~ClearValuesArray() = default;

// Copy operations are trivially memberwise; defaults suffice.
ClearValuesArray::ClearValuesArray(const ClearValuesArray &other) = default;

ClearValuesArray &ClearValuesArray::operator=(const ClearValuesArray &rhs) = default;
760
// Stores |clearValue| for attachment |index|, splitting packed depth/stencil clears: the stencil
// portion always lands at kUnpackedStencilIndex while depth/color portions land at |index|.
void ClearValuesArray::store(uint32_t index,
                             VkImageAspectFlags aspectFlags,
                             const VkClearValue &clearValue)
{
    ASSERT(aspectFlags != 0);

    // We do this double if to handle the packed depth-stencil case.
    if ((aspectFlags & VK_IMAGE_ASPECT_STENCIL_BIT) != 0)
    {
        // Ensure for packed DS we're writing to the depth index.
        ASSERT(index == kUnpackedDepthIndex ||
               (index == kUnpackedStencilIndex && aspectFlags == VK_IMAGE_ASPECT_STENCIL_BIT));

        storeNoDepthStencil(kUnpackedStencilIndex, clearValue);
    }

    // Store at |index| unless this was a stencil-only clear (already handled above).
    if (aspectFlags != VK_IMAGE_ASPECT_STENCIL_BIT)
    {
        storeNoDepthStencil(index, clearValue);
    }
}
782
storeNoDepthStencil(uint32_t index,const VkClearValue & clearValue)783 void ClearValuesArray::storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue)
784 {
785 mValues[index] = clearValue;
786 mEnabled.set(index);
787 }
788
// Returns the enabled-entry bits restricted to the color attachment slots (masking off the
// unpacked depth/stencil indices that live above IMPLEMENTATION_MAX_DRAW_BUFFERS).
gl::DrawBufferMask ClearValuesArray::getColorMask() const
{
    constexpr uint32_t kColorBuffersMask =
        angle::BitMask<uint32_t>(gl::IMPLEMENTATION_MAX_DRAW_BUFFERS);
    return gl::DrawBufferMask(mEnabled.bits() & kColorBuffersMask);
}
795
// ResourceSerialFactory implementation.
// The counter starts at 1 so that 0 can serve as an "invalid serial" value.
ResourceSerialFactory::ResourceSerialFactory() : mCurrentUniqueSerial(1) {}

ResourceSerialFactory::~ResourceSerialFactory() {}
800
issueSerial()801 uint32_t ResourceSerialFactory::issueSerial()
802 {
803 uint32_t newSerial = ++mCurrentUniqueSerial;
804 // make sure serial does not wrap
805 ASSERT(newSerial > 0);
806 return newSerial;
807 }
808
// Expands to one generate<Type>Serial() method per serial type listed in
// ANGLE_VK_SERIAL_OP; each wraps a freshly issued unique serial in the
// corresponding strong serial type.
#define ANGLE_DEFINE_GEN_VK_SERIAL(Type)                         \
    Type##Serial ResourceSerialFactory::generate##Type##Serial() \
    {                                                            \
        return Type##Serial(issueSerial());                      \
    }

ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_GEN_VK_SERIAL)
816
817 void ClampViewport(VkViewport *viewport)
818 {
819 // 0-sized viewports are invalid in Vulkan.
820 ASSERT(viewport);
821 if (viewport->width == 0.0f)
822 {
823 viewport->width = 1.0f;
824 }
825 if (viewport->height == 0.0f)
826 {
827 viewport->height = 1.0f;
828 }
829 }
830
831 } // namespace vk
832
#if !defined(ANGLE_SHARED_LIBVULKAN)
// When Vulkan is not linked as a shared library, these extension entry points
// are not provided by the loader.  They are defined here (null until loaded)
// and resolved at runtime by the Init*Functions() helpers below.

// VK_EXT_debug_utils
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = nullptr;
PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = nullptr;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = nullptr;
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = nullptr;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = nullptr;

// VK_EXT_debug_report
PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = nullptr;
PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;

// VK_KHR_get_physical_device_properties2
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = nullptr;
PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = nullptr;
PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = nullptr;

// VK_KHR_external_semaphore_fd
PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = nullptr;

// VK_EXT_external_memory_host
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = nullptr;

// VK_EXT_transform_feedback
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = nullptr;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = nullptr;
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = nullptr;
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = nullptr;
PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = nullptr;
PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = nullptr;

// VK_KHR_get_memory_requirements2
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = nullptr;
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = nullptr;

// VK_KHR_bind_memory2
PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = nullptr;
PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = nullptr;

// VK_KHR_external_fence_capabilities
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR =
    nullptr;

// VK_KHR_external_fence_fd
PFN_vkGetFenceFdKHR vkGetFenceFdKHR = nullptr;
PFN_vkImportFenceFdKHR vkImportFenceFdKHR = nullptr;

// VK_KHR_external_semaphore_capabilities
PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR
    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr;

// VK_KHR_sampler_ycbcr_conversion
PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = nullptr;
PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = nullptr;

// VK_KHR_create_renderpass2
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = nullptr;

#    if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = nullptr;
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID =
    nullptr;
PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = nullptr;
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = nullptr;
#    endif
905
// Resolve an instance-level entry point by name and store it in the matching
// global function pointer; asserts the lookup succeeded.
#    define GET_INSTANCE_FUNC(vkName)                                                          \
        do                                                                                     \
        {                                                                                      \
            vkName = reinterpret_cast<PFN_##vkName>(vkGetInstanceProcAddr(instance, #vkName)); \
            ASSERT(vkName);                                                                    \
        } while (0)

// Same as GET_INSTANCE_FUNC but for device-level entry points.
#    define GET_DEVICE_FUNC(vkName)                                                        \
        do                                                                                 \
        {                                                                                  \
            vkName = reinterpret_cast<PFN_##vkName>(vkGetDeviceProcAddr(device, #vkName)); \
            ASSERT(vkName);                                                                \
        } while (0)

// Each Init* helper below loads the entry points of a single extension.  The
// ASSERT inside the macros means these should only be called once the
// corresponding extension is known to be available.
void InitDebugUtilsEXTFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkDestroyDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkCmdBeginDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdEndDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdInsertDebugUtilsLabelEXT);
}

void InitDebugReportEXTFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateDebugReportCallbackEXT);
    GET_INSTANCE_FUNC(vkDestroyDebugReportCallbackEXT);
}

void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceProperties2KHR);
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceFeatures2KHR);
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR);
}

void InitTransformFeedbackEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBindTransformFeedbackBuffersEXT);
    GET_DEVICE_FUNC(vkCmdBeginTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdEndTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdBeginQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdEndQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdDrawIndirectByteCountEXT);
}

// VK_KHR_sampler_ycbcr_conversion
void InitSamplerYcbcrKHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCreateSamplerYcbcrConversionKHR);
    GET_DEVICE_FUNC(vkDestroySamplerYcbcrConversionKHR);
}

// VK_KHR_create_renderpass2
void InitRenderPass2KHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCreateRenderPass2KHR);
}

#    if defined(ANGLE_PLATFORM_FUCHSIA)
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateImagePipeSurfaceFUCHSIA);
}
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetAndroidHardwareBufferPropertiesANDROID);
    GET_INSTANCE_FUNC(vkGetMemoryAndroidHardwareBufferANDROID);
}
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateStreamDescriptorSurfaceGGP);
}
#    endif  // defined(ANGLE_PLATFORM_GGP)

void InitExternalSemaphoreFdFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkImportSemaphoreFdKHR);
}

void InitExternalMemoryHostFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetMemoryHostPointerPropertiesEXT);
}

// VK_KHR_get_memory_requirements2
void InitGetMemoryRequirements2KHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR);
    GET_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR);
}

// VK_KHR_bind_memory2
void InitBindMemory2KHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkBindBufferMemory2KHR);
    GET_DEVICE_FUNC(vkBindImageMemory2KHR);
}

// VK_KHR_external_fence_capabilities
void InitExternalFenceCapabilitiesFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalFencePropertiesKHR);
}

// VK_KHR_external_fence_fd
void InitExternalFenceFdFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetFenceFdKHR);
    GET_INSTANCE_FUNC(vkImportFenceFdKHR);
}

// VK_KHR_external_semaphore_capabilities
void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalSemaphorePropertiesKHR);
}

#    undef GET_INSTANCE_FUNC
#    undef GET_DEVICE_FUNC
1032
1033 #endif // !defined(ANGLE_SHARED_LIBVULKAN)
1034
CalculateGenerateMipmapFilter(ContextVk * contextVk,const vk::Format & format)1035 GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, const vk::Format &format)
1036 {
1037 const bool formatSupportsLinearFiltering = contextVk->getRenderer()->hasImageFormatFeatureBits(
1038 format.actualImageFormatID, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT);
1039 const bool hintFastest = contextVk->getState().getGenerateMipmapHint() == GL_FASTEST;
1040
1041 return formatSupportsLinearFiltering && !hintFastest ? GL_LINEAR : GL_NEAREST;
1042 }
1043
1044 // Return the log of samples. Assumes |sampleCount| is a power of 2. The result can be used to
1045 // index an array based on sample count. See for example TextureVk::PerSampleCountArray.
PackSampleCount(GLint sampleCount)1046 size_t PackSampleCount(GLint sampleCount)
1047 {
1048 if (sampleCount == 0)
1049 {
1050 sampleCount = 1;
1051 }
1052
1053 // We currently only support up to 16xMSAA.
1054 ASSERT(sampleCount <= VK_SAMPLE_COUNT_16_BIT);
1055 ASSERT(gl::isPow2(sampleCount));
1056 return gl::ScanForward(static_cast<uint32_t>(sampleCount));
1057 }
1058
1059 namespace gl_vk
1060 {
1061
GetFilter(const GLenum filter)1062 VkFilter GetFilter(const GLenum filter)
1063 {
1064 switch (filter)
1065 {
1066 case GL_LINEAR_MIPMAP_LINEAR:
1067 case GL_LINEAR_MIPMAP_NEAREST:
1068 case GL_LINEAR:
1069 return VK_FILTER_LINEAR;
1070 case GL_NEAREST_MIPMAP_LINEAR:
1071 case GL_NEAREST_MIPMAP_NEAREST:
1072 case GL_NEAREST:
1073 return VK_FILTER_NEAREST;
1074 default:
1075 UNIMPLEMENTED();
1076 return VK_FILTER_MAX_ENUM;
1077 }
1078 }
1079
GetSamplerMipmapMode(const GLenum filter)1080 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
1081 {
1082 switch (filter)
1083 {
1084 case GL_LINEAR_MIPMAP_LINEAR:
1085 case GL_NEAREST_MIPMAP_LINEAR:
1086 return VK_SAMPLER_MIPMAP_MODE_LINEAR;
1087 case GL_LINEAR:
1088 case GL_NEAREST:
1089 case GL_NEAREST_MIPMAP_NEAREST:
1090 case GL_LINEAR_MIPMAP_NEAREST:
1091 return VK_SAMPLER_MIPMAP_MODE_NEAREST;
1092 default:
1093 UNIMPLEMENTED();
1094 return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
1095 }
1096 }
1097
GetSamplerAddressMode(const GLenum wrap)1098 VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
1099 {
1100 switch (wrap)
1101 {
1102 case GL_REPEAT:
1103 return VK_SAMPLER_ADDRESS_MODE_REPEAT;
1104 case GL_MIRRORED_REPEAT:
1105 return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
1106 case GL_CLAMP_TO_BORDER:
1107 return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
1108 case GL_CLAMP_TO_EDGE:
1109 return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
1110 default:
1111 UNIMPLEMENTED();
1112 return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
1113 }
1114 }
1115
GetRect(const gl::Rectangle & source)1116 VkRect2D GetRect(const gl::Rectangle &source)
1117 {
1118 return {{source.x, source.y},
1119 {static_cast<uint32_t>(source.width), static_cast<uint32_t>(source.height)}};
1120 }
1121
GetPrimitiveTopology(gl::PrimitiveMode mode)1122 VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
1123 {
1124 switch (mode)
1125 {
1126 case gl::PrimitiveMode::Triangles:
1127 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
1128 case gl::PrimitiveMode::Points:
1129 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
1130 case gl::PrimitiveMode::Lines:
1131 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
1132 case gl::PrimitiveMode::LineStrip:
1133 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
1134 case gl::PrimitiveMode::TriangleFan:
1135 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
1136 case gl::PrimitiveMode::TriangleStrip:
1137 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
1138 case gl::PrimitiveMode::LineLoop:
1139 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
1140 case gl::PrimitiveMode::LinesAdjacency:
1141 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY;
1142 case gl::PrimitiveMode::LineStripAdjacency:
1143 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY;
1144 case gl::PrimitiveMode::TrianglesAdjacency:
1145 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY;
1146 case gl::PrimitiveMode::TriangleStripAdjacency:
1147 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY;
1148 case gl::PrimitiveMode::Patches:
1149 return VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
1150 default:
1151 UNREACHABLE();
1152 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
1153 }
1154 }
1155
GetCullMode(const gl::RasterizerState & rasterState)1156 VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
1157 {
1158 if (!rasterState.cullFace)
1159 {
1160 return VK_CULL_MODE_NONE;
1161 }
1162
1163 switch (rasterState.cullMode)
1164 {
1165 case gl::CullFaceMode::Front:
1166 return VK_CULL_MODE_FRONT_BIT;
1167 case gl::CullFaceMode::Back:
1168 return VK_CULL_MODE_BACK_BIT;
1169 case gl::CullFaceMode::FrontAndBack:
1170 return VK_CULL_MODE_FRONT_AND_BACK;
1171 default:
1172 UNREACHABLE();
1173 return VK_CULL_MODE_NONE;
1174 }
1175 }
1176
GetFrontFace(GLenum frontFace,bool invertCullFace)1177 VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
1178 {
1179 // Invert CW and CCW to have the same behavior as OpenGL.
1180 switch (frontFace)
1181 {
1182 case GL_CW:
1183 return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
1184 case GL_CCW:
1185 return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
1186 default:
1187 UNREACHABLE();
1188 return VK_FRONT_FACE_CLOCKWISE;
1189 }
1190 }
1191
GetSamples(GLint sampleCount)1192 VkSampleCountFlagBits GetSamples(GLint sampleCount)
1193 {
1194 switch (sampleCount)
1195 {
1196 case 0:
1197 UNREACHABLE();
1198 return VK_SAMPLE_COUNT_1_BIT;
1199 case 1:
1200 return VK_SAMPLE_COUNT_1_BIT;
1201 case 2:
1202 return VK_SAMPLE_COUNT_2_BIT;
1203 case 4:
1204 return VK_SAMPLE_COUNT_4_BIT;
1205 case 8:
1206 return VK_SAMPLE_COUNT_8_BIT;
1207 case 16:
1208 return VK_SAMPLE_COUNT_16_BIT;
1209 case 32:
1210 return VK_SAMPLE_COUNT_32_BIT;
1211 default:
1212 UNREACHABLE();
1213 return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
1214 }
1215 }
1216
GetSwizzle(const GLenum swizzle)1217 VkComponentSwizzle GetSwizzle(const GLenum swizzle)
1218 {
1219 switch (swizzle)
1220 {
1221 case GL_ALPHA:
1222 return VK_COMPONENT_SWIZZLE_A;
1223 case GL_RED:
1224 return VK_COMPONENT_SWIZZLE_R;
1225 case GL_GREEN:
1226 return VK_COMPONENT_SWIZZLE_G;
1227 case GL_BLUE:
1228 return VK_COMPONENT_SWIZZLE_B;
1229 case GL_ZERO:
1230 return VK_COMPONENT_SWIZZLE_ZERO;
1231 case GL_ONE:
1232 return VK_COMPONENT_SWIZZLE_ONE;
1233 default:
1234 UNREACHABLE();
1235 return VK_COMPONENT_SWIZZLE_IDENTITY;
1236 }
1237 }
1238
GetCompareOp(const GLenum compareFunc)1239 VkCompareOp GetCompareOp(const GLenum compareFunc)
1240 {
1241 switch (compareFunc)
1242 {
1243 case GL_NEVER:
1244 return VK_COMPARE_OP_NEVER;
1245 case GL_LESS:
1246 return VK_COMPARE_OP_LESS;
1247 case GL_EQUAL:
1248 return VK_COMPARE_OP_EQUAL;
1249 case GL_LEQUAL:
1250 return VK_COMPARE_OP_LESS_OR_EQUAL;
1251 case GL_GREATER:
1252 return VK_COMPARE_OP_GREATER;
1253 case GL_NOTEQUAL:
1254 return VK_COMPARE_OP_NOT_EQUAL;
1255 case GL_GEQUAL:
1256 return VK_COMPARE_OP_GREATER_OR_EQUAL;
1257 case GL_ALWAYS:
1258 return VK_COMPARE_OP_ALWAYS;
1259 default:
1260 UNREACHABLE();
1261 return VK_COMPARE_OP_ALWAYS;
1262 }
1263 }
1264
GetOffset(const gl::Offset & glOffset,VkOffset3D * vkOffset)1265 void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
1266 {
1267 vkOffset->x = glOffset.x;
1268 vkOffset->y = glOffset.y;
1269 vkOffset->z = glOffset.z;
1270 }
1271
GetExtent(const gl::Extents & glExtent,VkExtent3D * vkExtent)1272 void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
1273 {
1274 vkExtent->width = glExtent.width;
1275 vkExtent->height = glExtent.height;
1276 vkExtent->depth = glExtent.depth;
1277 }
1278
GetImageType(gl::TextureType textureType)1279 VkImageType GetImageType(gl::TextureType textureType)
1280 {
1281 switch (textureType)
1282 {
1283 case gl::TextureType::_2D:
1284 case gl::TextureType::_2DArray:
1285 case gl::TextureType::_2DMultisample:
1286 case gl::TextureType::_2DMultisampleArray:
1287 case gl::TextureType::CubeMap:
1288 case gl::TextureType::CubeMapArray:
1289 case gl::TextureType::External:
1290 return VK_IMAGE_TYPE_2D;
1291 case gl::TextureType::_3D:
1292 return VK_IMAGE_TYPE_3D;
1293 default:
1294 // We will need to implement all the texture types for ES3+.
1295 UNIMPLEMENTED();
1296 return VK_IMAGE_TYPE_MAX_ENUM;
1297 }
1298 }
1299
GetImageViewType(gl::TextureType textureType)1300 VkImageViewType GetImageViewType(gl::TextureType textureType)
1301 {
1302 switch (textureType)
1303 {
1304 case gl::TextureType::_2D:
1305 case gl::TextureType::_2DMultisample:
1306 case gl::TextureType::External:
1307 return VK_IMAGE_VIEW_TYPE_2D;
1308 case gl::TextureType::_2DArray:
1309 case gl::TextureType::_2DMultisampleArray:
1310 return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
1311 case gl::TextureType::_3D:
1312 return VK_IMAGE_VIEW_TYPE_3D;
1313 case gl::TextureType::CubeMap:
1314 return VK_IMAGE_VIEW_TYPE_CUBE;
1315 case gl::TextureType::CubeMapArray:
1316 return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
1317 default:
1318 // We will need to implement all the texture types for ES3+.
1319 UNIMPLEMENTED();
1320 return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
1321 }
1322 }
1323
GetColorComponentFlags(bool red,bool green,bool blue,bool alpha)1324 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
1325 {
1326 return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
1327 (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
1328 }
1329
GetShaderStageFlags(gl::ShaderBitSet activeShaders)1330 VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
1331 {
1332 VkShaderStageFlags flags = 0;
1333 for (const gl::ShaderType shaderType : activeShaders)
1334 {
1335 flags |= kShaderStageMap[shaderType];
1336 }
1337 return flags;
1338 }
1339
// Convert a GL viewport plus depth range into a VkViewport, handling the
// y-axis differences between GL and Vulkan.  |invertViewport| indicates the
// render target's y must be mirrored (see the default-framebuffer diagrams
// below); |clipSpaceOriginUpperLeft| is the application's chosen clip-space
// origin; |renderAreaHeight| is used to mirror y.  A negative |height| may be
// written, which flips the viewport within Vulkan.
void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 bool clipSpaceOriginUpperLeft,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut)
{
    viewportOut->x        = static_cast<float>(viewport.x);
    viewportOut->y        = static_cast<float>(viewport.y);
    viewportOut->width    = static_cast<float>(viewport.width);
    viewportOut->height   = static_cast<float>(viewport.height);
    // Depth bounds are clamped to [0, 1] as required for viewport depth.
    viewportOut->minDepth = gl::clamp01(nearPlane);
    viewportOut->maxDepth = gl::clamp01(farPlane);

    // Say an application intends to draw a primitive (shown as 'o' below), it can choose to use
    // different clip space origin. When clip space origin (shown as 'C' below) is switched from
    // lower-left to upper-left, primitives will be rendered with its y-coordinate flipped.

    // Rendered content will differ based on whether it is a default framebuffer or a user defined
    // framebuffer. We modify the viewport's 'y' and 'h' accordingly.

    // clip space origin is lower-left
    // Expected draw in GLES        default framebuffer    user defined framebuffer
    // (0,H)                        (0,0)                  (0,0)
    // +                            +-----------+ (W,0)    +-----------+ (W,0)
    // |                            |           |          |  C----+   |
    // |                            |           |          |  |    |   | (h)
    // |  +----+                    |  +----+   |          |  | O  |   |
    // |  | O  |                    |  | O  |   | (-h)     |  +----+   |
    // |  |    |                    |  |    |   |          |           |
    // |  C----+                    |  C----+   |          |           |
    // +-----------+ (W,0)          +           +          +           +
    // (0,0)                        (0,H)                  (0,H)
    //                              y' = H - h              y' = y

    // clip space origin is upper-left
    // Expected draw in GLES        default framebuffer    user defined framebuffer
    // (0,H)                        (0,0)                  (0,0)
    // +                            +-----------+ (W,0)    +-----------+ (W,0)
    // |                            |           |          |  +----+   |
    // |                            |           |          |  | O  |   | (-h)
    // |  C----+                    |  C----+   |          |  |    |   |
    // |  |    |                    |  |    |   | (h)      |  C----+   |
    // |  | O  |                    |  | O  |   |          |           |
    // |  +----+                    |  +----+   |          |           |
    // +-----------+ (W,0)          +           +          +           +
    // (0,0)                        (0,H)                  (0,H)
    //                              y' = H - (y + h)       y' = y + H

    if (clipSpaceOriginUpperLeft)
    {
        if (invertViewport)
        {
            viewportOut->y = static_cast<float>(renderAreaHeight - (viewport.height + viewport.y));
        }
        else
        {
            // Flip the viewport in place by moving y to the bottom edge and
            // negating the height.
            viewportOut->y      = static_cast<float>(viewport.height + viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
    else
    {
        if (invertViewport)
        {
            viewportOut->y      = static_cast<float>(renderAreaHeight - viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
}
1411
GetExtentsAndLayerCount(gl::TextureType textureType,const gl::Extents & extents,VkExtent3D * extentsOut,uint32_t * layerCountOut)1412 void GetExtentsAndLayerCount(gl::TextureType textureType,
1413 const gl::Extents &extents,
1414 VkExtent3D *extentsOut,
1415 uint32_t *layerCountOut)
1416 {
1417 extentsOut->width = extents.width;
1418 extentsOut->height = extents.height;
1419
1420 switch (textureType)
1421 {
1422 case gl::TextureType::CubeMap:
1423 extentsOut->depth = 1;
1424 *layerCountOut = gl::kCubeFaceCount;
1425 break;
1426
1427 case gl::TextureType::_2DArray:
1428 case gl::TextureType::_2DMultisampleArray:
1429 case gl::TextureType::CubeMapArray:
1430 extentsOut->depth = 1;
1431 *layerCountOut = extents.depth;
1432 break;
1433
1434 default:
1435 extentsOut->depth = extents.depth;
1436 *layerCountOut = 1;
1437 break;
1438 }
1439 }
1440
// Convert a GL mip level to a Vulkan level index, which is relative to the
// texture's effective base level.
vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel)
{
    ASSERT(baseLevel <= levelGL);
    return vk::LevelIndex(levelGL.get() - baseLevel.get());
}
1446 } // namespace gl_vk
1447
1448 namespace vk_gl
1449 {
AddSampleCounts(VkSampleCountFlags sampleCounts,gl::SupportedSampleSet * setOut)1450 void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
1451 {
1452 // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b. At the time of this
1453 // writing, b is in [0, 6], however, we test all 32 bits in case the enum is extended.
1454 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1455 {
1456 setOut->insert(static_cast<GLuint>(1 << bit));
1457 }
1458 }
1459
GetMaxSampleCount(VkSampleCountFlags sampleCounts)1460 GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
1461 {
1462 GLuint maxCount = 0;
1463 for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
1464 {
1465 maxCount = static_cast<GLuint>(1 << bit);
1466 }
1467 return maxCount;
1468 }
1469
GetSampleCount(VkSampleCountFlags supportedCounts,GLuint requestedCount)1470 GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
1471 {
1472 for (size_t bit : angle::BitSet32<32>(supportedCounts & kSupportedSampleCounts))
1473 {
1474 GLuint sampleCount = static_cast<GLuint>(1 << bit);
1475 if (sampleCount >= requestedCount)
1476 {
1477 return sampleCount;
1478 }
1479 }
1480
1481 UNREACHABLE();
1482 return 0;
1483 }
1484
// Convert a Vulkan level index (relative to the base level) back to an
// absolute GL mip level.  Inverse of gl_vk::GetLevelIndex.
gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel)
{
    return gl::LevelIndex(levelVk.get() + baseLevel.get());
}
1489 } // namespace vk_gl
1490 } // namespace rx
1491