//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// vk_utils:
//    Helper functions for the Vulkan Renderer.
//

10 #include "libANGLE/renderer/vulkan/vk_utils.h"
11
12 #include "libANGLE/Context.h"
13 #include "libANGLE/renderer/vulkan/BufferVk.h"
14 #include "libANGLE/renderer/vulkan/ContextVk.h"
15 #include "libANGLE/renderer/vulkan/DisplayVk.h"
16 #include "libANGLE/renderer/vulkan/RendererVk.h"
17 #include "libANGLE/renderer/vulkan/ResourceVk.h"
18 #include "libANGLE/renderer/vulkan/android/vk_android_utils.h"
19 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
20
21 namespace angle
22 {
ToEGL(Result result,rx::DisplayVk * displayVk,EGLint errorCode)23 egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode)
24 {
25 if (result != angle::Result::Continue)
26 {
27 return displayVk->getEGLError(errorCode);
28 }
29 else
30 {
31 return egl::NoError();
32 }
33 }
34 } // namespace angle
35
36 namespace rx
37 {
38 namespace
39 {
// Pick an arbitrary value to initialize non-zero memory for sanitization. Note that 0x3F3F3F3F
// as float is about 0.75.  Used by the 'allocateNonZeroMemory' feature paths below to wipe
// freshly allocated memory so tests cannot rely on zero-initialization.
constexpr int kNonZeroInitValue = 0x3F;
43
GetStagingBufferUsageFlags(vk::StagingUsage usage)44 VkImageUsageFlags GetStagingBufferUsageFlags(vk::StagingUsage usage)
45 {
46 switch (usage)
47 {
48 case vk::StagingUsage::Read:
49 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
50 case vk::StagingUsage::Write:
51 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
52 case vk::StagingUsage::Both:
53 return (VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
54 default:
55 UNREACHABLE();
56 return 0;
57 }
58 }
59
FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties & memoryProperties,const VkMemoryRequirements & memoryRequirements,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,uint32_t * typeIndexOut)60 bool FindCompatibleMemory(const VkPhysicalDeviceMemoryProperties &memoryProperties,
61 const VkMemoryRequirements &memoryRequirements,
62 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
63 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
64 uint32_t *typeIndexOut)
65 {
66 for (size_t memoryIndex : angle::BitSet32<32>(memoryRequirements.memoryTypeBits))
67 {
68 ASSERT(memoryIndex < memoryProperties.memoryTypeCount);
69
70 if ((memoryProperties.memoryTypes[memoryIndex].propertyFlags &
71 requestedMemoryPropertyFlags) == requestedMemoryPropertyFlags)
72 {
73 *memoryPropertyFlagsOut = memoryProperties.memoryTypes[memoryIndex].propertyFlags;
74 *typeIndexOut = static_cast<uint32_t>(memoryIndex);
75 return true;
76 }
77 }
78
79 return false;
80 }
81
FindAndAllocateCompatibleMemory(vk::Context * context,const vk::MemoryProperties & memoryProperties,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,const VkMemoryRequirements & memoryRequirements,const void * extraAllocationInfo,vk::DeviceMemory * deviceMemoryOut)82 angle::Result FindAndAllocateCompatibleMemory(vk::Context *context,
83 const vk::MemoryProperties &memoryProperties,
84 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
85 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
86 const VkMemoryRequirements &memoryRequirements,
87 const void *extraAllocationInfo,
88 vk::DeviceMemory *deviceMemoryOut)
89 {
90 VkDevice device = context->getDevice();
91
92 uint32_t memoryTypeIndex = 0;
93 ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(
94 context, memoryRequirements, requestedMemoryPropertyFlags, (extraAllocationInfo != nullptr),
95 memoryPropertyFlagsOut, &memoryTypeIndex));
96
97 VkMemoryAllocateInfo allocInfo = {};
98 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
99 allocInfo.pNext = extraAllocationInfo;
100 allocInfo.memoryTypeIndex = memoryTypeIndex;
101 allocInfo.allocationSize = memoryRequirements.size;
102
103 ANGLE_VK_TRY(context, deviceMemoryOut->allocate(device, allocInfo));
104
105 // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
106 // invalid values ensures our testing doesn't assume zero-initialized memory.
107 RendererVk *renderer = context->getRenderer();
108 if (renderer->getFeatures().allocateNonZeroMemory.enabled)
109 {
110 if ((*memoryPropertyFlagsOut & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
111 {
112 // Can map the memory.
113 ANGLE_TRY(vk::InitMappableDeviceMemory(context, deviceMemoryOut,
114 memoryRequirements.size, kNonZeroInitValue,
115 *memoryPropertyFlagsOut));
116 }
117 }
118
119 return angle::Result::Continue;
120 }
121
// Allocates device memory matching |memoryRequirements| and binds it to
// |bufferOrImage|.  Specialized below for vk::Image (which optionally chains a
// VkBindImagePlaneMemoryInfoKHR for multi-planar binds) and for vk::Buffer.
template <typename T>
angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
                                                 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                                 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                 const VkMemoryRequirements &memoryRequirements,
                                                 const void *extraAllocationInfo,
                                                 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
                                                 T *bufferOrImage,
                                                 vk::DeviceMemory *deviceMemoryOut);
131
132 template <>
AllocateAndBindBufferOrImageMemory(vk::Context * context,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,const VkMemoryRequirements & memoryRequirements,const void * extraAllocationInfo,const VkBindImagePlaneMemoryInfoKHR * extraBindInfo,vk::Image * image,vk::DeviceMemory * deviceMemoryOut)133 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
134 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
135 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
136 const VkMemoryRequirements &memoryRequirements,
137 const void *extraAllocationInfo,
138 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
139 vk::Image *image,
140 vk::DeviceMemory *deviceMemoryOut)
141 {
142 const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
143
144 ANGLE_TRY(FindAndAllocateCompatibleMemory(
145 context, memoryProperties, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
146 memoryRequirements, extraAllocationInfo, deviceMemoryOut));
147
148 if (extraBindInfo)
149 {
150 VkBindImageMemoryInfoKHR bindInfo = {};
151 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
152 bindInfo.pNext = extraBindInfo;
153 bindInfo.image = image->getHandle();
154 bindInfo.memory = deviceMemoryOut->getHandle();
155 bindInfo.memoryOffset = 0;
156
157 ANGLE_VK_TRY(context, image->bindMemory2(context->getDevice(), bindInfo));
158 }
159 else
160 {
161 ANGLE_VK_TRY(context, image->bindMemory(context->getDevice(), *deviceMemoryOut));
162 }
163
164 return angle::Result::Continue;
165 }
166
167 template <>
AllocateAndBindBufferOrImageMemory(vk::Context * context,VkMemoryPropertyFlags requestedMemoryPropertyFlags,VkMemoryPropertyFlags * memoryPropertyFlagsOut,const VkMemoryRequirements & memoryRequirements,const void * extraAllocationInfo,const VkBindImagePlaneMemoryInfoKHR * extraBindInfo,vk::Buffer * buffer,vk::DeviceMemory * deviceMemoryOut)168 angle::Result AllocateAndBindBufferOrImageMemory(vk::Context *context,
169 VkMemoryPropertyFlags requestedMemoryPropertyFlags,
170 VkMemoryPropertyFlags *memoryPropertyFlagsOut,
171 const VkMemoryRequirements &memoryRequirements,
172 const void *extraAllocationInfo,
173 const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
174 vk::Buffer *buffer,
175 vk::DeviceMemory *deviceMemoryOut)
176 {
177 ASSERT(extraBindInfo == nullptr);
178
179 const vk::MemoryProperties &memoryProperties = context->getRenderer()->getMemoryProperties();
180
181 ANGLE_TRY(FindAndAllocateCompatibleMemory(
182 context, memoryProperties, requestedMemoryPropertyFlags, memoryPropertyFlagsOut,
183 memoryRequirements, extraAllocationInfo, deviceMemoryOut));
184 ANGLE_VK_TRY(context, buffer->bindMemory(context->getDevice(), *deviceMemoryOut));
185 return angle::Result::Continue;
186 }
187
// Queries the driver for |bufferOrImage|'s memory requirements, then allocates
// and binds compatible memory.  The actual allocation size (which may exceed
// the caller's requested size due to alignment) is returned in |sizeOut|.
template <typename T>
angle::Result AllocateBufferOrImageMemory(vk::Context *context,
                                          VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                          VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                          const void *extraAllocationInfo,
                                          T *bufferOrImage,
                                          vk::DeviceMemory *deviceMemoryOut,
                                          VkDeviceSize *sizeOut)
{
    // Call driver to determine memory requirements.
    VkMemoryRequirements memoryRequirements;
    bufferOrImage->getMemoryRequirements(context->getDevice(), &memoryRequirements);

    ANGLE_TRY(AllocateAndBindBufferOrImageMemory(
        context, requestedMemoryPropertyFlags, memoryPropertyFlagsOut, memoryRequirements,
        extraAllocationInfo, nullptr, bufferOrImage, deviceMemoryOut));

    *sizeOut = memoryRequirements.size;

    return angle::Result::Continue;
}
209
// Unified layer that includes full validation layer stack
constexpr char kVkKhronosValidationLayerName[] = "VK_LAYER_KHRONOS_validation";
// Older meta-layer, used as a fallback when the Khronos layer is unavailable
// (see GetAvailableValidationLayers below).
constexpr char kVkStandardValidationLayerName[] = "VK_LAYER_LUNARG_standard_validation";
// Individual legacy layers, enabled together as a last resort.
const char *kVkValidationLayerNames[] = {
    "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
    "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation",
    "VK_LAYER_GOOGLE_unique_objects"};
217
HasValidationLayer(const std::vector<VkLayerProperties> & layerProps,const char * layerName)218 bool HasValidationLayer(const std::vector<VkLayerProperties> &layerProps, const char *layerName)
219 {
220 for (const auto &layerProp : layerProps)
221 {
222 if (std::string(layerProp.layerName) == layerName)
223 {
224 return true;
225 }
226 }
227
228 return false;
229 }
230
// True when the unified Khronos validation layer is available.
bool HasKhronosValidationLayer(const std::vector<VkLayerProperties> &layerProps)
{
    return HasValidationLayer(layerProps, kVkKhronosValidationLayerName);
}
235
// True when the legacy LunarG standard-validation meta-layer is available.
bool HasStandardValidationLayer(const std::vector<VkLayerProperties> &layerProps)
{
    return HasValidationLayer(layerProps, kVkStandardValidationLayerName);
}
240
HasValidationLayers(const std::vector<VkLayerProperties> & layerProps)241 bool HasValidationLayers(const std::vector<VkLayerProperties> &layerProps)
242 {
243 for (const char *layerName : kVkValidationLayerNames)
244 {
245 if (!HasValidationLayer(layerProps, layerName))
246 {
247 return false;
248 }
249 }
250
251 return true;
252 }
253 } // anonymous namespace
254
// Maps a VkResult to a human-readable description for error reporting.
// Descriptions follow the wording of the Vulkan specification; unrecognized
// codes fall through to a generic message.
const char *VulkanResultString(VkResult result)
{
    switch (result)
    {
        case VK_SUCCESS:
            return "Command successfully completed";
        case VK_NOT_READY:
            return "A fence or query has not yet completed";
        case VK_TIMEOUT:
            return "A wait operation has not completed in the specified time";
        case VK_EVENT_SET:
            return "An event is signaled";
        case VK_EVENT_RESET:
            return "An event is unsignaled";
        case VK_INCOMPLETE:
            return "A return array was too small for the result";
        case VK_SUBOPTIMAL_KHR:
            return "A swapchain no longer matches the surface properties exactly, but can still be "
                   "used to present to the surface successfully";
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            return "A host memory allocation has failed";
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
            return "A device memory allocation has failed";
        case VK_ERROR_INITIALIZATION_FAILED:
            return "Initialization of an object could not be completed for implementation-specific "
                   "reasons";
        case VK_ERROR_DEVICE_LOST:
            return "The logical or physical device has been lost";
        case VK_ERROR_MEMORY_MAP_FAILED:
            return "Mapping of a memory object has failed";
        case VK_ERROR_LAYER_NOT_PRESENT:
            return "A requested layer is not present or could not be loaded";
        case VK_ERROR_EXTENSION_NOT_PRESENT:
            return "A requested extension is not supported";
        case VK_ERROR_FEATURE_NOT_PRESENT:
            return "A requested feature is not supported";
        case VK_ERROR_INCOMPATIBLE_DRIVER:
            return "The requested version of Vulkan is not supported by the driver or is otherwise "
                   "incompatible for implementation-specific reasons";
        case VK_ERROR_TOO_MANY_OBJECTS:
            return "Too many objects of the type have already been created";
        case VK_ERROR_FORMAT_NOT_SUPPORTED:
            return "A requested format is not supported on this device";
        case VK_ERROR_SURFACE_LOST_KHR:
            return "A surface is no longer available";
        case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
            return "The requested window is already connected to a VkSurfaceKHR, or to some other "
                   "non-Vulkan API";
        case VK_ERROR_OUT_OF_DATE_KHR:
            return "A surface has changed in such a way that it is no longer compatible with the "
                   "swapchain";
        case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
            return "The display used by a swapchain does not use the same presentable image "
                   "layout, or is incompatible in a way that prevents sharing an image";
        case VK_ERROR_VALIDATION_FAILED_EXT:
            return "The validation layers detected invalid API usage";
        case VK_ERROR_INVALID_SHADER_NV:
            return "Invalid Vulkan shader was generated";
        case VK_ERROR_OUT_OF_POOL_MEMORY:
            return "A pool memory allocation has failed";
        case VK_ERROR_FRAGMENTED_POOL:
            return "A pool allocation has failed due to fragmentation of the pool's memory";
        case VK_ERROR_INVALID_EXTERNAL_HANDLE:
            return "An external handle is not a valid handle of the specified type";
        default:
            return "Unknown vulkan error code";
    }
}
323
GetAvailableValidationLayers(const std::vector<VkLayerProperties> & layerProps,bool mustHaveLayers,VulkanLayerVector * enabledLayerNames)324 bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
325 bool mustHaveLayers,
326 VulkanLayerVector *enabledLayerNames)
327 {
328 // Favor unified Khronos layer, but fallback to standard validation
329 if (HasKhronosValidationLayer(layerProps))
330 {
331 enabledLayerNames->push_back(kVkKhronosValidationLayerName);
332 }
333 else if (HasStandardValidationLayer(layerProps))
334 {
335 enabledLayerNames->push_back(kVkStandardValidationLayerName);
336 }
337 else if (HasValidationLayers(layerProps))
338 {
339 for (const char *layerName : kVkValidationLayerNames)
340 {
341 enabledLayerNames->push_back(layerName);
342 }
343 }
344 else
345 {
346 // Generate an error if the layers were explicitly requested, warning otherwise.
347 if (mustHaveLayers)
348 {
349 ERR() << "Vulkan validation layers are missing.";
350 }
351 else
352 {
353 WARN() << "Vulkan validation layers are missing.";
354 }
355
356 return false;
357 }
358
359 return true;
360 }
361
362 namespace vk
363 {
// Environment variable names consumed by the Vulkan loader (layer search path
// and explicit ICD list) and by ANGLE's physical-device selection.
const char *gLoaderLayersPathEnv   = "VK_LAYER_PATH";
const char *gLoaderICDFilenamesEnv = "VK_ICD_FILENAMES";
const char *gANGLEPreferredDevice  = "ANGLE_PREFERRED_DEVICE";
367
GetDepthStencilAspectFlags(const angle::Format & format)368 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format)
369 {
370 return (format.depthBits > 0 ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) |
371 (format.stencilBits > 0 ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
372 }
373
GetFormatAspectFlags(const angle::Format & format)374 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
375 {
376 VkImageAspectFlags dsAspect = GetDepthStencilAspectFlags(format);
377 // If the image is not depth stencil, assume color aspect. Note that detecting color formats
378 // is less trivial than depth/stencil, e.g. as block formats don't indicate any bits for RGBA
379 // channels.
380 return dsAspect != 0 ? dsAspect : VK_IMAGE_ASPECT_COLOR_BIT;
381 }
382
// Context implementation.
Context::Context(RendererVk *renderer) : mRenderer(renderer) {}

Context::~Context() {}

// The VkDevice is owned by the renderer; this is a convenience accessor.
VkDevice Context::getDevice() const
{
    return mRenderer->getDevice();
}
392
// MemoryProperties implementation.
MemoryProperties::MemoryProperties() : mMemoryProperties{} {}

// Queries and caches the physical device's memory type/heap table.  Asserts
// the table was empty before (init called once) and non-empty after.
void MemoryProperties::init(VkPhysicalDevice physicalDevice)
{
    ASSERT(mMemoryProperties.memoryTypeCount == 0);
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &mMemoryProperties);
    ASSERT(mMemoryProperties.memoryTypeCount > 0);
}
402
// Clears the cached memory properties, returning the object to its
// pre-init() state.
void MemoryProperties::destroy()
{
    mMemoryProperties = {};
}
407
hasLazilyAllocatedMemory() const408 bool MemoryProperties::hasLazilyAllocatedMemory() const
409 {
410 for (uint32_t typeIndex = 0; typeIndex < mMemoryProperties.memoryTypeCount; ++typeIndex)
411 {
412 const VkMemoryType &memoryType = mMemoryProperties.memoryTypes[typeIndex];
413 if ((memoryType.propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
414 {
415 return true;
416 }
417 }
418 return false;
419 }
420
// Picks a memory type index for |memoryRequirements| satisfying
// |requestedMemoryPropertyFlags|, with progressively weaker fallbacks:
//   1) a type with all requested flags;
//   2) for host-visible requests, the HOST_VISIBLE|HOST_COHERENT combination
//      the Vulkan spec guarantees to exist;
//   3) for external-memory imports, plain DEVICE_LOCAL (also spec-guaranteed).
// On success, the chosen type's full property flags and index are written to
// the out parameters; otherwise fails with VK_ERROR_INCOMPATIBLE_DRIVER.
angle::Result MemoryProperties::findCompatibleMemoryIndex(
    Context *context,
    const VkMemoryRequirements &memoryRequirements,
    VkMemoryPropertyFlags requestedMemoryPropertyFlags,
    bool isExternalMemory,
    VkMemoryPropertyFlags *memoryPropertyFlagsOut,
    uint32_t *typeIndexOut) const
{
    ASSERT(mMemoryProperties.memoryTypeCount > 0 && mMemoryProperties.memoryTypeCount <= 32);

    // Find a compatible memory pool index. If the index doesn't change, we could cache it.
    // Not finding a valid memory pool means an out-of-spec driver, or internal error.
    // TODO(jmadill): Determine if it is possible to cache indexes.
    // TODO(jmadill): More efficient memory allocation.
    if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, requestedMemoryPropertyFlags,
                             memoryPropertyFlagsOut, typeIndexOut))
    {
        return angle::Result::Continue;
    }

    // We did not find a compatible memory type. If the caller wanted a host visible memory, just
    // return the memory index with fallback, guaranteed, memory flags.
    if (requestedMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
    {
        // The Vulkan spec says the following -
        //     There must be at least one memory type with both the
        //     VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT and VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
        //     bits set in its propertyFlags
        constexpr VkMemoryPropertyFlags fallbackMemoryPropertyFlags =
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

        if (FindCompatibleMemory(mMemoryProperties, memoryRequirements, fallbackMemoryPropertyFlags,
                                 memoryPropertyFlagsOut, typeIndexOut))
        {
            return angle::Result::Continue;
        }
    }

    // We did not find a compatible memory type. When importing external memory, there may be
    // additional restrictions on memoryType. Fallback to requesting device local memory.
    if (isExternalMemory)
    {
        // The Vulkan spec says the following -
        //     There must be at least one memory type with the VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
        //     bit set in its propertyFlags
        if (FindCompatibleMemory(mMemoryProperties, memoryRequirements,
                                 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, memoryPropertyFlagsOut,
                                 typeIndexOut))
        {
            return angle::Result::Continue;
        }
    }

    // TODO(jmadill): Add error message to error.
    context->handleError(VK_ERROR_INCOMPATIBLE_DRIVER, __FILE__, ANGLE_FUNCTION, __LINE__);
    return angle::Result::Stop;
}
478
// BufferMemory implementation.
BufferMemory::BufferMemory() : mClientBuffer(nullptr), mMappedMemory(nullptr) {}

BufferMemory::~BufferMemory() = default;

// Marks this memory as backed by an external (client-provided) buffer; the
// external-memory code paths are used from then on (see isExternalBuffer()).
angle::Result BufferMemory::initExternal(void *clientBuffer)
{
    ASSERT(clientBuffer != nullptr);
    mClientBuffer = clientBuffer;
    return angle::Result::Continue;
}

// Marks this memory as internally allocated (no client buffer attached).
angle::Result BufferMemory::init()
{
    ASSERT(mClientBuffer == nullptr);
    return angle::Result::Continue;
}
496
unmap(RendererVk * renderer)497 void BufferMemory::unmap(RendererVk *renderer)
498 {
499 if (mMappedMemory != nullptr)
500 {
501 if (isExternalBuffer())
502 {
503 mExternalMemory.unmap(renderer->getDevice());
504 }
505 else
506 {
507 mAllocation.unmap(renderer->getAllocator());
508 }
509
510 mMappedMemory = nullptr;
511 }
512 }
513
destroy(RendererVk * renderer)514 void BufferMemory::destroy(RendererVk *renderer)
515 {
516 if (isExternalBuffer())
517 {
518 mExternalMemory.destroy(renderer->getDevice());
519 ReleaseAndroidExternalMemory(renderer, mClientBuffer);
520 }
521 else
522 {
523 mAllocation.destroy(renderer->getAllocator());
524 }
525 }
526
flush(RendererVk * renderer,VkMemoryMapFlags memoryPropertyFlags,VkDeviceSize offset,VkDeviceSize size)527 void BufferMemory::flush(RendererVk *renderer,
528 VkMemoryMapFlags memoryPropertyFlags,
529 VkDeviceSize offset,
530 VkDeviceSize size)
531 {
532 if (isExternalBuffer())
533 {
534 // if the memory type is not host coherent, we perform an explicit flush
535 if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
536 {
537 VkMappedMemoryRange mappedRange = {};
538 mappedRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
539 mappedRange.memory = mExternalMemory.getHandle();
540 mappedRange.offset = offset;
541 mappedRange.size = size;
542 mExternalMemory.flush(renderer->getDevice(), mappedRange);
543 }
544 }
545 else
546 {
547 mAllocation.flush(renderer->getAllocator(), offset, size);
548 }
549 }
550
// Makes device writes in [offset, offset+size) visible to the host.
void BufferMemory::invalidate(RendererVk *renderer,
                              VkMemoryMapFlags memoryPropertyFlags,
                              VkDeviceSize offset,
                              VkDeviceSize size)
{
    if (isExternalBuffer())
    {
        // if the memory type is not device coherent, we perform an explicit invalidate
        // NOTE(review): this tests VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD, while the
        // symmetric flush() above tests VK_MEMORY_PROPERTY_HOST_COHERENT_BIT.  Verify this
        // asymmetry is intentional — host-coherency is what normally governs whether
        // vkInvalidateMappedMemoryRanges is required.
        if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD) == 0)
        {
            VkMappedMemoryRange memoryRanges = {};
            memoryRanges.sType               = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            memoryRanges.memory              = mExternalMemory.getHandle();
            memoryRanges.offset              = offset;
            memoryRanges.size                = size;
            mExternalMemory.invalidate(renderer->getDevice(), memoryRanges);
        }
    }
    else
    {
        mAllocation.invalidate(renderer->getAllocator(), offset, size);
    }
}
574
mapImpl(ContextVk * contextVk,VkDeviceSize size)575 angle::Result BufferMemory::mapImpl(ContextVk *contextVk, VkDeviceSize size)
576 {
577 if (isExternalBuffer())
578 {
579 ANGLE_VK_TRY(contextVk, mExternalMemory.map(contextVk->getRenderer()->getDevice(), 0, size,
580 0, &mMappedMemory));
581 }
582 else
583 {
584 ANGLE_VK_TRY(contextVk,
585 mAllocation.map(contextVk->getRenderer()->getAllocator(), &mMappedMemory));
586 }
587
588 return angle::Result::Continue;
589 }
590
// StagingBuffer implementation.
// Starts out empty; init() creates the buffer and its allocation.
StagingBuffer::StagingBuffer() : mSize(0) {}
593
destroy(RendererVk * renderer)594 void StagingBuffer::destroy(RendererVk *renderer)
595 {
596 VkDevice device = renderer->getDevice();
597 mBuffer.destroy(device);
598 mAllocation.destroy(renderer->getAllocator());
599 mSize = 0;
600 }
601
// Creates a host-visible, host-coherent staging buffer of |size| bytes with
// transfer usage bits derived from |usage|.  When the 'allocateNonZeroMemory'
// feature is on, the new memory is filled with a non-zero pattern.
angle::Result StagingBuffer::init(Context *context, VkDeviceSize size, StagingUsage usage)
{
    VkBufferCreateInfo createInfo    = {};
    createInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    createInfo.flags                 = 0;
    createInfo.size                  = size;
    createInfo.usage                 = GetStagingBufferUsageFlags(usage);
    createInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
    createInfo.queueFamilyIndexCount = 0;
    createInfo.pQueueFamilyIndices   = nullptr;

    VkMemoryPropertyFlags preferredFlags = 0;
    VkMemoryPropertyFlags requiredFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    RendererVk *renderer = context->getRenderer();

    uint32_t memoryTypeIndex                     = 0;
    BufferMemoryAllocator &bufferMemoryAllocator = renderer->getBufferMemoryAllocator();
    ANGLE_VK_TRY(context, bufferMemoryAllocator.createBuffer(
                              renderer, createInfo, requiredFlags, preferredFlags,
                              renderer->getFeatures().persistentlyMappedBuffers.enabled,
                              &memoryTypeIndex, &mBuffer, &mAllocation));
    mSize = static_cast<size_t>(size);

    // Wipe memory to an invalid value when the 'allocateNonZeroMemory' feature is enabled. The
    // invalid values ensures our testing doesn't assume zero-initialized memory.
    if (renderer->getFeatures().allocateNonZeroMemory.enabled)
    {
        const Allocator &allocator = renderer->getAllocator();
        ANGLE_TRY(InitMappableAllocation(context, allocator, &mAllocation, size, kNonZeroInitValue,
                                         requiredFlags));
    }

    return angle::Result::Continue;
}
638
// Hands the buffer and its allocation to the context's garbage list so they
// are destroyed only after the GPU has finished with them.
void StagingBuffer::release(ContextVk *contextVk)
{
    contextVk->addGarbage(&mBuffer);
    contextVk->addGarbage(&mAllocation);
}
644
// Queues the buffer and allocation as garbage tied to |serial|: the renderer
// destroys them once that serial is known to be complete.
void StagingBuffer::collectGarbage(RendererVk *renderer, Serial serial)
{
    GarbageList garbageList;
    garbageList.emplace_back(GetGarbage(&mBuffer));
    garbageList.emplace_back(GetGarbage(&mAllocation));

    // One-off resource-use that is already at |serial|.
    SharedResourceUse sharedUse;
    sharedUse.init();
    sharedUse.updateSerialOneOff(serial);
    renderer->collectGarbage(std::move(sharedUse), std::move(garbageList));
}
656
InitMappableAllocation(Context * context,const Allocator & allocator,Allocation * allocation,VkDeviceSize size,int value,VkMemoryPropertyFlags memoryPropertyFlags)657 angle::Result InitMappableAllocation(Context *context,
658 const Allocator &allocator,
659 Allocation *allocation,
660 VkDeviceSize size,
661 int value,
662 VkMemoryPropertyFlags memoryPropertyFlags)
663 {
664 uint8_t *mapPointer;
665 ANGLE_VK_TRY(context, allocation->map(allocator, &mapPointer));
666 memset(mapPointer, value, static_cast<size_t>(size));
667
668 if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
669 {
670 allocation->flush(allocator, 0, size);
671 }
672
673 allocation->unmap(allocator);
674
675 return angle::Result::Continue;
676 }
677
InitMappableDeviceMemory(Context * context,DeviceMemory * deviceMemory,VkDeviceSize size,int value,VkMemoryPropertyFlags memoryPropertyFlags)678 angle::Result InitMappableDeviceMemory(Context *context,
679 DeviceMemory *deviceMemory,
680 VkDeviceSize size,
681 int value,
682 VkMemoryPropertyFlags memoryPropertyFlags)
683 {
684 VkDevice device = context->getDevice();
685
686 uint8_t *mapPointer;
687 ANGLE_VK_TRY(context, deviceMemory->map(device, 0, VK_WHOLE_SIZE, 0, &mapPointer));
688 memset(mapPointer, value, static_cast<size_t>(size));
689
690 // if the memory type is not host coherent, we perform an explicit flush
691 if ((memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
692 {
693 VkMappedMemoryRange mappedRange = {};
694 mappedRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
695 mappedRange.memory = deviceMemory->getHandle();
696 mappedRange.size = VK_WHOLE_SIZE;
697 ANGLE_VK_TRY(context, vkFlushMappedMemoryRanges(device, 1, &mappedRange));
698 }
699
700 deviceMemory->unmap(device);
701
702 return angle::Result::Continue;
703 }
704
// Allocates and binds memory for |buffer|; thin wrapper over the shared
// buffer/image allocation template.  |sizeOut| receives the allocation size.
angle::Result AllocateBufferMemory(Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, requestedMemoryPropertyFlags,
                                       memoryPropertyFlagsOut, extraAllocationInfo, buffer,
                                       deviceMemoryOut, sizeOut);
}
717
// Allocates and binds memory for |image|; thin wrapper over the shared
// buffer/image allocation template.  |sizeOut| receives the allocation size.
angle::Result AllocateImageMemory(Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut)
{
    return AllocateBufferOrImageMemory(context, memoryPropertyFlags, memoryPropertyFlagsOut,
                                       extraAllocationInfo, image, deviceMemoryOut, sizeOut);
}
729
// Allocates and binds image memory using caller-supplied requirements (e.g.
// from an external image).  The actually obtained memory property flags are
// discarded here — callers that need them use AllocateImageMemory instead.
angle::Result AllocateImageMemoryWithRequirements(
    Context *context,
    VkMemoryPropertyFlags memoryPropertyFlags,
    const VkMemoryRequirements &memoryRequirements,
    const void *extraAllocationInfo,
    const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
    Image *image,
    DeviceMemory *deviceMemoryOut)
{
    VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
    return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, &memoryPropertyFlagsOut,
                                              memoryRequirements, extraAllocationInfo,
                                              extraBindInfo, image, deviceMemoryOut);
}
744
// Allocates and binds buffer memory using caller-supplied requirements; the
// obtained memory property flags are reported through |memoryPropertyFlagsOut|.
angle::Result AllocateBufferMemoryWithRequirements(Context *context,
                                                   VkMemoryPropertyFlags memoryPropertyFlags,
                                                   const VkMemoryRequirements &memoryRequirements,
                                                   const void *extraAllocationInfo,
                                                   Buffer *buffer,
                                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                   DeviceMemory *deviceMemoryOut)
{
    return AllocateAndBindBufferOrImageMemory(context, memoryPropertyFlags, memoryPropertyFlagsOut,
                                              memoryRequirements, extraAllocationInfo, nullptr,
                                              buffer, deviceMemoryOut);
}
757
// Builds a VkShaderModule from the given SPIR-V words and stamps the wrapper
// with a freshly issued shader serial.
angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize)
{
    // codeSize is in bytes; pCode points at 32-bit SPIR-V words.
    VkShaderModuleCreateInfo createInfo = {};
    createInfo.sType                    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    createInfo.flags                    = 0;
    createInfo.codeSize                 = shaderCodeSize;
    createInfo.pCode                    = shaderCode;

    ANGLE_VK_TRY(context, shaderAndSerial->get().init(context->getDevice(), createInfo));
    shaderAndSerial->updateSerial(context->getRenderer()->issueShaderSerial());
    return angle::Result::Continue;
}
773
Get2DTextureType(uint32_t layerCount,GLint samples)774 gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples)
775 {
776 if (layerCount > 1)
777 {
778 if (samples > 1)
779 {
780 return gl::TextureType::_2DMultisampleArray;
781 }
782 else
783 {
784 return gl::TextureType::_2DArray;
785 }
786 }
787 else
788 {
789 if (samples > 1)
790 {
791 return gl::TextureType::_2DMultisample;
792 }
793 else
794 {
795 return gl::TextureType::_2D;
796 }
797 }
798 }
799
// Default state: no handle, Invalid type.
GarbageObject::GarbageObject() : mHandleType(HandleType::Invalid), mHandle(VK_NULL_HANDLE) {}

GarbageObject::GarbageObject(HandleType handleType, GarbageHandle handle)
    : mHandleType(handleType), mHandle(handle)
{}

// Move construction delegates to move assignment via the default-constructed
// state, leaving |other| holding the Invalid/null contents.
GarbageObject::GarbageObject(GarbageObject &&other) : GarbageObject()
{
    *this = std::move(other);
}

// Swap-based move assignment: this object's previous contents end up in |rhs|.
GarbageObject &GarbageObject::operator=(GarbageObject &&rhs)
{
    std::swap(mHandle, rhs.mHandle);
    std::swap(mHandleType, rhs.mHandleType);
    return *this;
}
817
// GarbageObject implementation
// Using c-style casts here to avoid conditional compile for MSVC 32-bit
// which fails to compile with reinterpret_cast, requiring static_cast.
//
// Destroys the deferred-deleted Vulkan object held by this entry, dispatching on the stored
// handle type.  Must only be called once the GPU is done with the object; the caller (the
// renderer's garbage list) is responsible for that ordering.
void GarbageObject::destroy(RendererVk *renderer)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "GarbageObject::destroy");
    VkDevice device = renderer->getDevice();
    switch (mHandleType)
    {
        case HandleType::Semaphore:
            vkDestroySemaphore(device, (VkSemaphore)mHandle, nullptr);
            break;
        case HandleType::CommandBuffer:
            // Command buffers are pool allocated.
            UNREACHABLE();
            break;
        case HandleType::Event:
            vkDestroyEvent(device, (VkEvent)mHandle, nullptr);
            break;
        case HandleType::Fence:
            vkDestroyFence(device, (VkFence)mHandle, nullptr);
            break;
        case HandleType::DeviceMemory:
            vkFreeMemory(device, (VkDeviceMemory)mHandle, nullptr);
            break;
        case HandleType::Buffer:
            vkDestroyBuffer(device, (VkBuffer)mHandle, nullptr);
            break;
        case HandleType::BufferView:
            vkDestroyBufferView(device, (VkBufferView)mHandle, nullptr);
            break;
        case HandleType::Image:
            vkDestroyImage(device, (VkImage)mHandle, nullptr);
            break;
        case HandleType::ImageView:
            vkDestroyImageView(device, (VkImageView)mHandle, nullptr);
            break;
        case HandleType::ShaderModule:
            vkDestroyShaderModule(device, (VkShaderModule)mHandle, nullptr);
            break;
        case HandleType::PipelineLayout:
            vkDestroyPipelineLayout(device, (VkPipelineLayout)mHandle, nullptr);
            break;
        case HandleType::RenderPass:
            vkDestroyRenderPass(device, (VkRenderPass)mHandle, nullptr);
            break;
        case HandleType::Pipeline:
            vkDestroyPipeline(device, (VkPipeline)mHandle, nullptr);
            break;
        case HandleType::DescriptorSetLayout:
            vkDestroyDescriptorSetLayout(device, (VkDescriptorSetLayout)mHandle, nullptr);
            break;
        case HandleType::Sampler:
            vkDestroySampler(device, (VkSampler)mHandle, nullptr);
            break;
        case HandleType::DescriptorPool:
            vkDestroyDescriptorPool(device, (VkDescriptorPool)mHandle, nullptr);
            break;
        case HandleType::Framebuffer:
            vkDestroyFramebuffer(device, (VkFramebuffer)mHandle, nullptr);
            break;
        case HandleType::CommandPool:
            vkDestroyCommandPool(device, (VkCommandPool)mHandle, nullptr);
            break;
        case HandleType::QueryPool:
            vkDestroyQueryPool(device, (VkQueryPool)mHandle, nullptr);
            break;
        case HandleType::Allocation:
            // VMA allocations are freed through the allocator, not vkFreeMemory.
            vma::FreeMemory(renderer->getAllocator().getHandle(), (VmaAllocation)mHandle);
            break;
        default:
            UNREACHABLE();
            break;
    }

    // Let the renderer update its per-type outstanding-handle accounting.
    renderer->onDeallocateHandle(mHandleType);
}
895
// Fills |label| for vkCmd*DebugUtilsLabelEXT from a GL_KHR_debug marker.  Each GL debug source
// gets a fixed color so captures (e.g. RenderDoc) can distinguish marker origins.  |marker|
// must outlive |label| (pLabelName is not copied).
void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label)
{
    static constexpr angle::ColorF kLabelColors[6] = {
        angle::ColorF(1.0f, 0.5f, 0.5f, 1.0f),  // DEBUG_SOURCE_API
        angle::ColorF(0.5f, 1.0f, 0.5f, 1.0f),  // DEBUG_SOURCE_WINDOW_SYSTEM
        angle::ColorF(0.5f, 0.5f, 1.0f, 1.0f),  // DEBUG_SOURCE_SHADER_COMPILER
        angle::ColorF(0.7f, 0.7f, 0.7f, 1.0f),  // DEBUG_SOURCE_THIRD_PARTY
        angle::ColorF(0.5f, 0.8f, 0.9f, 1.0f),  // DEBUG_SOURCE_APPLICATION
        angle::ColorF(0.9f, 0.8f, 0.5f, 1.0f),  // DEBUG_SOURCE_OTHER
    };

    // GL debug source enums are contiguous starting at GL_DEBUG_SOURCE_API.
    int colorIndex = source - GL_DEBUG_SOURCE_API;
    ASSERT(colorIndex >= 0 && static_cast<size_t>(colorIndex) < ArraySize(kLabelColors));

    label->sType      = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
    label->pNext      = nullptr;
    label->pLabelName = marker;
    kLabelColors[colorIndex].writeData(label->color);
}
915
916 // ClearValuesArray implementation.
// Starts with all clear values zeroed and no attachment marked as having a pending clear.
ClearValuesArray::ClearValuesArray() : mValues{}, mEnabled{} {}

ClearValuesArray::~ClearValuesArray() = default;

ClearValuesArray::ClearValuesArray(const ClearValuesArray &other) = default;

ClearValuesArray &ClearValuesArray::operator=(const ClearValuesArray &rhs) = default;
924
// Records a pending clear value at |index|.  For depth/stencil, the stencil clear is always
// stored in its dedicated slot (kUnpackedStencilIndex) so packed depth-stencil formats still
// record both halves; the depth or color value stays at |index|.
void ClearValuesArray::store(uint32_t index,
                             VkImageAspectFlags aspectFlags,
                             const VkClearValue &clearValue)
{
    ASSERT(aspectFlags != 0);

    // We do this double if to handle the packed depth-stencil case.
    if ((aspectFlags & VK_IMAGE_ASPECT_STENCIL_BIT) != 0)
    {
        // Ensure for packed DS we're writing to the depth index.
        ASSERT(index == kUnpackedDepthIndex ||
               (index == kUnpackedStencilIndex && aspectFlags == VK_IMAGE_ASPECT_STENCIL_BIT));

        storeNoDepthStencil(kUnpackedStencilIndex, clearValue);
    }

    // Skip the |index| slot only when the clear is stencil-only (already stored above).
    if (aspectFlags != VK_IMAGE_ASPECT_STENCIL_BIT)
    {
        storeNoDepthStencil(index, clearValue);
    }
}
946
// Unconditionally stores |clearValue| at |index| and marks that slot enabled.  No packed
// depth-stencil handling; see store() for that.
void ClearValuesArray::storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue)
{
    mValues[index] = clearValue;
    mEnabled.set(index);
}
952
// Returns the subset of enabled slots that correspond to color attachments, i.e. the enabled
// bits with the depth/stencil slots masked off.
gl::DrawBufferMask ClearValuesArray::getColorMask() const
{
    constexpr uint32_t kColorBuffersMask =
        angle::BitMask<uint32_t>(gl::IMPLEMENTATION_MAX_DRAW_BUFFERS);
    return gl::DrawBufferMask(mEnabled.bits() & kColorBuffersMask);
}
959
960 // ResourceSerialFactory implementation.
// Serials start at 1 so that 0 can serve as an "invalid/never assigned" value.
ResourceSerialFactory::ResourceSerialFactory() : mCurrentUniqueSerial(1) {}

ResourceSerialFactory::~ResourceSerialFactory() {}

// Returns the next monotonically increasing serial.
// NOTE(review): the pre-increment is not obviously synchronized here — presumably
// mCurrentUniqueSerial is atomic or this is only called from one thread; confirm in the header.
uint32_t ResourceSerialFactory::issueSerial()
{
    uint32_t newSerial = ++mCurrentUniqueSerial;
    // make sure serial does not wrap
    ASSERT(newSerial > 0);
    return newSerial;
}
972
// Stamps out one strongly-typed generate<Type>Serial() method per resource type listed in
// ANGLE_VK_SERIAL_OP; each simply wraps a fresh issueSerial() value in the <Type>Serial type.
#define ANGLE_DEFINE_GEN_VK_SERIAL(Type)                         \
    Type##Serial ResourceSerialFactory::generate##Type##Serial() \
    {                                                            \
        return Type##Serial(issueSerial());                      \
    }

ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_GEN_VK_SERIAL)
980
981 void ClampViewport(VkViewport *viewport)
982 {
983 // 0-sized viewports are invalid in Vulkan.
984 ASSERT(viewport);
985 if (viewport->width == 0.0f)
986 {
987 viewport->width = 1.0f;
988 }
989 if (viewport->height == 0.0f)
990 {
991 viewport->height = 1.0f;
992 }
993 }
994
995 } // namespace vk
996
997 #if !defined(ANGLE_SHARED_LIBVULKAN)
998 // VK_EXT_debug_utils
999 PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = nullptr;
1000 PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = nullptr;
1001 PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = nullptr;
1002 PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = nullptr;
1003 PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = nullptr;
1004
1005 // VK_EXT_debug_report
1006 PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = nullptr;
1007 PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;
1008
1009 // VK_KHR_get_physical_device_properties2
1010 PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = nullptr;
1011 PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = nullptr;
1012 PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = nullptr;
1013
1014 // VK_KHR_external_semaphore_fd
1015 PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = nullptr;
1016
1017 // VK_EXT_external_memory_host
1018 PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = nullptr;
1019
1020 // VK_EXT_host_query_reset
1021 PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = nullptr;
1022
1023 // VK_EXT_transform_feedback
1024 PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = nullptr;
1025 PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = nullptr;
1026 PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = nullptr;
1027 PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = nullptr;
1028 PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = nullptr;
1029 PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = nullptr;
1030
1031 // VK_KHR_get_memory_requirements2
1032 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = nullptr;
1033 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = nullptr;
1034
1035 // VK_KHR_bind_memory2
1036 PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = nullptr;
1037 PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = nullptr;
1038
1039 // VK_KHR_external_fence_capabilities
1040 PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR =
1041 nullptr;
1042
1043 // VK_KHR_external_fence_fd
1044 PFN_vkGetFenceFdKHR vkGetFenceFdKHR = nullptr;
1045 PFN_vkImportFenceFdKHR vkImportFenceFdKHR = nullptr;
1046
1047 // VK_KHR_external_semaphore_capabilities
1048 PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR
1049 vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr;
1050
1051 // VK_KHR_sampler_ycbcr_conversion
1052 PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = nullptr;
1053 PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = nullptr;
1054
1055 // VK_KHR_create_renderpass2
1056 PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = nullptr;
1057
1058 # if defined(ANGLE_PLATFORM_FUCHSIA)
1059 // VK_FUCHSIA_imagepipe_surface
1060 PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = nullptr;
1061 # endif
1062
1063 # if defined(ANGLE_PLATFORM_ANDROID)
1064 PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID =
1065 nullptr;
1066 PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = nullptr;
1067 # endif
1068
1069 # if defined(ANGLE_PLATFORM_GGP)
1070 PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = nullptr;
1071 # endif
1072
1073 # define GET_INSTANCE_FUNC(vkName) \
1074 do \
1075 { \
1076 vkName = reinterpret_cast<PFN_##vkName>(vkGetInstanceProcAddr(instance, #vkName)); \
1077 ASSERT(vkName); \
1078 } while (0)
1079
1080 # define GET_DEVICE_FUNC(vkName) \
1081 do \
1082 { \
1083 vkName = reinterpret_cast<PFN_##vkName>(vkGetDeviceProcAddr(device, #vkName)); \
1084 ASSERT(vkName); \
1085 } while (0)
1086
1087 // VK_KHR_shared_presentable_image
1088 PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = nullptr;
1089
// VK_EXT_debug_utils: messenger creation/destruction plus command-buffer label entry points.
void InitDebugUtilsEXTFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkDestroyDebugUtilsMessengerEXT);
    GET_INSTANCE_FUNC(vkCmdBeginDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdEndDebugUtilsLabelEXT);
    GET_INSTANCE_FUNC(vkCmdInsertDebugUtilsLabelEXT);
}

// VK_EXT_debug_report: the older debug callback extension.
void InitDebugReportEXTFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateDebugReportCallbackEXT);
    GET_INSTANCE_FUNC(vkDestroyDebugReportCallbackEXT);
}

// VK_KHR_get_physical_device_properties2
void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceProperties2KHR);
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceFeatures2KHR);
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR);
}

// VK_EXT_transform_feedback: loaded per-device for dispatch-free calls.
void InitTransformFeedbackEXTFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCmdBindTransformFeedbackBuffersEXT);
    GET_DEVICE_FUNC(vkCmdBeginTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdEndTransformFeedbackEXT);
    GET_DEVICE_FUNC(vkCmdBeginQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdEndQueryIndexedEXT);
    GET_DEVICE_FUNC(vkCmdDrawIndirectByteCountEXT);
}

// VK_KHR_sampler_ycbcr_conversion
void InitSamplerYcbcrKHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCreateSamplerYcbcrConversionKHR);
    GET_DEVICE_FUNC(vkDestroySamplerYcbcrConversionKHR);
}

// VK_KHR_create_renderpass2
void InitRenderPass2KHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkCreateRenderPass2KHR);
}
1134
1135 # if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface: Fuchsia's window-system surface entry point.
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateImagePipeSurfaceFUCHSIA);
}
1140 # endif
1141
1142 # if defined(ANGLE_PLATFORM_ANDROID)
// VK_ANDROID_external_memory_android_hardware_buffer: AHardwareBuffer import/export.
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetAndroidHardwareBufferPropertiesANDROID);
    GET_INSTANCE_FUNC(vkGetMemoryAndroidHardwareBufferANDROID);
}
1148 # endif
1149
1150 # if defined(ANGLE_PLATFORM_GGP)
// VK_GGP_stream_descriptor_surface: GGP (Stadia) surface creation.
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkCreateStreamDescriptorSurfaceGGP);
}
1155 # endif // defined(ANGLE_PLATFORM_GGP)
1156
// VK_KHR_external_semaphore_fd
void InitExternalSemaphoreFdFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkImportSemaphoreFdKHR);
}

// VK_EXT_external_memory_host
void InitExternalMemoryHostFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetMemoryHostPointerPropertiesEXT);
}
1166
InitHostQueryResetFunctions(VkInstance instance)1167 void InitHostQueryResetFunctions(VkInstance instance)
1168 {
1169 GET_INSTANCE_FUNC(vkGetMemoryHostPointerPropertiesEXT);
1170 }
1171
// VK_KHR_get_memory_requirements2
void InitGetMemoryRequirements2KHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR);
    GET_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR);
}

// VK_KHR_bind_memory2
void InitBindMemory2KHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkBindBufferMemory2KHR);
    GET_DEVICE_FUNC(vkBindImageMemory2KHR);
}

// VK_KHR_external_fence_capabilities
void InitExternalFenceCapabilitiesFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalFencePropertiesKHR);
}

// VK_KHR_external_fence_fd: export/import fences as POSIX fds (e.g. for Android sync).
void InitExternalFenceFdFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetFenceFdKHR);
    GET_INSTANCE_FUNC(vkImportFenceFdKHR);
}

// VK_KHR_external_semaphore_capabilities
void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance)
{
    GET_INSTANCE_FUNC(vkGetPhysicalDeviceExternalSemaphorePropertiesKHR);
}

// VK_KHR_shared_presentable_image
void InitGetSwapchainStatusKHRFunctions(VkDevice device)
{
    GET_DEVICE_FUNC(vkGetSwapchainStatusKHR);
}
1210
1211 # undef GET_INSTANCE_FUNC
1212 # undef GET_DEVICE_FUNC
1213
1214 #endif // !defined(ANGLE_SHARED_LIBVULKAN)
1215
// Picks the filter used when generating mipmaps for |formatID|: GL_LINEAR when the format
// supports linear filtering and the app hasn't hinted GL_FASTEST, otherwise GL_NEAREST.
GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, angle::FormatID formatID)
{
    const bool formatSupportsLinearFiltering = contextVk->getRenderer()->hasImageFormatFeatureBits(
        formatID, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT);
    const bool hintFastest = contextVk->getState().getGenerateMipmapHint() == GL_FASTEST;

    return formatSupportsLinearFiltering && !hintFastest ? GL_LINEAR : GL_NEAREST;
}
1224
1225 // Return the log of samples. Assumes |sampleCount| is a power of 2. The result can be used to
1226 // index an array based on sample count. See for example TextureVk::PerSampleCountArray.
PackSampleCount(GLint sampleCount)1227 size_t PackSampleCount(GLint sampleCount)
1228 {
1229 if (sampleCount == 0)
1230 {
1231 sampleCount = 1;
1232 }
1233
1234 // We currently only support up to 16xMSAA.
1235 ASSERT(sampleCount <= VK_SAMPLE_COUNT_16_BIT);
1236 ASSERT(gl::isPow2(sampleCount));
1237 return gl::ScanForward(static_cast<uint32_t>(sampleCount));
1238 }
1239
1240 namespace gl_vk
1241 {
1242
GetFilter(const GLenum filter)1243 VkFilter GetFilter(const GLenum filter)
1244 {
1245 switch (filter)
1246 {
1247 case GL_LINEAR_MIPMAP_LINEAR:
1248 case GL_LINEAR_MIPMAP_NEAREST:
1249 case GL_LINEAR:
1250 return VK_FILTER_LINEAR;
1251 case GL_NEAREST_MIPMAP_LINEAR:
1252 case GL_NEAREST_MIPMAP_NEAREST:
1253 case GL_NEAREST:
1254 return VK_FILTER_NEAREST;
1255 default:
1256 UNIMPLEMENTED();
1257 return VK_FILTER_MAX_ENUM;
1258 }
1259 }
1260
GetSamplerMipmapMode(const GLenum filter)1261 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter)
1262 {
1263 switch (filter)
1264 {
1265 case GL_LINEAR_MIPMAP_LINEAR:
1266 case GL_NEAREST_MIPMAP_LINEAR:
1267 return VK_SAMPLER_MIPMAP_MODE_LINEAR;
1268 case GL_LINEAR:
1269 case GL_NEAREST:
1270 case GL_NEAREST_MIPMAP_NEAREST:
1271 case GL_LINEAR_MIPMAP_NEAREST:
1272 return VK_SAMPLER_MIPMAP_MODE_NEAREST;
1273 default:
1274 UNIMPLEMENTED();
1275 return VK_SAMPLER_MIPMAP_MODE_MAX_ENUM;
1276 }
1277 }
1278
// Translates a GLES texture wrap mode into the Vulkan sampler address mode.
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap)
{
    switch (wrap)
    {
        case GL_REPEAT:
            return VK_SAMPLER_ADDRESS_MODE_REPEAT;
        case GL_MIRRORED_REPEAT:
            return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
        case GL_CLAMP_TO_BORDER:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
        case GL_CLAMP_TO_EDGE:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        default:
            UNIMPLEMENTED();
            return VK_SAMPLER_ADDRESS_MODE_MAX_ENUM;
    }
}
1296
GetRect(const gl::Rectangle & source)1297 VkRect2D GetRect(const gl::Rectangle &source)
1298 {
1299 return {{source.x, source.y},
1300 {static_cast<uint32_t>(source.width), static_cast<uint32_t>(source.height)}};
1301 }
1302
// Maps a GLES primitive mode to the Vulkan primitive topology.
void  // (doc) -- see below
// NOTE(review): LineLoop maps to LINE_STRIP because Vulkan has no line-loop topology;
// presumably the closing segment is emulated elsewhere (index-buffer rewrite) — confirm in
// the line-loop helper before relying on this mapping alone.
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode)
{
    switch (mode)
    {
        case gl::PrimitiveMode::Triangles:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
        case gl::PrimitiveMode::Points:
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        case gl::PrimitiveMode::Lines:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
        case gl::PrimitiveMode::LineStrip:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case gl::PrimitiveMode::TriangleFan:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
        case gl::PrimitiveMode::TriangleStrip:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        case gl::PrimitiveMode::LineLoop:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case gl::PrimitiveMode::LinesAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY;
        case gl::PrimitiveMode::LineStripAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY;
        case gl::PrimitiveMode::TrianglesAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY;
        case gl::PrimitiveMode::TriangleStripAdjacency:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY;
        case gl::PrimitiveMode::Patches:
            return VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
        default:
            UNREACHABLE();
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    }
}
1336
// Maps GL rasterizer cull state to the Vulkan cull mode.  A disabled cull face always yields
// VK_CULL_MODE_NONE regardless of the stored cull mode.
VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState)
{
    if (!rasterState.cullFace)
    {
        return VK_CULL_MODE_NONE;
    }

    switch (rasterState.cullMode)
    {
        case gl::CullFaceMode::Front:
            return VK_CULL_MODE_FRONT_BIT;
        case gl::CullFaceMode::Back:
            return VK_CULL_MODE_BACK_BIT;
        case gl::CullFaceMode::FrontAndBack:
            return VK_CULL_MODE_FRONT_AND_BACK;
        default:
            UNREACHABLE();
            return VK_CULL_MODE_NONE;
    }
}
1357
// Maps GL front-face winding to Vulkan.  The mapping is intentionally inverted by default
// (GL_CCW -> VK_FRONT_FACE_CLOCKWISE) because the Vulkan backend flips the viewport's y axis;
// |invertCullFace| undoes that inversion when the y-flip is not in effect.
VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace)
{
    // Invert CW and CCW to have the same behavior as OpenGL.
    switch (frontFace)
    {
        case GL_CW:
            return invertCullFace ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE;
        case GL_CCW:
            return invertCullFace ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
        default:
            UNREACHABLE();
            return VK_FRONT_FACE_CLOCKWISE;
    }
}
1372
// Maps a GL sample count to the corresponding VkSampleCountFlagBits.  Callers must pass a
// power-of-two count; 0 is a caller bug (asserted, then treated as single-sampled).
VkSampleCountFlagBits GetSamples(GLint sampleCount)
{
    switch (sampleCount)
    {
        case 0:
            UNREACHABLE();
            return VK_SAMPLE_COUNT_1_BIT;
        case 1:
            return VK_SAMPLE_COUNT_1_BIT;
        case 2:
            return VK_SAMPLE_COUNT_2_BIT;
        case 4:
            return VK_SAMPLE_COUNT_4_BIT;
        case 8:
            return VK_SAMPLE_COUNT_8_BIT;
        case 16:
            return VK_SAMPLE_COUNT_16_BIT;
        case 32:
            return VK_SAMPLE_COUNT_32_BIT;
        default:
            UNREACHABLE();
            return VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
    }
}
1397
// Maps a GLES texture swizzle value to the Vulkan component swizzle.
VkComponentSwizzle GetSwizzle(const GLenum swizzle)
{
    switch (swizzle)
    {
        case GL_ALPHA:
            return VK_COMPONENT_SWIZZLE_A;
        case GL_RED:
            return VK_COMPONENT_SWIZZLE_R;
        case GL_GREEN:
            return VK_COMPONENT_SWIZZLE_G;
        case GL_BLUE:
            return VK_COMPONENT_SWIZZLE_B;
        case GL_ZERO:
            return VK_COMPONENT_SWIZZLE_ZERO;
        case GL_ONE:
            return VK_COMPONENT_SWIZZLE_ONE;
        default:
            UNREACHABLE();
            return VK_COMPONENT_SWIZZLE_IDENTITY;
    }
}
1419
// Maps a GLES comparison function (depth/stencil/sampler-compare) to the Vulkan compare op.
VkCompareOp GetCompareOp(const GLenum compareFunc)
{
    switch (compareFunc)
    {
        case GL_NEVER:
            return VK_COMPARE_OP_NEVER;
        case GL_LESS:
            return VK_COMPARE_OP_LESS;
        case GL_EQUAL:
            return VK_COMPARE_OP_EQUAL;
        case GL_LEQUAL:
            return VK_COMPARE_OP_LESS_OR_EQUAL;
        case GL_GREATER:
            return VK_COMPARE_OP_GREATER;
        case GL_NOTEQUAL:
            return VK_COMPARE_OP_NOT_EQUAL;
        case GL_GEQUAL:
            return VK_COMPARE_OP_GREATER_OR_EQUAL;
        case GL_ALWAYS:
            return VK_COMPARE_OP_ALWAYS;
        default:
            UNREACHABLE();
            return VK_COMPARE_OP_ALWAYS;
    }
}
1445
GetOffset(const gl::Offset & glOffset,VkOffset3D * vkOffset)1446 void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset)
1447 {
1448 vkOffset->x = glOffset.x;
1449 vkOffset->y = glOffset.y;
1450 vkOffset->z = glOffset.z;
1451 }
1452
// Copies a gl::Extents into a VkExtent3D.  GL extents are signed; Vulkan's are unsigned —
// the implicit conversion here assumes non-negative extents (validated at the GL layer).
void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent)
{
    vkExtent->width  = glExtent.width;
    vkExtent->height = glExtent.height;
    vkExtent->depth  = glExtent.depth;
}
1459
// Maps a GLES texture type to the Vulkan image type.  Array-ness and cube-ness are expressed
// through layer counts and image view types, so everything except 3D maps to VK_IMAGE_TYPE_2D.
VkImageType GetImageType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::_2DMultisampleArray:
        case gl::TextureType::CubeMap:
        case gl::TextureType::CubeMapArray:
        case gl::TextureType::External:
            return VK_IMAGE_TYPE_2D;
        case gl::TextureType::_3D:
            return VK_IMAGE_TYPE_3D;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_TYPE_MAX_ENUM;
    }
}
1480
// Maps a GLES texture type to the Vulkan image view type (which, unlike VkImageType, does
// distinguish array and cube variants).
VkImageViewType GetImageViewType(gl::TextureType textureType)
{
    switch (textureType)
    {
        case gl::TextureType::_2D:
        case gl::TextureType::_2DMultisample:
        case gl::TextureType::External:
            return VK_IMAGE_VIEW_TYPE_2D;
        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisampleArray:
            return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        case gl::TextureType::_3D:
            return VK_IMAGE_VIEW_TYPE_3D;
        case gl::TextureType::CubeMap:
            return VK_IMAGE_VIEW_TYPE_CUBE;
        case gl::TextureType::CubeMapArray:
            return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
        default:
            // We will need to implement all the texture types for ES3+.
            UNIMPLEMENTED();
            return VK_IMAGE_VIEW_TYPE_MAX_ENUM;
    }
}
1504
GetColorComponentFlags(bool red,bool green,bool blue,bool alpha)1505 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha)
1506 {
1507 return (red ? VK_COLOR_COMPONENT_R_BIT : 0) | (green ? VK_COLOR_COMPONENT_G_BIT : 0) |
1508 (blue ? VK_COLOR_COMPONENT_B_BIT : 0) | (alpha ? VK_COLOR_COMPONENT_A_BIT : 0);
1509 }
1510
// ORs together the Vulkan stage bits for every shader stage set in |activeShaders|, using the
// project's gl::ShaderType -> VkShaderStageFlagBits map.
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders)
{
    VkShaderStageFlags flags = 0;
    for (const gl::ShaderType shaderType : activeShaders)
    {
        flags |= kShaderStageMap[shaderType];
    }
    return flags;
}
1520
// Converts a GL viewport into a VkViewport, adjusting y (and possibly negating height) to
// reconcile GL's bottom-left origin with Vulkan's top-left one.  |invertViewport| is true for
// default framebuffers (content must be flipped); |clipSpaceOriginUpperLeft| reflects
// EXT_clip_control state.  Negative heights rely on VK_KHR_maintenance1 semantics.  The
// diagrams below walk through all four combinations.
void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 bool clipSpaceOriginUpperLeft,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut)
{
    viewportOut->x        = static_cast<float>(viewport.x);
    viewportOut->y        = static_cast<float>(viewport.y);
    viewportOut->width    = static_cast<float>(viewport.width);
    viewportOut->height   = static_cast<float>(viewport.height);
    viewportOut->minDepth = gl::clamp01(nearPlane);
    viewportOut->maxDepth = gl::clamp01(farPlane);

    // Say an application intends to draw a primitive (shown as 'o' below), it can choose to use
    // different clip space origin. When clip space origin (shown as 'C' below) is switched from
    // lower-left to upper-left, primitives will be rendered with its y-coordinate flipped.

    // Rendered content will differ based on whether it is a default framebuffer or a user defined
    // framebuffer. We modify the viewport's 'y' and 'h' accordingly.

    // clip space origin is lower-left
    // Expected draw in GLES         default framebuffer    user defined framebuffer
    // (0,H)                        (0,0)                  (0,0)
    // +                            +-----------+ (W,0)    +-----------+ (W,0)
    // |                            |                      |  C----+
    // |                            |                      |  |    | (h)
    // |  +----+                    |  +----+              |  | O  |
    // |  | O  |                    |  | O  | (-h)         |  +----+
    // |  |    |                    |  |    |              |
    // |  C----+                    |  C----+              |
    // +-----------+ (W,0)          +                      +
    // (0,0)                        (0,H)                  (0,H)
    //                              y' = H - h             y' = y

    // clip space origin is upper-left
    // Expected draw in GLES         default framebuffer    user defined framebuffer
    // (0,H)                        (0,0)                  (0,0)
    // +                            +-----------+ (W,0)    +-----------+ (W,0)
    // |                            |                      |  +----+
    // |                            |                      |  | O  | (-h)
    // |  C----+                    |  C----+              |  |    |
    // |  |    |                    |  |    | (h)          |  C----+
    // |  | O  |                    |  | O  |              |
    // |  +----+                    |  +----+              |
    // +-----------+ (W,0)          +                      +
    // (0,0)                        (0,H)                  (0,H)
    //                              y' = H - (y + h)       y' = y + H

    if (clipSpaceOriginUpperLeft)
    {
        if (invertViewport)
        {
            viewportOut->y = static_cast<float>(renderAreaHeight - (viewport.height + viewport.y));
        }
        else
        {
            // Move the origin to the top edge and flip the height (negative viewport height).
            viewportOut->y      = static_cast<float>(viewport.height + viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
    else
    {
        if (invertViewport)
        {
            viewportOut->y      = static_cast<float>(renderAreaHeight - viewport.y);
            viewportOut->height = -viewportOut->height;
        }
    }
}
1592
// Splits a gl::Extents into a Vulkan 3D extent plus a layer count, depending on texture type:
// cube maps get 6 layers, array types use extents.depth as the layer count, and everything
// else keeps depth in the extent with a single layer.
void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut)
{
    extentsOut->width  = extents.width;
    extentsOut->height = extents.height;

    switch (textureType)
    {
        case gl::TextureType::CubeMap:
            extentsOut->depth = 1;
            *layerCountOut    = gl::kCubeFaceCount;
            break;

        case gl::TextureType::_2DArray:
        case gl::TextureType::_2DMultisampleArray:
        case gl::TextureType::CubeMapArray:
            // For array textures, GL's "depth" is really the number of layers.
            extentsOut->depth = 1;
            *layerCountOut    = extents.depth;
            break;

        default:
            extentsOut->depth = extents.depth;
            *layerCountOut    = 1;
            break;
    }
}
1621
// Converts a GL mip level to a Vulkan image level by rebasing against the texture's base level
// (Vulkan levels are always relative to the allocated image's level 0).
vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel)
{
    ASSERT(baseLevel <= levelGL);
    return vk::LevelIndex(levelGL.get() - baseLevel.get());
}
1627
1628 } // namespace gl_vk
1629
1630 namespace vk_gl
1631 {
// Inserts every supported sample count present in |sampleCounts| into |setOut|, relying on
// VK_SAMPLE_COUNT_n_BIT == n (each bit b encodes count 1 << b).
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *setOut)
{
    // The possible bits are VK_SAMPLE_COUNT_n_BIT = n, with n = 1 << b.  At the time of this
    // writing, b is in [0, 6], however, we test all 32 bits in case the enum is extended.
    for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
    {
        setOut->insert(static_cast<GLuint>(1 << bit));
    }
}
1641
// Returns the largest supported sample count in |sampleCounts| (0 if none are supported).
// Bit iteration is lowest-to-highest, so the final assignment holds the maximum.
GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts)
{
    GLuint maxCount = 0;
    for (size_t bit : angle::BitSet32<32>(sampleCounts & kSupportedSampleCounts))
    {
        maxCount = static_cast<GLuint>(1 << bit);
    }
    return maxCount;
}
1651
// Returns the smallest supported sample count that is >= |requestedCount|.  Callers must have
// validated the request against the supported maximum; exceeding it is a bug (UNREACHABLE).
GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount)
{
    // Bits iterate from lowest to highest, so the first match is the minimal sufficient count.
    for (size_t bit : angle::BitSet32<32>(supportedCounts & kSupportedSampleCounts))
    {
        GLuint sampleCount = static_cast<GLuint>(1 << bit);
        if (sampleCount >= requestedCount)
        {
            return sampleCount;
        }
    }

    UNREACHABLE();
    return 0;
}
1666
// Converts a Vulkan image mip level back to a GL level by re-adding the texture's base level.
// Inverse of gl_vk::GetLevelIndex.
gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel)
{
    return gl::LevelIndex(levelVk.get() + baseLevel.get());
}
1671 } // namespace vk_gl
1672 } // namespace rx
1673