1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_wrapper:
7 // Wrapper classes around Vulkan objects. In an ideal world we could generate this
8 // from vk.xml. Or reuse the generator in the vkhpp tool. For now this is manually
9 // generated and we must add missing functions and objects as we need them.
10
11 #ifndef LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
12 #define LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
13
14 #include "common/vulkan/vk_headers.h"
15 #include "libANGLE/renderer/renderer_utils.h"
16 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
17 #include "libANGLE/trace.h"
18
19 namespace rx
20 {
21 enum class DescriptorSetIndex : uint32_t;
22
23 namespace vk
24 {
25 // Helper macros that apply to all the wrapped object types.
26 // Unimplemented handle types:
27 // Instance
28 // PhysicalDevice
29 // Device
30 // Queue
31 // DescriptorSet
32
// X-macro listing every wrapped Vulkan handle type defined in this file.
// Applying FUNC to each entry generates the HandleType enum, the class
// forward declarations and the HandleTypeHelper specializations below, so
// the list only has to be maintained in one place.
#define ANGLE_HANDLE_TYPES_X(FUNC) \
    FUNC(Allocation)               \
    FUNC(Allocator)                \
    FUNC(Buffer)                   \
    FUNC(BufferBlock)              \
    FUNC(BufferView)               \
    FUNC(CommandPool)              \
    FUNC(DescriptorPool)           \
    FUNC(DescriptorSetLayout)      \
    FUNC(DeviceMemory)             \
    FUNC(Event)                    \
    FUNC(Fence)                    \
    FUNC(Framebuffer)              \
    FUNC(Image)                    \
    FUNC(ImageView)                \
    FUNC(Pipeline)                 \
    FUNC(PipelineCache)            \
    FUNC(PipelineLayout)           \
    FUNC(QueryPool)                \
    FUNC(RenderPass)               \
    FUNC(Sampler)                  \
    FUNC(SamplerYcbcrConversion)   \
    FUNC(Semaphore)                \
    FUNC(ShaderModule)

// Expands to "TYPE," -- used to build the HandleType enumerator list.
#define ANGLE_COMMA_SEP_FUNC(TYPE) TYPE,

// Runtime tag identifying which wrapper class a handle belongs to.
// CommandBuffer is listed explicitly because it lives in the priv namespace
// and is therefore not part of ANGLE_HANDLE_TYPES_X.
enum class HandleType
{
    Invalid,
    CommandBuffer,
    ANGLE_HANDLE_TYPES_X(ANGLE_COMMA_SEP_FUNC) EnumCount
};

#undef ANGLE_COMMA_SEP_FUNC

// Forward-declare every wrapper class so declarations below may reference
// each other (e.g. Pipeline::initGraphics takes a PipelineCache).
#define ANGLE_PRE_DECLARE_CLASS_FUNC(TYPE) class TYPE;
ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)
namespace priv
{
class CommandBuffer;
}  // namespace priv
#undef ANGLE_PRE_DECLARE_CLASS_FUNC

// Returns the HandleType of a Vk Handle.
template <typename T>
struct HandleTypeHelper;

// Maps each wrapper class to its HandleType enumerator at compile time.
#define ANGLE_HANDLE_TYPE_HELPER_FUNC(TYPE)                         \
    template <>                                                     \
    struct HandleTypeHelper<TYPE>                                   \
    {                                                               \
        constexpr static HandleType kHandleType = HandleType::TYPE; \
    };

ANGLE_HANDLE_TYPES_X(ANGLE_HANDLE_TYPE_HELPER_FUNC)
template <>
struct HandleTypeHelper<priv::CommandBuffer>
{
    constexpr static HandleType kHandleType = HandleType::CommandBuffer;
};

#undef ANGLE_HANDLE_TYPE_HELPER_FUNC
96
// Base class for all wrapped vulkan objects. Implements several common helper routines.
template <typename DerivedT, typename HandleT>
class WrappedObject : angle::NonCopyable
{
  public:
    HandleT getHandle() const { return mHandle; }
    // Overwrites the stored handle without destroying the previous one; the
    // caller is responsible for the old handle's lifetime.
    void setHandle(HandleT handle) { mHandle = handle; }
    // True when this wrapper currently refers to a live Vulkan handle.
    bool valid() const { return (mHandle != VK_NULL_HANDLE); }

    // Address of the stored handle, for APIs that take handle arrays/pointers.
    const HandleT *ptr() const { return &mHandle; }

    // Relinquishes ownership: returns the handle and resets the wrapper to null.
    HandleT release()
    {
        HandleT handle = mHandle;
        mHandle = VK_NULL_HANDLE;
        return handle;
    }

  protected:
    WrappedObject() : mHandle(VK_NULL_HANDLE) {}
    // Derived classes must destroy or release the handle before destruction;
    // a live handle here would be a leak, hence the assert.
    ~WrappedObject() { ASSERT(!valid()); }

    WrappedObject(WrappedObject &&other) : mHandle(other.mHandle)
    {
        other.mHandle = VK_NULL_HANDLE;
    }

    // Only works to initialize empty objects, since we don't have the device handle.
    WrappedObject &operator=(WrappedObject &&other)
    {
        ASSERT(!valid());
        std::swap(mHandle, other.mHandle);
        return *this;
    }

    HandleT mHandle;
};
134
// Wraps VkCommandPool; command buffers are allocated from and freed back to a pool.
class CommandPool final : public WrappedObject<CommandPool, VkCommandPool>
{
  public:
    CommandPool() = default;

    void destroy(VkDevice device);
    VkResult reset(VkDevice device, VkCommandPoolResetFlags flags);
    void freeCommandBuffers(VkDevice device,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer *commandBuffers);

    VkResult init(VkDevice device, const VkCommandPoolCreateInfo &createInfo);
};

// Wraps VkPipeline; supports both graphics and compute pipeline creation.
class Pipeline final : public WrappedObject<Pipeline, VkPipeline>
{
  public:
    Pipeline() = default;
    void destroy(VkDevice device);

    VkResult initGraphics(VkDevice device,
                          const VkGraphicsPipelineCreateInfo &createInfo,
                          const PipelineCache &pipelineCacheVk);
    VkResult initCompute(VkDevice device,
                         const VkComputePipelineCreateInfo &createInfo,
                         const PipelineCache &pipelineCacheVk);
};
162
163 namespace priv
164 {
165
// Helper class that wraps a Vulkan command buffer.
class CommandBuffer : public WrappedObject<CommandBuffer, VkCommandBuffer>
{
  public:
    CommandBuffer() = default;

    // Returns the handle and clears it without freeing; ownership passes to the caller.
    VkCommandBuffer releaseHandle();

    // This is used for normal pool allocated command buffers. It reset the handle.
    void destroy(VkDevice device);

    // This is used in conjunction with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT.
    void destroy(VkDevice device, const CommandPool &commandPool);

    VkResult init(VkDevice device, const VkCommandBufferAllocateInfo &createInfo);

    using WrappedObject::operator=;

    // Queries recorded in these command buffers require device support for
    // inherited queries.
    static bool SupportsQueries(const VkPhysicalDeviceFeatures &features)
    {
        return (features.inheritedQueries == VK_TRUE);
    }

    // Vulkan command buffers are executed as secondary command buffers within a primary command
    // buffer.
    static constexpr bool ExecutesInline() { return false; }

    VkResult begin(const VkCommandBufferBeginInfo &info);

    void beginQuery(const QueryPool &queryPool, uint32_t query, VkQueryControlFlags flags);

    void beginRenderPass(const VkRenderPassBeginInfo &beginInfo, VkSubpassContents subpassContents);

    void bindDescriptorSets(const PipelineLayout &layout,
                            VkPipelineBindPoint pipelineBindPoint,
                            DescriptorSetIndex firstSet,
                            uint32_t descriptorSetCount,
                            const VkDescriptorSet *descriptorSets,
                            uint32_t dynamicOffsetCount,
                            const uint32_t *dynamicOffsets);
    void bindGraphicsPipeline(const Pipeline &pipeline);
    void bindComputePipeline(const Pipeline &pipeline);
    void bindPipeline(VkPipelineBindPoint pipelineBindPoint, const Pipeline &pipeline);

    void bindIndexBuffer(const Buffer &buffer, VkDeviceSize offset, VkIndexType indexType);
    void bindVertexBuffers(uint32_t firstBinding,
                           uint32_t bindingCount,
                           const VkBuffer *buffers,
                           const VkDeviceSize *offsets);

    void blitImage(const Image &srcImage,
                   VkImageLayout srcImageLayout,
                   const Image &dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageBlit *regions,
                   VkFilter filter);

    void clearColorImage(const Image &image,
                         VkImageLayout imageLayout,
                         const VkClearColorValue &color,
                         uint32_t rangeCount,
                         const VkImageSubresourceRange *ranges);
    void clearDepthStencilImage(const Image &image,
                                VkImageLayout imageLayout,
                                const VkClearDepthStencilValue &depthStencil,
                                uint32_t rangeCount,
                                const VkImageSubresourceRange *ranges);

    void clearAttachments(uint32_t attachmentCount,
                          const VkClearAttachment *attachments,
                          uint32_t rectCount,
                          const VkClearRect *rects);

    void copyBuffer(const Buffer &srcBuffer,
                    const Buffer &destBuffer,
                    uint32_t regionCount,
                    const VkBufferCopy *regions);

    void copyBufferToImage(VkBuffer srcBuffer,
                           const Image &dstImage,
                           VkImageLayout dstImageLayout,
                           uint32_t regionCount,
                           const VkBufferImageCopy *regions);
    void copyImageToBuffer(const Image &srcImage,
                           VkImageLayout srcImageLayout,
                           VkBuffer dstBuffer,
                           uint32_t regionCount,
                           const VkBufferImageCopy *regions);
    void copyImage(const Image &srcImage,
                   VkImageLayout srcImageLayout,
                   const Image &dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageCopy *regions);

    void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
    void dispatchIndirect(const Buffer &buffer, VkDeviceSize offset);

    void draw(uint32_t vertexCount,
              uint32_t instanceCount,
              uint32_t firstVertex,
              uint32_t firstInstance);
    void drawIndexed(uint32_t indexCount,
                     uint32_t instanceCount,
                     uint32_t firstIndex,
                     int32_t vertexOffset,
                     uint32_t firstInstance);
    void drawIndexedIndirect(const Buffer &buffer,
                             VkDeviceSize offset,
                             uint32_t drawCount,
                             uint32_t stride);
    void drawIndirect(const Buffer &buffer,
                      VkDeviceSize offset,
                      uint32_t drawCount,
                      uint32_t stride);

    VkResult end();
    void endQuery(const QueryPool &queryPool, uint32_t query);
    void endRenderPass();
    void executeCommands(uint32_t commandBufferCount, const CommandBuffer *commandBuffers);

    // Reports placeholder values; primary command buffers expose no usage data.
    void getMemoryUsageStats(size_t *usedMemoryOut, size_t *allocatedMemoryOut) const;

    void fillBuffer(const Buffer &dstBuffer,
                    VkDeviceSize dstOffset,
                    VkDeviceSize size,
                    uint32_t data);

    void imageBarrier(VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      const VkImageMemoryBarrier &imageMemoryBarrier);

    void nextSubpass(VkSubpassContents subpassContents);

    void memoryBarrier(VkPipelineStageFlags srcStageMask,
                       VkPipelineStageFlags dstStageMask,
                       const VkMemoryBarrier *memoryBarrier);

    void pipelineBarrier(VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         VkDependencyFlags dependencyFlags,
                         uint32_t memoryBarrierCount,
                         const VkMemoryBarrier *memoryBarriers,
                         uint32_t bufferMemoryBarrierCount,
                         const VkBufferMemoryBarrier *bufferMemoryBarriers,
                         uint32_t imageMemoryBarrierCount,
                         const VkImageMemoryBarrier *imageMemoryBarriers);

    void pushConstants(const PipelineLayout &layout,
                       VkShaderStageFlags flag,
                       uint32_t offset,
                       uint32_t size,
                       const void *data);

    void setEvent(VkEvent event, VkPipelineStageFlags stageMask);
    void setViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *viewports);
    void setScissor(uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *scissors);
    VkResult reset();
    void resetEvent(VkEvent event, VkPipelineStageFlags stageMask);
    void resetQueryPool(const QueryPool &queryPool, uint32_t firstQuery, uint32_t queryCount);
    void resolveImage(const Image &srcImage,
                      VkImageLayout srcImageLayout,
                      const Image &dstImage,
                      VkImageLayout dstImageLayout,
                      uint32_t regionCount,
                      const VkImageResolve *regions);
    void waitEvents(uint32_t eventCount,
                    const VkEvent *events,
                    VkPipelineStageFlags srcStageMask,
                    VkPipelineStageFlags dstStageMask,
                    uint32_t memoryBarrierCount,
                    const VkMemoryBarrier *memoryBarriers,
                    uint32_t bufferMemoryBarrierCount,
                    const VkBufferMemoryBarrier *bufferMemoryBarriers,
                    uint32_t imageMemoryBarrierCount,
                    const VkImageMemoryBarrier *imageMemoryBarriers);

    void writeTimestamp(VkPipelineStageFlagBits pipelineStage,
                        const QueryPool &queryPool,
                        uint32_t query);

    // VK_EXT_transform_feedback
    void beginTransformFeedback(uint32_t firstCounterBuffer,
                                uint32_t counterBufferCount,
                                const VkBuffer *counterBuffers,
                                const VkDeviceSize *counterBufferOffsets);
    void endTransformFeedback(uint32_t firstCounterBuffer,
                              uint32_t counterBufferCount,
                              const VkBuffer *counterBuffers,
                              const VkDeviceSize *counterBufferOffsets);
    void bindTransformFeedbackBuffers(uint32_t firstBinding,
                                      uint32_t bindingCount,
                                      const VkBuffer *buffers,
                                      const VkDeviceSize *offsets,
                                      const VkDeviceSize *sizes);

    // VK_EXT_debug_utils
    void beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
    void endDebugUtilsLabelEXT();
    void insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
};
368 } // namespace priv
369
// The un-restricted CommandBuffer wrapper doubles as the primary command buffer type.
using PrimaryCommandBuffer = priv::CommandBuffer;

// Wraps VkImage. Unlike most wrappers the handle may be externally owned
// (e.g. swapchain images), hence the setHandle/reset escape hatches.
class Image final : public WrappedObject<Image, VkImage>
{
  public:
    Image() = default;

    // Use this method if the lifetime of the image is not controlled by ANGLE. (SwapChain)
    void setHandle(VkImage handle);

    // Called on shutdown when the helper class *doesn't* own the handle to the image resource.
    void reset();

    // Called on shutdown when the helper class *does* own the handle to the image resource.
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkImageCreateInfo &createInfo);

    void getMemoryRequirements(VkDevice device, VkMemoryRequirements *requirementsOut) const;
    VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);
    VkResult bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo);

    void getSubresourceLayout(VkDevice device,
                              VkImageAspectFlagBits aspectMask,
                              uint32_t mipLevel,
                              uint32_t arrayLayer,
                              VkSubresourceLayout *outSubresourceLayout) const;
};

// Wraps VkImageView.
class ImageView final : public WrappedObject<ImageView, VkImageView>
{
  public:
    ImageView() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkImageViewCreateInfo &createInfo);
};

// Wraps VkSemaphore; supports importing external semaphores via file descriptors.
class Semaphore final : public WrappedObject<Semaphore, VkSemaphore>
{
  public:
    Semaphore() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device);
    VkResult importFd(VkDevice device, const VkImportSemaphoreFdInfoKHR &importFdInfo) const;
};

// Wraps VkFramebuffer.
class Framebuffer final : public WrappedObject<Framebuffer, VkFramebuffer>
{
  public:
    Framebuffer() = default;
    void destroy(VkDevice device);

    // Use this method only in necessary cases. (RenderPass)
    void setHandle(VkFramebuffer handle);

    VkResult init(VkDevice device, const VkFramebufferCreateInfo &createInfo);
};

// Wraps VkDeviceMemory and its map/flush/invalidate operations.
class DeviceMemory final : public WrappedObject<DeviceMemory, VkDeviceMemory>
{
  public:
    DeviceMemory() = default;
    void destroy(VkDevice device);

    VkResult allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo);
    VkResult map(VkDevice device,
                 VkDeviceSize offset,
                 VkDeviceSize size,
                 VkMemoryMapFlags flags,
                 uint8_t **mapPointer) const;
    void unmap(VkDevice device) const;
    void flush(VkDevice device, VkMappedMemoryRange &memRange);
    void invalidate(VkDevice device, VkMappedMemoryRange &memRange);
};
446
// Wraps a VmaAllocator (Vulkan Memory Allocator library), which owns device
// memory pools and services buffer allocations.
class Allocator : public WrappedObject<Allocator, VmaAllocator>
{
  public:
    Allocator() = default;
    // Note: no device parameter -- VMA stores the device internally.
    void destroy();

    VkResult init(VkPhysicalDevice physicalDevice,
                  VkDevice device,
                  VkInstance instance,
                  uint32_t apiVersion,
                  VkDeviceSize preferredLargeHeapBlockSize);

    // Initializes the buffer handle and memory allocation.
    VkResult createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                          VkMemoryPropertyFlags requiredFlags,
                          VkMemoryPropertyFlags preferredFlags,
                          bool persistentlyMappedBuffers,
                          uint32_t *memoryTypeIndexOut,
                          Buffer *bufferOut,
                          Allocation *allocationOut) const;

    void getMemoryTypeProperties(uint32_t memoryTypeIndex, VkMemoryPropertyFlags *flagsOut) const;
    VkResult findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut) const;

    // Debug helpers: the returned string must be released with freeStatsString.
    void buildStatsString(char **statsString, VkBool32 detailedMap);
    void freeStatsString(char *statsString);
};

// Wraps a VmaAllocation -- a sub-allocation handed out by an Allocator.
class Allocation final : public WrappedObject<Allocation, VmaAllocation>
{
  public:
    Allocation() = default;
    void destroy(const Allocator &allocator);

    VkResult map(const Allocator &allocator, uint8_t **mapPointer) const;
    void unmap(const Allocator &allocator) const;
    void flush(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;
    void invalidate(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;

  private:
    // Allocator::createBuffer writes the handle directly.
    friend class Allocator;
};

// Wraps VkRenderPass; init2 uses the VK_KHR_create_renderpass2-style create info.
class RenderPass final : public WrappedObject<RenderPass, VkRenderPass>
{
  public:
    RenderPass() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkRenderPassCreateInfo &createInfo);
    VkResult init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo);
};

// Access direction for staging resources.
enum class StagingUsage
{
    Read,
    Write,
    Both,
};
510
// Wraps VkBuffer.
class Buffer final : public WrappedObject<Buffer, VkBuffer>
{
  public:
    Buffer() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkBufferCreateInfo &createInfo);
    VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory, VkDeviceSize offset);
    void getMemoryRequirements(VkDevice device, VkMemoryRequirements *memoryRequirementsOut);

  private:
    // Allocator::createBuffer writes the handle directly.
    friend class Allocator;
};

// Wraps VkBufferView.
class BufferView final : public WrappedObject<BufferView, VkBufferView>
{
  public:
    BufferView() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkBufferViewCreateInfo &createInfo);
};

// Wraps VkShaderModule.
class ShaderModule final : public WrappedObject<ShaderModule, VkShaderModule>
{
  public:
    ShaderModule() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkShaderModuleCreateInfo &createInfo);
};

// Wraps VkPipelineLayout.
class PipelineLayout final : public WrappedObject<PipelineLayout, VkPipelineLayout>
{
  public:
    PipelineLayout() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkPipelineLayoutCreateInfo &createInfo);
};

// Wraps VkPipelineCache; supports serialization and merging of caches.
class PipelineCache final : public WrappedObject<PipelineCache, VkPipelineCache>
{
  public:
    PipelineCache() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkPipelineCacheCreateInfo &createInfo);
    VkResult getCacheData(VkDevice device, size_t *cacheSize, void *cacheData);
    VkResult merge(VkDevice device,
                   VkPipelineCache dstCache,
                   uint32_t srcCacheCount,
                   const VkPipelineCache *srcCaches);
};

// Wraps VkDescriptorSetLayout.
class DescriptorSetLayout final : public WrappedObject<DescriptorSetLayout, VkDescriptorSetLayout>
{
  public:
    DescriptorSetLayout() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkDescriptorSetLayoutCreateInfo &createInfo);
};

// Wraps VkDescriptorPool; descriptor sets themselves are not wrapped (see the
// "Unimplemented handle types" note at the top of the file).
class DescriptorPool final : public WrappedObject<DescriptorPool, VkDescriptorPool>
{
  public:
    DescriptorPool() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkDescriptorPoolCreateInfo &createInfo);

    VkResult allocateDescriptorSets(VkDevice device,
                                    const VkDescriptorSetAllocateInfo &allocInfo,
                                    VkDescriptorSet *descriptorSetsOut);
    VkResult freeDescriptorSets(VkDevice device,
                                uint32_t descriptorSetCount,
                                const VkDescriptorSet *descriptorSets);
};

// Wraps VkSampler.
class Sampler final : public WrappedObject<Sampler, VkSampler>
{
  public:
    Sampler() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, const VkSamplerCreateInfo &createInfo);
};
598
// Wraps VkSamplerYcbcrConversion (YUV sampling support).
class SamplerYcbcrConversion final
    : public WrappedObject<SamplerYcbcrConversion, VkSamplerYcbcrConversion>
{
  public:
    SamplerYcbcrConversion() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo);
};

// Wraps VkEvent for fine-grained GPU/host synchronization.
class Event final : public WrappedObject<Event, VkEvent>
{
  public:
    Event() = default;
    void destroy(VkDevice device);
    // Allow move-assignment of empty wrappers (see WrappedObject::operator=).
    using WrappedObject::operator=;

    VkResult init(VkDevice device, const VkEventCreateInfo &createInfo);
    VkResult getStatus(VkDevice device) const;
    VkResult set(VkDevice device) const;
    VkResult reset(VkDevice device) const;
};

// Wraps VkFence; supports external fence import/export via file descriptors.
class Fence final : public WrappedObject<Fence, VkFence>
{
  public:
    Fence() = default;
    void destroy(VkDevice device);
    // Allow move-assignment of empty wrappers (see WrappedObject::operator=).
    using WrappedObject::operator=;

    VkResult init(VkDevice device, const VkFenceCreateInfo &createInfo);
    VkResult reset(VkDevice device);
    VkResult getStatus(VkDevice device) const;
    VkResult wait(VkDevice device, uint64_t timeout) const;
    VkResult importFd(VkDevice device, const VkImportFenceFdInfoKHR &importFenceFdInfo) const;
    VkResult exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo, int *outFd) const;
};

// Wraps VkQueryPool.
class QueryPool final : public WrappedObject<QueryPool, VkQueryPool>
{
  public:
    QueryPool() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkQueryPoolCreateInfo &createInfo);
    VkResult getResults(VkDevice device,
                        uint32_t firstQuery,
                        uint32_t queryCount,
                        size_t dataSize,
                        void *data,
                        VkDeviceSize stride,
                        VkQueryResultFlags flags) const;
};

// VirtualBlock
// Wraps a VmaVirtualBlock: CPU-side sub-allocation bookkeeping over a range
// of memory, without any actual Vulkan allocation.
class VirtualBlock final : public WrappedObject<VirtualBlock, VmaVirtualBlock>
{
  public:
    VirtualBlock() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, vma::VirtualBlockCreateFlags flags, VkDeviceSize size);

    VkResult allocate(VkDeviceSize size, VkDeviceSize alignment, VkDeviceSize *offsetOut);
    void free(VkDeviceSize offset);
};
663
// CommandPool implementation.
ANGLE_INLINE void CommandPool::destroy(VkDevice device)
{
    // Destroying the pool also frees every command buffer allocated from it.
    if (valid())
    {
        vkDestroyCommandPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult CommandPool::reset(VkDevice device, VkCommandPoolResetFlags flags)
{
    ASSERT(valid());
    return vkResetCommandPool(device, mHandle, flags);
}

ANGLE_INLINE void CommandPool::freeCommandBuffers(VkDevice device,
                                                  uint32_t commandBufferCount,
                                                  const VkCommandBuffer *commandBuffers)
{
    ASSERT(valid());
    vkFreeCommandBuffers(device, mHandle, commandBufferCount, commandBuffers);
}

ANGLE_INLINE VkResult CommandPool::init(VkDevice device, const VkCommandPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateCommandPool(device, &createInfo, nullptr, &mHandle);
}
693
694 namespace priv
695 {
696
697 // CommandBuffer implementation.
// CommandBuffer implementation.
// Returns the raw handle and clears the wrapper; the caller takes ownership
// (used when handing the buffer back to a pool-managed container).
ANGLE_INLINE VkCommandBuffer CommandBuffer::releaseHandle()
{
    VkCommandBuffer handle = mHandle;
    mHandle                = nullptr;
    return handle;
}

ANGLE_INLINE VkResult CommandBuffer::init(VkDevice device,
                                          const VkCommandBufferAllocateInfo &createInfo)
{
    ASSERT(!valid());
    // Allocates exactly one command buffer; createInfo.commandBufferCount must be 1.
    return vkAllocateCommandBuffers(device, &createInfo, &mHandle);
}

ANGLE_INLINE void CommandBuffer::blitImage(const Image &srcImage,
                                           VkImageLayout srcImageLayout,
                                           const Image &dstImage,
                                           VkImageLayout dstImageLayout,
                                           uint32_t regionCount,
                                           const VkImageBlit *regions,
                                           VkFilter filter)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    // Callers only ever blit a single region; the count is hard-coded below.
    ASSERT(regionCount == 1);
    vkCmdBlitImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                   dstImageLayout, 1, regions, filter);
}

ANGLE_INLINE VkResult CommandBuffer::begin(const VkCommandBufferBeginInfo &info)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "CommandBuffer::begin");
    ASSERT(valid());
    return vkBeginCommandBuffer(mHandle, &info);
}
732
ANGLE_INLINE VkResult CommandBuffer::end()
{
    ANGLE_TRACE_EVENT0("gpu.angle", "CommandBuffer::end");
    ASSERT(valid());
    return vkEndCommandBuffer(mHandle);
}

ANGLE_INLINE VkResult CommandBuffer::reset()
{
    ANGLE_TRACE_EVENT0("gpu.angle", "CommandBuffer::reset");
    ASSERT(valid());
    // Flags of 0 releases no resources back to the pool.
    return vkResetCommandBuffer(mHandle, 0);
}

ANGLE_INLINE void CommandBuffer::nextSubpass(VkSubpassContents subpassContents)
{
    ASSERT(valid());
    vkCmdNextSubpass(mHandle, subpassContents);
}

// Records a single global memory barrier (no buffer/image barriers).
ANGLE_INLINE void CommandBuffer::memoryBarrier(VkPipelineStageFlags srcStageMask,
                                               VkPipelineStageFlags dstStageMask,
                                               const VkMemoryBarrier *memoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 1, memoryBarrier, 0, nullptr, 0,
                         nullptr);
}
761
ANGLE_INLINE void CommandBuffer::pipelineBarrier(VkPipelineStageFlags srcStageMask,
                                                 VkPipelineStageFlags dstStageMask,
                                                 VkDependencyFlags dependencyFlags,
                                                 uint32_t memoryBarrierCount,
                                                 const VkMemoryBarrier *memoryBarriers,
                                                 uint32_t bufferMemoryBarrierCount,
                                                 const VkBufferMemoryBarrier *bufferMemoryBarriers,
                                                 uint32_t imageMemoryBarrierCount,
                                                 const VkImageMemoryBarrier *imageMemoryBarriers)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount,
                         memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
                         imageMemoryBarrierCount, imageMemoryBarriers);
}

// Records a single image layout-transition barrier.
ANGLE_INLINE void CommandBuffer::imageBarrier(VkPipelineStageFlags srcStageMask,
                                              VkPipelineStageFlags dstStageMask,
                                              const VkImageMemoryBarrier &imageMemoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1,
                         &imageMemoryBarrier);
}

// Pool-allocated buffers are reclaimed when their pool is destroyed/reset, so
// this only drops the handle; the device parameter is unused here.
ANGLE_INLINE void CommandBuffer::destroy(VkDevice device)
{
    releaseHandle();
}

// Individually frees the buffer back to its pool (requires the pool to allow
// per-buffer reset, see the declaration's comment).
ANGLE_INLINE void CommandBuffer::destroy(VkDevice device, const vk::CommandPool &commandPool)
{
    if (valid())
    {
        ASSERT(commandPool.valid());
        vkFreeCommandBuffers(device, commandPool.getHandle(), 1, &mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}
801
ANGLE_INLINE void CommandBuffer::copyBuffer(const Buffer &srcBuffer,
                                            const Buffer &destBuffer,
                                            uint32_t regionCount,
                                            const VkBufferCopy *regions)
{
    ASSERT(valid() && srcBuffer.valid() && destBuffer.valid());
    vkCmdCopyBuffer(mHandle, srcBuffer.getHandle(), destBuffer.getHandle(), regionCount, regions);
}

ANGLE_INLINE void CommandBuffer::copyBufferToImage(VkBuffer srcBuffer,
                                                   const Image &dstImage,
                                                   VkImageLayout dstImageLayout,
                                                   uint32_t regionCount,
                                                   const VkBufferImageCopy *regions)
{
    ASSERT(valid() && dstImage.valid());
    ASSERT(srcBuffer != VK_NULL_HANDLE);
    // Callers only ever copy a single region; the count is hard-coded below.
    ASSERT(regionCount == 1);
    vkCmdCopyBufferToImage(mHandle, srcBuffer, dstImage.getHandle(), dstImageLayout, 1, regions);
}

ANGLE_INLINE void CommandBuffer::copyImageToBuffer(const Image &srcImage,
                                                   VkImageLayout srcImageLayout,
                                                   VkBuffer dstBuffer,
                                                   uint32_t regionCount,
                                                   const VkBufferImageCopy *regions)
{
    ASSERT(valid() && srcImage.valid());
    ASSERT(dstBuffer != VK_NULL_HANDLE);
    // Callers only ever copy a single region; the count is hard-coded below.
    ASSERT(regionCount == 1);
    vkCmdCopyImageToBuffer(mHandle, srcImage.getHandle(), srcImageLayout, dstBuffer, 1, regions);
}
834
835 ANGLE_INLINE void CommandBuffer::clearColorImage(const Image &image,
836 VkImageLayout imageLayout,
837 const VkClearColorValue &color,
838 uint32_t rangeCount,
839 const VkImageSubresourceRange *ranges)
840 {
841 ASSERT(valid());
842 ASSERT(rangeCount == 1);
843 vkCmdClearColorImage(mHandle, image.getHandle(), imageLayout, &color, 1, ranges);
844 }
845
846 ANGLE_INLINE void CommandBuffer::clearDepthStencilImage(
847 const Image &image,
848 VkImageLayout imageLayout,
849 const VkClearDepthStencilValue &depthStencil,
850 uint32_t rangeCount,
851 const VkImageSubresourceRange *ranges)
852 {
853 ASSERT(valid());
854 ASSERT(rangeCount == 1);
855 vkCmdClearDepthStencilImage(mHandle, image.getHandle(), imageLayout, &depthStencil, 1, ranges);
856 }
857
ANGLE_INLINE void CommandBuffer::clearAttachments(uint32_t attachmentCount,
                                                  const VkClearAttachment *attachments,
                                                  uint32_t rectCount,
                                                  const VkClearRect *rects)
{
    ASSERT(valid());
    vkCmdClearAttachments(mHandle, attachmentCount, attachments, rectCount, rects);
}

ANGLE_INLINE void CommandBuffer::copyImage(const Image &srcImage,
                                           VkImageLayout srcImageLayout,
                                           const Image &dstImage,
                                           VkImageLayout dstImageLayout,
                                           uint32_t regionCount,
                                           const VkImageCopy *regions)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    // Callers only ever copy a single region; the count is hard-coded below.
    ASSERT(regionCount == 1);
    vkCmdCopyImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                   dstImageLayout, 1, regions);
}

ANGLE_INLINE void CommandBuffer::beginRenderPass(const VkRenderPassBeginInfo &beginInfo,
                                                 VkSubpassContents subpassContents)
{
    ASSERT(valid());
    vkCmdBeginRenderPass(mHandle, &beginInfo, subpassContents);
}
886
887 ANGLE_INLINE void CommandBuffer::endRenderPass()
888 {
889 ASSERT(mHandle != VK_NULL_HANDLE);
890 vkCmdEndRenderPass(mHandle);
891 }
892
ANGLE_INLINE void CommandBuffer::bindIndexBuffer(const Buffer &buffer,
                                                 VkDeviceSize offset,
                                                 VkIndexType indexType)
{
    ASSERT(valid());
    vkCmdBindIndexBuffer(mHandle, buffer.getHandle(), offset, indexType);
}

ANGLE_INLINE void CommandBuffer::bindDescriptorSets(const PipelineLayout &layout,
                                                    VkPipelineBindPoint pipelineBindPoint,
                                                    DescriptorSetIndex firstSet,
                                                    uint32_t descriptorSetCount,
                                                    const VkDescriptorSet *descriptorSets,
                                                    uint32_t dynamicOffsetCount,
                                                    const uint32_t *dynamicOffsets)
{
    ASSERT(valid() && layout.valid());
    // DescriptorSetIndex is a typed enum; convert to the raw uint32_t set index.
    vkCmdBindDescriptorSets(this->mHandle, pipelineBindPoint, layout.getHandle(),
                            ToUnderlying(firstSet), descriptorSetCount, descriptorSets,
                            dynamicOffsetCount, dynamicOffsets);
}

ANGLE_INLINE void CommandBuffer::executeCommands(uint32_t commandBufferCount,
                                                 const CommandBuffer *commandBuffers)
{
    ASSERT(valid());
    // CommandBuffer wraps only a VkCommandBuffer, so the array of wrappers is
    // layout-compatible with an array of raw handles starting at element 0.
    vkCmdExecuteCommands(mHandle, commandBufferCount, commandBuffers[0].ptr());
}

ANGLE_INLINE void CommandBuffer::getMemoryUsageStats(size_t *usedMemoryOut,
                                                     size_t *allocatedMemoryOut) const
{
    // No data available.
    // Report 1 allocated byte (not 0) so that ratio computations by callers
    // remain well-defined -- presumably to avoid divide-by-zero; verify against callers.
    *usedMemoryOut      = 0;
    *allocatedMemoryOut = 1;
}
929
ANGLE_INLINE void CommandBuffer::fillBuffer(const Buffer &dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize size,
                                            uint32_t data)
{
    ASSERT(valid());
    vkCmdFillBuffer(mHandle, dstBuffer.getHandle(), dstOffset, size, data);
}

ANGLE_INLINE void CommandBuffer::pushConstants(const PipelineLayout &layout,
                                               VkShaderStageFlags flag,
                                               uint32_t offset,
                                               uint32_t size,
                                               const void *data)
{
    ASSERT(valid() && layout.valid());
    // Only offset 0 is supported; the call below hard-codes it.
    ASSERT(offset == 0);
    vkCmdPushConstants(mHandle, layout.getHandle(), flag, 0, size, data);
}

ANGLE_INLINE void CommandBuffer::setEvent(VkEvent event, VkPipelineStageFlags stageMask)
{
    ASSERT(valid() && event != VK_NULL_HANDLE);
    vkCmdSetEvent(mHandle, event, stageMask);
}

ANGLE_INLINE void CommandBuffer::setViewport(uint32_t firstViewport,
                                             uint32_t viewportCount,
                                             const VkViewport *viewports)
{
    ASSERT(valid() && viewports != nullptr);
    vkCmdSetViewport(mHandle, firstViewport, viewportCount, viewports);
}

ANGLE_INLINE void CommandBuffer::setScissor(uint32_t firstScissor,
                                            uint32_t scissorCount,
                                            const VkRect2D *scissors)
{
    ASSERT(valid() && scissors != nullptr);
    vkCmdSetScissor(mHandle, firstScissor, scissorCount, scissors);
}

ANGLE_INLINE void CommandBuffer::resetEvent(VkEvent event, VkPipelineStageFlags stageMask)
{
    ASSERT(valid() && event != VK_NULL_HANDLE);
    vkCmdResetEvent(mHandle, event, stageMask);
}
977
// Records a wait on |eventCount| events, applying the given memory/buffer/image barriers
// between the signaling (src) and waiting (dst) pipeline stages.
ANGLE_INLINE void CommandBuffer::waitEvents(uint32_t eventCount,
                                            const VkEvent *events,
                                            VkPipelineStageFlags srcStageMask,
                                            VkPipelineStageFlags dstStageMask,
                                            uint32_t memoryBarrierCount,
                                            const VkMemoryBarrier *memoryBarriers,
                                            uint32_t bufferMemoryBarrierCount,
                                            const VkBufferMemoryBarrier *bufferMemoryBarriers,
                                            uint32_t imageMemoryBarrierCount,
                                            const VkImageMemoryBarrier *imageMemoryBarriers)
{
    ASSERT(valid());
    vkCmdWaitEvents(mHandle, eventCount, events, srcStageMask, dstStageMask, memoryBarrierCount,
                    memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
                    imageMemoryBarrierCount, imageMemoryBarriers);
}
994
// Records a reset of |queryCount| queries starting at |firstQuery| to the unavailable state.
ANGLE_INLINE void CommandBuffer::resetQueryPool(const QueryPool &queryPool,
                                                uint32_t firstQuery,
                                                uint32_t queryCount)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdResetQueryPool(mHandle, queryPool.getHandle(), firstQuery, queryCount);
}
1002
// Records a multisample resolve of |regionCount| regions from |srcImage| into |dstImage|.
ANGLE_INLINE void CommandBuffer::resolveImage(const Image &srcImage,
                                              VkImageLayout srcImageLayout,
                                              const Image &dstImage,
                                              VkImageLayout dstImageLayout,
                                              uint32_t regionCount,
                                              const VkImageResolve *regions)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    vkCmdResolveImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                      dstImageLayout, regionCount, regions);
}
1014
// Records the start of query |query| in |queryPool|; must be paired with endQuery.
ANGLE_INLINE void CommandBuffer::beginQuery(const QueryPool &queryPool,
                                            uint32_t query,
                                            VkQueryControlFlags flags)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdBeginQuery(mHandle, queryPool.getHandle(), query, flags);
}
1022
// Records the end of query |query| previously started with beginQuery.
ANGLE_INLINE void CommandBuffer::endQuery(const QueryPool &queryPool, uint32_t query)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdEndQuery(mHandle, queryPool.getHandle(), query);
}
1028
1029 ANGLE_INLINE void CommandBuffer::writeTimestamp(VkPipelineStageFlagBits pipelineStage,
1030 const QueryPool &queryPool,
1031 uint32_t query)
1032 {
1033 ASSERT(valid());
1034 vkCmdWriteTimestamp(mHandle, pipelineStage, queryPool.getHandle(), query);
1035 }
1036
// Records a non-indexed draw of |vertexCount| vertices and |instanceCount| instances.
ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount,
                                      uint32_t instanceCount,
                                      uint32_t firstVertex,
                                      uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
}
1045
// Records an indexed draw; |vertexOffset| is added to each index before vertex fetch.
ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount,
                                             uint32_t instanceCount,
                                             uint32_t firstIndex,
                                             int32_t vertexOffset,
                                             uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}
1055
// Records |drawCount| indexed draws whose parameters are read from |buffer| at |offset|,
// each command |stride| bytes apart.
ANGLE_INLINE void CommandBuffer::drawIndexedIndirect(const Buffer &buffer,
                                                     VkDeviceSize offset,
                                                     uint32_t drawCount,
                                                     uint32_t stride)
{
    ASSERT(valid());
    vkCmdDrawIndexedIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
}
1064
// Records |drawCount| non-indexed draws whose parameters are read from |buffer| at |offset|,
// each command |stride| bytes apart.
ANGLE_INLINE void CommandBuffer::drawIndirect(const Buffer &buffer,
                                              VkDeviceSize offset,
                                              uint32_t drawCount,
                                              uint32_t stride)
{
    ASSERT(valid());
    vkCmdDrawIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
}
1073
// Records a compute dispatch of the given workgroup grid dimensions.
ANGLE_INLINE void CommandBuffer::dispatch(uint32_t groupCountX,
                                          uint32_t groupCountY,
                                          uint32_t groupCountZ)
{
    ASSERT(valid());
    vkCmdDispatch(mHandle, groupCountX, groupCountY, groupCountZ);
}
1081
// Records a compute dispatch whose workgroup counts are read from |buffer| at |offset|.
ANGLE_INLINE void CommandBuffer::dispatchIndirect(const Buffer &buffer, VkDeviceSize offset)
{
    ASSERT(valid());
    vkCmdDispatchIndirect(mHandle, buffer.getHandle(), offset);
}
1087
// Binds |pipeline| to the given bind point (graphics or compute).
ANGLE_INLINE void CommandBuffer::bindPipeline(VkPipelineBindPoint pipelineBindPoint,
                                              const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, pipelineBindPoint, pipeline.getHandle());
}
1094
// Convenience wrapper: binds |pipeline| to the graphics bind point.
ANGLE_INLINE void CommandBuffer::bindGraphicsPipeline(const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.getHandle());
}
1100
// Convenience wrapper: binds |pipeline| to the compute bind point.
ANGLE_INLINE void CommandBuffer::bindComputePipeline(const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.getHandle());
}
1106
// Binds |bindingCount| vertex buffers (with per-buffer byte offsets) starting at |firstBinding|.
ANGLE_INLINE void CommandBuffer::bindVertexBuffers(uint32_t firstBinding,
                                                   uint32_t bindingCount,
                                                   const VkBuffer *buffers,
                                                   const VkDeviceSize *offsets)
{
    ASSERT(valid());
    vkCmdBindVertexBuffers(mHandle, firstBinding, bindingCount, buffers, offsets);
}
1115
// Records the start of transform feedback (VK_EXT_transform_feedback).  The extension
// entry point is loaded dynamically, hence the non-null assert before the call.
ANGLE_INLINE void CommandBuffer::beginTransformFeedback(uint32_t firstCounterBuffer,
                                                        uint32_t counterBufferCount,
                                                        const VkBuffer *counterBuffers,
                                                        const VkDeviceSize *counterBufferOffsets)
{
    ASSERT(valid());
    ASSERT(vkCmdBeginTransformFeedbackEXT);
    vkCmdBeginTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
                                   counterBufferOffsets);
}
1126
// Records the end of transform feedback (VK_EXT_transform_feedback).  The extension
// entry point is loaded dynamically, hence the non-null assert before the call.
ANGLE_INLINE void CommandBuffer::endTransformFeedback(uint32_t firstCounterBuffer,
                                                      uint32_t counterBufferCount,
                                                      const VkBuffer *counterBuffers,
                                                      const VkDeviceSize *counterBufferOffsets)
{
    ASSERT(valid());
    ASSERT(vkCmdEndTransformFeedbackEXT);
    vkCmdEndTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
                                 counterBufferOffsets);
}
1137
// Binds the buffers that capture transform feedback output (VK_EXT_transform_feedback).
// The extension entry point is loaded dynamically, hence the non-null assert.
ANGLE_INLINE void CommandBuffer::bindTransformFeedbackBuffers(uint32_t firstBinding,
                                                              uint32_t bindingCount,
                                                              const VkBuffer *buffers,
                                                              const VkDeviceSize *offsets,
                                                              const VkDeviceSize *sizes)
{
    ASSERT(valid());
    ASSERT(vkCmdBindTransformFeedbackBuffersEXT);
    vkCmdBindTransformFeedbackBuffersEXT(mHandle, firstBinding, bindingCount, buffers, offsets,
                                         sizes);
}
1149
// Opens a debug-utils label region (VK_EXT_debug_utils) in this command buffer.
ANGLE_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
{
    ASSERT(valid());
    // Inner scope limits the using-declaration below to this call.
    {
#if !defined(ANGLE_SHARED_LIBVULKAN)
        // When the vulkan-loader is statically linked, we need to use the extension
        // functions defined in ANGLE's rx namespace. When it's dynamically linked
        // with volk, this will default to the function definitions with no namespace
        using rx::vkCmdBeginDebugUtilsLabelEXT;
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
        ASSERT(vkCmdBeginDebugUtilsLabelEXT);
        vkCmdBeginDebugUtilsLabelEXT(mHandle, &labelInfo);
    }
}
1164
// Closes the most recently opened debug-utils label region (VK_EXT_debug_utils).
ANGLE_INLINE void CommandBuffer::endDebugUtilsLabelEXT()
{
    ASSERT(valid());
    ASSERT(vkCmdEndDebugUtilsLabelEXT);
    vkCmdEndDebugUtilsLabelEXT(mHandle);
}
1171
// Inserts a single (non-region) debug-utils label into this command buffer.
ANGLE_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
{
    ASSERT(valid());
    ASSERT(vkCmdInsertDebugUtilsLabelEXT);
    vkCmdInsertDebugUtilsLabelEXT(mHandle, &labelInfo);
}
1178 } // namespace priv
1179
1180 // Image implementation.
// Adopts an externally created VkImage handle; this wrapper does not take ownership
// of the previous handle, if any.
ANGLE_INLINE void Image::setHandle(VkImage handle)
{
    mHandle = handle;
}
1185
// Forgets the wrapped handle without destroying it (for externally owned images).
ANGLE_INLINE void Image::reset()
{
    mHandle = VK_NULL_HANDLE;
}
1190
1191 ANGLE_INLINE void Image::destroy(VkDevice device)
1192 {
1193 if (valid())
1194 {
1195 vkDestroyImage(device, mHandle, nullptr);
1196 mHandle = VK_NULL_HANDLE;
1197 }
1198 }
1199
// Creates the VkImage described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult Image::init(VkDevice device, const VkImageCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateImage(device, &createInfo, nullptr, &mHandle);
}
1205
// Queries size/alignment/memory-type requirements for backing this image.
ANGLE_INLINE void Image::getMemoryRequirements(VkDevice device,
                                               VkMemoryRequirements *requirementsOut) const
{
    ASSERT(valid());
    vkGetImageMemoryRequirements(device, mHandle, requirementsOut);
}
1212
// Binds |deviceMemory| (at offset 0) as the backing store for this image.
ANGLE_INLINE VkResult Image::bindMemory(VkDevice device, const vk::DeviceMemory &deviceMemory)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindImageMemory(device, mHandle, deviceMemory.getHandle(), 0);
}
1218
// Binds memory via vkBindImageMemory2KHR; |bindInfo| carries the image/memory pair
// and any pNext extensions (e.g. for multi-plane or swapchain binding).
ANGLE_INLINE VkResult Image::bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo)
{
    ASSERT(valid());
    return vkBindImageMemory2KHR(device, 1, &bindInfo);
}
1224
1225 ANGLE_INLINE void Image::getSubresourceLayout(VkDevice device,
1226 VkImageAspectFlagBits aspectMask,
1227 uint32_t mipLevel,
1228 uint32_t arrayLayer,
1229 VkSubresourceLayout *outSubresourceLayout) const
1230 {
1231 VkImageSubresource subresource = {};
1232 subresource.aspectMask = aspectMask;
1233 subresource.mipLevel = mipLevel;
1234 subresource.arrayLayer = arrayLayer;
1235
1236 vkGetImageSubresourceLayout(device, getHandle(), &subresource, outSubresourceLayout);
1237 }
1238
1239 // ImageView implementation.
1240 ANGLE_INLINE void ImageView::destroy(VkDevice device)
1241 {
1242 if (valid())
1243 {
1244 vkDestroyImageView(device, mHandle, nullptr);
1245 mHandle = VK_NULL_HANDLE;
1246 }
1247 }
1248
1249 ANGLE_INLINE VkResult ImageView::init(VkDevice device, const VkImageViewCreateInfo &createInfo)
1250 {
1251 return vkCreateImageView(device, &createInfo, nullptr, &mHandle);
1252 }
1253
1254 // Semaphore implementation.
1255 ANGLE_INLINE void Semaphore::destroy(VkDevice device)
1256 {
1257 if (valid())
1258 {
1259 vkDestroySemaphore(device, mHandle, nullptr);
1260 mHandle = VK_NULL_HANDLE;
1261 }
1262 }
1263
1264 ANGLE_INLINE VkResult Semaphore::init(VkDevice device)
1265 {
1266 ASSERT(!valid());
1267
1268 VkSemaphoreCreateInfo semaphoreInfo = {};
1269 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
1270 semaphoreInfo.flags = 0;
1271
1272 return vkCreateSemaphore(device, &semaphoreInfo, nullptr, &mHandle);
1273 }
1274
// Imports an external semaphore payload from a POSIX fd (VK_KHR_external_semaphore_fd).
ANGLE_INLINE VkResult Semaphore::importFd(VkDevice device,
                                          const VkImportSemaphoreFdInfoKHR &importFdInfo) const
{
    ASSERT(valid());
    return vkImportSemaphoreFdKHR(device, &importFdInfo);
}
1281
1282 // Framebuffer implementation.
1283 ANGLE_INLINE void Framebuffer::destroy(VkDevice device)
1284 {
1285 if (valid())
1286 {
1287 vkDestroyFramebuffer(device, mHandle, nullptr);
1288 mHandle = VK_NULL_HANDLE;
1289 }
1290 }
1291
// Creates the VkFramebuffer described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult Framebuffer::init(VkDevice device, const VkFramebufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFramebuffer(device, &createInfo, nullptr, &mHandle);
}
1297
// Adopts an externally created VkFramebuffer handle; the previous handle, if any,
// is not destroyed.
ANGLE_INLINE void Framebuffer::setHandle(VkFramebuffer handle)
{
    mHandle = handle;
}
1302
1303 // DeviceMemory implementation.
1304 ANGLE_INLINE void DeviceMemory::destroy(VkDevice device)
1305 {
1306 if (valid())
1307 {
1308 vkFreeMemory(device, mHandle, nullptr);
1309 mHandle = VK_NULL_HANDLE;
1310 }
1311 }
1312
// Allocates device memory per |allocInfo|; must not already hold a handle.
ANGLE_INLINE VkResult DeviceMemory::allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo)
{
    ASSERT(!valid());
    return vkAllocateMemory(device, &allocInfo, nullptr, &mHandle);
}
1318
// Maps |size| bytes of this allocation at |offset| and returns the CPU pointer in
// |mapPointer|.  The memory must be host-visible.
ANGLE_INLINE VkResult DeviceMemory::map(VkDevice device,
                                        VkDeviceSize offset,
                                        VkDeviceSize size,
                                        VkMemoryMapFlags flags,
                                        uint8_t **mapPointer) const
{
    ANGLE_TRACE_EVENT0("gpu.angle", "DeviceMemory::map");
    ASSERT(valid());
    return vkMapMemory(device, mHandle, offset, size, flags, reinterpret_cast<void **>(mapPointer));
}
1329
// Unmaps a previously mapped range of this allocation.
ANGLE_INLINE void DeviceMemory::unmap(VkDevice device) const
{
    ASSERT(valid());
    vkUnmapMemory(device, mHandle);
}
1335
// Flushes a mapped range so host writes become visible to the device
// (needed for non-coherent memory).  Note: operates on |memRange|, not mHandle.
ANGLE_INLINE void DeviceMemory::flush(VkDevice device, VkMappedMemoryRange &memRange)
{
    vkFlushMappedMemoryRanges(device, 1, &memRange);
}
1340
// Invalidates a mapped range so device writes become visible to the host
// (needed for non-coherent memory).  Note: operates on |memRange|, not mHandle.
ANGLE_INLINE void DeviceMemory::invalidate(VkDevice device, VkMappedMemoryRange &memRange)
{
    vkInvalidateMappedMemoryRanges(device, 1, &memRange);
}
1345
1346 // Allocator implementation.
1347 ANGLE_INLINE void Allocator::destroy()
1348 {
1349 if (valid())
1350 {
1351 vma::DestroyAllocator(mHandle);
1352 mHandle = VK_NULL_HANDLE;
1353 }
1354 }
1355
// Creates a VMA allocator for |device|; |preferredLargeHeapBlockSize| tunes the size of
// the blocks VMA carves large heaps into.
ANGLE_INLINE VkResult Allocator::init(VkPhysicalDevice physicalDevice,
                                      VkDevice device,
                                      VkInstance instance,
                                      uint32_t apiVersion,
                                      VkDeviceSize preferredLargeHeapBlockSize)
{
    ASSERT(!valid());
    return vma::InitAllocator(physicalDevice, device, instance, apiVersion,
                              preferredLargeHeapBlockSize, &mHandle);
}
1366
// Creates a buffer and a backing VMA allocation in one call.  On success the selected
// memory type index is returned in |memoryTypeIndexOut| and the new handles are written
// directly into |bufferOut|/|allocationOut| (both must be empty wrappers on entry).
ANGLE_INLINE VkResult Allocator::createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut,
                                              Buffer *bufferOut,
                                              Allocation *allocationOut) const
{
    ASSERT(valid());
    ASSERT(bufferOut && !bufferOut->valid());
    ASSERT(allocationOut && !allocationOut->valid());
    return vma::CreateBuffer(mHandle, &bufferCreateInfo, requiredFlags, preferredFlags,
                             persistentlyMappedBuffers, memoryTypeIndexOut, &bufferOut->mHandle,
                             &allocationOut->mHandle);
}
1382
// Returns the VkMemoryPropertyFlags of the physical-device memory type at |memoryTypeIndex|.
ANGLE_INLINE void Allocator::getMemoryTypeProperties(uint32_t memoryTypeIndex,
                                                     VkMemoryPropertyFlags *flagsOut) const
{
    ASSERT(valid());
    vma::GetMemoryTypeProperties(mHandle, memoryTypeIndex, flagsOut);
}
1389
// Asks VMA which memory type index would satisfy |bufferCreateInfo| with the given
// required/preferred property flags, without actually creating anything.
ANGLE_INLINE VkResult
Allocator::findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                            VkMemoryPropertyFlags requiredFlags,
                                            VkMemoryPropertyFlags preferredFlags,
                                            bool persistentlyMappedBuffers,
                                            uint32_t *memoryTypeIndexOut) const
{
    ASSERT(valid());
    return vma::FindMemoryTypeIndexForBufferInfo(mHandle, &bufferCreateInfo, requiredFlags,
                                                 preferredFlags, persistentlyMappedBuffers,
                                                 memoryTypeIndexOut);
}
1402
// Builds a VMA statistics string (JSON); caller must release it with freeStatsString().
ANGLE_INLINE void Allocator::buildStatsString(char **statsString, VkBool32 detailedMap)
{
    ASSERT(valid());
    vma::BuildStatsString(mHandle, statsString, detailedMap);
}
1408
// Releases a string previously returned by buildStatsString().
ANGLE_INLINE void Allocator::freeStatsString(char *statsString)
{
    ASSERT(valid());
    vma::FreeStatsString(mHandle, statsString);
}
1414
1415 // Allocation implementation.
1416 ANGLE_INLINE void Allocation::destroy(const Allocator &allocator)
1417 {
1418 if (valid())
1419 {
1420 vma::FreeMemory(allocator.getHandle(), mHandle);
1421 mHandle = VK_NULL_HANDLE;
1422 }
1423 }
1424
1425 ANGLE_INLINE VkResult Allocation::map(const Allocator &allocator, uint8_t **mapPointer) const
1426 {
1427 ASSERT(valid());
1428 return vma::MapMemory(allocator.getHandle(), mHandle, (void **)mapPointer);
1429 }
1430
// Unmaps a previously mapped allocation.
ANGLE_INLINE void Allocation::unmap(const Allocator &allocator) const
{
    ASSERT(valid());
    vma::UnmapMemory(allocator.getHandle(), mHandle);
}
1436
// Flushes |size| bytes at |offset| so host writes become visible to the device
// (needed for non-coherent memory).
ANGLE_INLINE void Allocation::flush(const Allocator &allocator,
                                    VkDeviceSize offset,
                                    VkDeviceSize size) const
{
    ASSERT(valid());
    vma::FlushAllocation(allocator.getHandle(), mHandle, offset, size);
}
1444
// Invalidates |size| bytes at |offset| so device writes become visible to the host
// (needed for non-coherent memory).
ANGLE_INLINE void Allocation::invalidate(const Allocator &allocator,
                                         VkDeviceSize offset,
                                         VkDeviceSize size) const
{
    ASSERT(valid());
    vma::InvalidateAllocation(allocator.getHandle(), mHandle, offset, size);
}
1452
1453 // RenderPass implementation.
1454 ANGLE_INLINE void RenderPass::destroy(VkDevice device)
1455 {
1456 if (valid())
1457 {
1458 vkDestroyRenderPass(device, mHandle, nullptr);
1459 mHandle = VK_NULL_HANDLE;
1460 }
1461 }
1462
// Creates the VkRenderPass described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult RenderPass::init(VkDevice device, const VkRenderPassCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass(device, &createInfo, nullptr, &mHandle);
}
1468
// Creates the render pass via vkCreateRenderPass2KHR (VK_KHR_create_renderpass2),
// which supports per-attachment extensions not expressible in the v1 create info.
ANGLE_INLINE VkResult RenderPass::init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass2KHR(device, &createInfo, nullptr, &mHandle);
}
1474
1475 // Buffer implementation.
1476 ANGLE_INLINE void Buffer::destroy(VkDevice device)
1477 {
1478 if (valid())
1479 {
1480 vkDestroyBuffer(device, mHandle, nullptr);
1481 mHandle = VK_NULL_HANDLE;
1482 }
1483 }
1484
// Creates the VkBuffer described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult Buffer::init(VkDevice device, const VkBufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBuffer(device, &createInfo, nullptr, &mHandle);
}
1490
// Binds |deviceMemory| at |offset| as the backing store for this buffer.
ANGLE_INLINE VkResult Buffer::bindMemory(VkDevice device,
                                         const DeviceMemory &deviceMemory,
                                         VkDeviceSize offset)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindBufferMemory(device, mHandle, deviceMemory.getHandle(), offset);
}
1498
// Queries size/alignment/memory-type requirements for backing this buffer.
ANGLE_INLINE void Buffer::getMemoryRequirements(VkDevice device,
                                                VkMemoryRequirements *memoryRequirementsOut)
{
    ASSERT(valid());
    vkGetBufferMemoryRequirements(device, mHandle, memoryRequirementsOut);
}
1505
1506 // BufferView implementation.
1507 ANGLE_INLINE void BufferView::destroy(VkDevice device)
1508 {
1509 if (valid())
1510 {
1511 vkDestroyBufferView(device, mHandle, nullptr);
1512 mHandle = VK_NULL_HANDLE;
1513 }
1514 }
1515
// Creates the VkBufferView described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult BufferView::init(VkDevice device, const VkBufferViewCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBufferView(device, &createInfo, nullptr, &mHandle);
}
1521
1522 // ShaderModule implementation.
1523 ANGLE_INLINE void ShaderModule::destroy(VkDevice device)
1524 {
1525 if (mHandle != VK_NULL_HANDLE)
1526 {
1527 vkDestroyShaderModule(device, mHandle, nullptr);
1528 mHandle = VK_NULL_HANDLE;
1529 }
1530 }
1531
// Creates the VkShaderModule from the SPIR-V code in |createInfo|.
ANGLE_INLINE VkResult ShaderModule::init(VkDevice device,
                                         const VkShaderModuleCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateShaderModule(device, &createInfo, nullptr, &mHandle);
}
1538
1539 // PipelineLayout implementation.
1540 ANGLE_INLINE void PipelineLayout::destroy(VkDevice device)
1541 {
1542 if (valid())
1543 {
1544 vkDestroyPipelineLayout(device, mHandle, nullptr);
1545 mHandle = VK_NULL_HANDLE;
1546 }
1547 }
1548
// Creates the VkPipelineLayout described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult PipelineLayout::init(VkDevice device,
                                           const VkPipelineLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreatePipelineLayout(device, &createInfo, nullptr, &mHandle);
}
1555
1556 // PipelineCache implementation.
1557 ANGLE_INLINE void PipelineCache::destroy(VkDevice device)
1558 {
1559 if (valid())
1560 {
1561 vkDestroyPipelineCache(device, mHandle, nullptr);
1562 mHandle = VK_NULL_HANDLE;
1563 }
1564 }
1565
// Creates the VkPipelineCache described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult PipelineCache::init(VkDevice device,
                                          const VkPipelineCacheCreateInfo &createInfo)
{
    ASSERT(!valid());
    // Note: if we are concerned with memory usage of this cache, we should give it custom
    // allocators.  Also, failure of this function is of little importance.
    return vkCreatePipelineCache(device, &createInfo, nullptr, &mHandle);
}
1574
// Merges |srcCacheCount| source caches into |dstCache| (which may differ from mHandle).
ANGLE_INLINE VkResult PipelineCache::merge(VkDevice device,
                                           VkPipelineCache dstCache,
                                           uint32_t srcCacheCount,
                                           const VkPipelineCache *srcCaches)
{
    ASSERT(valid());
    return vkMergePipelineCaches(device, dstCache, srcCacheCount, srcCaches);
}
1583
// Retrieves the serialized cache blob.  Call once with *cacheSize == 0 to query the
// size, then again with a buffer of that size to fetch the contents.
ANGLE_INLINE VkResult PipelineCache::getCacheData(VkDevice device,
                                                  size_t *cacheSize,
                                                  void *cacheData)
{
    ASSERT(valid());

    // Note: vkGetPipelineCacheData can return VK_INCOMPLETE if cacheSize is smaller than actual
    // size. There are two usages of this function. One is with *cacheSize == 0 to query the size
    // of the cache, and one is with an appropriate buffer to retrieve the cache contents.
    // VK_INCOMPLETE in the first case is an expected output. In the second case, VK_INCOMPLETE is
    // also acceptable and the resulting buffer will contain valid value by spec. Angle currently
    // ensures *cacheSize to be either 0 or of enough size, therefore VK_INCOMPLETE is not expected.
    return vkGetPipelineCacheData(device, mHandle, cacheSize, cacheData);
}
1598
1599 // Pipeline implementation.
1600 ANGLE_INLINE void Pipeline::destroy(VkDevice device)
1601 {
1602 if (valid())
1603 {
1604 vkDestroyPipeline(device, mHandle, nullptr);
1605 mHandle = VK_NULL_HANDLE;
1606 }
1607 }
1608
// Creates a single graphics pipeline, consulting |pipelineCacheVk| to reuse prior
// compilation results.
ANGLE_INLINE VkResult Pipeline::initGraphics(VkDevice device,
                                             const VkGraphicsPipelineCreateInfo &createInfo,
                                             const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateGraphicsPipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                     &mHandle);
}
1617
// Creates a single compute pipeline, consulting |pipelineCacheVk| to reuse prior
// compilation results.
ANGLE_INLINE VkResult Pipeline::initCompute(VkDevice device,
                                            const VkComputePipelineCreateInfo &createInfo,
                                            const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateComputePipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                    &mHandle);
}
1626
1627 // DescriptorSetLayout implementation.
1628 ANGLE_INLINE void DescriptorSetLayout::destroy(VkDevice device)
1629 {
1630 if (valid())
1631 {
1632 vkDestroyDescriptorSetLayout(device, mHandle, nullptr);
1633 mHandle = VK_NULL_HANDLE;
1634 }
1635 }
1636
// Creates the VkDescriptorSetLayout described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult DescriptorSetLayout::init(VkDevice device,
                                                const VkDescriptorSetLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorSetLayout(device, &createInfo, nullptr, &mHandle);
}
1643
1644 // DescriptorPool implementation.
1645 ANGLE_INLINE void DescriptorPool::destroy(VkDevice device)
1646 {
1647 if (valid())
1648 {
1649 vkDestroyDescriptorPool(device, mHandle, nullptr);
1650 mHandle = VK_NULL_HANDLE;
1651 }
1652 }
1653
// Creates the VkDescriptorPool described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult DescriptorPool::init(VkDevice device,
                                           const VkDescriptorPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorPool(device, &createInfo, nullptr, &mHandle);
}
1660
// Allocates descriptor sets from this pool.  Note: |allocInfo| must reference this
// pool's handle; the wrapper forwards the struct as-is.
ANGLE_INLINE VkResult
DescriptorPool::allocateDescriptorSets(VkDevice device,
                                       const VkDescriptorSetAllocateInfo &allocInfo,
                                       VkDescriptorSet *descriptorSetsOut)
{
    ASSERT(valid());
    return vkAllocateDescriptorSets(device, &allocInfo, descriptorSetsOut);
}
1669
// Returns |descriptorSetCount| sets to this pool.  The pool must have been created
// with the FREE_DESCRIPTOR_SET flag for this to be legal.
ANGLE_INLINE VkResult DescriptorPool::freeDescriptorSets(VkDevice device,
                                                         uint32_t descriptorSetCount,
                                                         const VkDescriptorSet *descriptorSets)
{
    ASSERT(valid());
    ASSERT(descriptorSetCount > 0);
    return vkFreeDescriptorSets(device, mHandle, descriptorSetCount, descriptorSets);
}
1678
1679 // Sampler implementation.
1680 ANGLE_INLINE void Sampler::destroy(VkDevice device)
1681 {
1682 if (valid())
1683 {
1684 vkDestroySampler(device, mHandle, nullptr);
1685 mHandle = VK_NULL_HANDLE;
1686 }
1687 }
1688
// Creates the VkSampler described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult Sampler::init(VkDevice device, const VkSamplerCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSampler(device, &createInfo, nullptr, &mHandle);
}
1694
// SamplerYcbcrConversion implementation.
1696 ANGLE_INLINE void SamplerYcbcrConversion::destroy(VkDevice device)
1697 {
1698 if (valid())
1699 {
1700 vkDestroySamplerYcbcrConversionKHR(device, mHandle, nullptr);
1701 mHandle = VK_NULL_HANDLE;
1702 }
1703 }
1704
// Creates a Y'CbCr conversion object (VK_KHR_sampler_ycbcr_conversion) from |createInfo|.
ANGLE_INLINE VkResult
SamplerYcbcrConversion::init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSamplerYcbcrConversionKHR(device, &createInfo, nullptr, &mHandle);
}
1711
1712 // Event implementation.
1713 ANGLE_INLINE void Event::destroy(VkDevice device)
1714 {
1715 if (valid())
1716 {
1717 vkDestroyEvent(device, mHandle, nullptr);
1718 mHandle = VK_NULL_HANDLE;
1719 }
1720 }
1721
// Creates the VkEvent described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult Event::init(VkDevice device, const VkEventCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateEvent(device, &createInfo, nullptr, &mHandle);
}
1727
// Returns VK_EVENT_SET or VK_EVENT_RESET (or an error) for the event's current state.
ANGLE_INLINE VkResult Event::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetEventStatus(device, mHandle);
}
1733
// Signals the event from the host.
ANGLE_INLINE VkResult Event::set(VkDevice device) const
{
    ASSERT(valid());
    return vkSetEvent(device, mHandle);
}
1739
// Unsignals the event from the host.
ANGLE_INLINE VkResult Event::reset(VkDevice device) const
{
    ASSERT(valid());
    return vkResetEvent(device, mHandle);
}
1745
1746 // Fence implementation.
1747 ANGLE_INLINE void Fence::destroy(VkDevice device)
1748 {
1749 if (valid())
1750 {
1751 vkDestroyFence(device, mHandle, nullptr);
1752 mHandle = VK_NULL_HANDLE;
1753 }
1754 }
1755
// Creates the VkFence described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult Fence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFence(device, &createInfo, nullptr, &mHandle);
}
1761
// Returns the fence to the unsignaled state.
ANGLE_INLINE VkResult Fence::reset(VkDevice device)
{
    ASSERT(valid());
    return vkResetFences(device, 1, &mHandle);
}
1767
// Returns VK_SUCCESS if signaled, VK_NOT_READY if not (or an error), without blocking.
ANGLE_INLINE VkResult Fence::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetFenceStatus(device, mHandle);
}
1773
1774 ANGLE_INLINE VkResult Fence::wait(VkDevice device, uint64_t timeout) const
1775 {
1776 ASSERT(valid());
1777 return vkWaitForFences(device, 1, &mHandle, true, timeout);
1778 }
1779
// Imports an external fence payload from a POSIX fd (VK_KHR_external_fence_fd).
ANGLE_INLINE VkResult Fence::importFd(VkDevice device,
                                      const VkImportFenceFdInfoKHR &importFenceFdInfo) const
{
    ASSERT(valid());
    return vkImportFenceFdKHR(device, &importFenceFdInfo);
}
1786
// Exports the fence payload as a POSIX fd (VK_KHR_external_fence_fd); the caller owns
// the returned |fdOut|.
ANGLE_INLINE VkResult Fence::exportFd(VkDevice device,
                                      const VkFenceGetFdInfoKHR &fenceGetFdInfo,
                                      int *fdOut) const
{
    ASSERT(valid());
    return vkGetFenceFdKHR(device, &fenceGetFdInfo, fdOut);
}
1794
1795 // QueryPool implementation.
1796 ANGLE_INLINE void QueryPool::destroy(VkDevice device)
1797 {
1798 if (valid())
1799 {
1800 vkDestroyQueryPool(device, mHandle, nullptr);
1801 mHandle = VK_NULL_HANDLE;
1802 }
1803 }
1804
// Creates the VkQueryPool described by |createInfo|; must not already hold a handle.
ANGLE_INLINE VkResult QueryPool::init(VkDevice device, const VkQueryPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateQueryPool(device, &createInfo, nullptr, &mHandle);
}
1810
// Copies the results of |queryCount| queries starting at |firstQuery| into |data|,
// |stride| bytes apart.  |flags| selects 64-bit results, wait behavior, etc.
ANGLE_INLINE VkResult QueryPool::getResults(VkDevice device,
                                            uint32_t firstQuery,
                                            uint32_t queryCount,
                                            size_t dataSize,
                                            void *data,
                                            VkDeviceSize stride,
                                            VkQueryResultFlags flags) const
{
    ASSERT(valid());
    return vkGetQueryPoolResults(device, mHandle, firstQuery, queryCount, dataSize, data, stride,
                                 flags);
}
1823
1824 // VirtualBlock implementation.
1825 ANGLE_INLINE void VirtualBlock::destroy(VkDevice device)
1826 {
1827 if (valid())
1828 {
1829 vma::DestroyVirtualBlock(mHandle);
1830 mHandle = VK_NULL_HANDLE;
1831 }
1832 }
1833
1834 ANGLE_INLINE VkResult VirtualBlock::init(VkDevice device,
1835 vma::VirtualBlockCreateFlags flags,
1836 VkDeviceSize size)
1837 {
1838 return vma::CreateVirtualBlock(size, flags, &mHandle);
1839 }
1840
// Reserves |size| bytes with the given |alignment| inside the block; the byte offset
// of the reservation is returned through |offsetOut|.
ANGLE_INLINE VkResult VirtualBlock::allocate(VkDeviceSize size,
                                             VkDeviceSize alignment,
                                             VkDeviceSize *offsetOut)
{
    return vma::VirtualAllocate(mHandle, size, alignment, offsetOut);
}
1847
// Releases the reservation previously returned by allocate() at |offset|.
ANGLE_INLINE void VirtualBlock::free(VkDeviceSize offset)
{
    vma::VirtualFree(mHandle, offset);
}
1852 } // namespace vk
1853 } // namespace rx
1854
1855 #endif // LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
1856