1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_wrapper:
7 // Wrapper classes around Vulkan objects. In an ideal world we could generate this
8 // from vk.xml. Or reuse the generator in the vkhpp tool. For now this is manually
9 // generated and we must add missing functions and objects as we need them.
10
11 #ifndef LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
12 #define LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
13
14 #include "common/vulkan/vk_headers.h"
15 #include "libANGLE/renderer/renderer_utils.h"
16 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
17 #include "libANGLE/trace.h"
18
19 namespace rx
20 {
21 enum class DescriptorSetIndex : uint32_t;
22
23 namespace vk
24 {
25 // Helper macros that apply to all the wrapped object types.
26 // Unimplemented handle types:
27 // Instance
28 // PhysicalDevice
29 // Device
30 // Queue
31 // DescriptorSet
32
33 #define ANGLE_HANDLE_TYPES_X(FUNC) \
34 FUNC(Allocation) \
35 FUNC(Allocator) \
36 FUNC(Buffer) \
37 FUNC(BufferBlock) \
38 FUNC(BufferView) \
39 FUNC(CommandPool) \
40 FUNC(DescriptorPool) \
41 FUNC(DescriptorSetLayout) \
42 FUNC(DeviceMemory) \
43 FUNC(Event) \
44 FUNC(Fence) \
45 FUNC(Framebuffer) \
46 FUNC(Image) \
47 FUNC(ImageView) \
48 FUNC(Pipeline) \
49 FUNC(PipelineCache) \
50 FUNC(PipelineLayout) \
51 FUNC(QueryPool) \
52 FUNC(RenderPass) \
53 FUNC(Sampler) \
54 FUNC(SamplerYcbcrConversion) \
55 FUNC(Semaphore) \
56 FUNC(ShaderModule)
57
58 #define ANGLE_COMMA_SEP_FUNC(TYPE) TYPE,
59
60 enum class HandleType
61 {
62 Invalid,
63 CommandBuffer,
64 ANGLE_HANDLE_TYPES_X(ANGLE_COMMA_SEP_FUNC) EnumCount
65 };
66
67 #undef ANGLE_COMMA_SEP_FUNC
68
69 #define ANGLE_PRE_DECLARE_CLASS_FUNC(TYPE) class TYPE;
ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)70 ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)
71 namespace priv
72 {
73 class CommandBuffer;
74 } // namespace priv
75 #undef ANGLE_PRE_DECLARE_CLASS_FUNC
76
77 // Returns the HandleType of a Vk Handle.
78 template <typename T>
79 struct HandleTypeHelper;
80
81 #define ANGLE_HANDLE_TYPE_HELPER_FUNC(TYPE) \
82 template <> \
83 struct HandleTypeHelper<TYPE> \
84 { \
85 constexpr static HandleType kHandleType = HandleType::TYPE; \
86 };
87
88 ANGLE_HANDLE_TYPES_X(ANGLE_HANDLE_TYPE_HELPER_FUNC)
89 template <>
90 struct HandleTypeHelper<priv::CommandBuffer>
91 {
92 constexpr static HandleType kHandleType = HandleType::CommandBuffer;
93 };
94
95 #undef ANGLE_HANDLE_TYPE_HELPER_FUNC
96
97 // Base class for all wrapped vulkan objects. Implements several common helper routines.
98 template <typename DerivedT, typename HandleT>
99 class WrappedObject : angle::NonCopyable
100 {
101 public:
102 HandleT getHandle() const { return mHandle; }
103 void setHandle(HandleT handle) { mHandle = handle; }
104 bool valid() const { return (mHandle != VK_NULL_HANDLE); }
105
106 const HandleT *ptr() const { return &mHandle; }
107
108 HandleT release()
109 {
110 HandleT handle = mHandle;
111 mHandle = VK_NULL_HANDLE;
112 return handle;
113 }
114
115 protected:
116 WrappedObject() : mHandle(VK_NULL_HANDLE) {}
117 ~WrappedObject() { ASSERT(!valid()); }
118
119 WrappedObject(WrappedObject &&other) : mHandle(other.mHandle)
120 {
121 other.mHandle = VK_NULL_HANDLE;
122 }
123
124 // Only works to initialize empty objects, since we don't have the device handle.
125 WrappedObject &operator=(WrappedObject &&other)
126 {
127 ASSERT(!valid());
128 std::swap(mHandle, other.mHandle);
129 return *this;
130 }
131
132 HandleT mHandle;
133 };
134
135 class CommandPool final : public WrappedObject<CommandPool, VkCommandPool>
136 {
137 public:
138 CommandPool() = default;
139
140 void destroy(VkDevice device);
141 VkResult reset(VkDevice device, VkCommandPoolResetFlags flags);
142 void freeCommandBuffers(VkDevice device,
143 uint32_t commandBufferCount,
144 const VkCommandBuffer *commandBuffers);
145
146 VkResult init(VkDevice device, const VkCommandPoolCreateInfo &createInfo);
147 };
148
149 class Pipeline final : public WrappedObject<Pipeline, VkPipeline>
150 {
151 public:
152 Pipeline() = default;
153 void destroy(VkDevice device);
154
155 VkResult initGraphics(VkDevice device,
156 const VkGraphicsPipelineCreateInfo &createInfo,
157 const PipelineCache &pipelineCacheVk);
158 VkResult initCompute(VkDevice device,
159 const VkComputePipelineCreateInfo &createInfo,
160 const PipelineCache &pipelineCacheVk);
161 };
162
163 namespace priv
164 {
165
166 // Helper class that wraps a Vulkan command buffer.
167 class CommandBuffer : public WrappedObject<CommandBuffer, VkCommandBuffer>
168 {
169 public:
170 CommandBuffer() = default;
171
172 VkCommandBuffer releaseHandle();
173
174 // This is used for normal pool allocated command buffers. It reset the handle.
175 // Note: this method does not require pool synchronization (locking the pool mutex).
176 void destroy(VkDevice device);
177
178 // This is used in conjunction with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT.
179 void destroy(VkDevice device, const CommandPool &commandPool);
180
181 VkResult init(VkDevice device, const VkCommandBufferAllocateInfo &createInfo);
182
183 using WrappedObject::operator=;
184
185 static bool SupportsQueries(const VkPhysicalDeviceFeatures &features)
186 {
187 return (features.inheritedQueries == VK_TRUE);
188 }
189
190 // Vulkan command buffers are executed as secondary command buffers within a primary command
191 // buffer.
192 static constexpr bool ExecutesInline() { return false; }
193
194 VkResult begin(const VkCommandBufferBeginInfo &info);
195
196 void beginQuery(const QueryPool &queryPool, uint32_t query, VkQueryControlFlags flags);
197
198 void beginRenderPass(const VkRenderPassBeginInfo &beginInfo, VkSubpassContents subpassContents);
199 void beginRendering(const VkRenderingInfo &beginInfo);
200
201 void bindDescriptorSets(const PipelineLayout &layout,
202 VkPipelineBindPoint pipelineBindPoint,
203 DescriptorSetIndex firstSet,
204 uint32_t descriptorSetCount,
205 const VkDescriptorSet *descriptorSets,
206 uint32_t dynamicOffsetCount,
207 const uint32_t *dynamicOffsets);
208 void bindGraphicsPipeline(const Pipeline &pipeline);
209 void bindComputePipeline(const Pipeline &pipeline);
210 void bindPipeline(VkPipelineBindPoint pipelineBindPoint, const Pipeline &pipeline);
211
212 void bindIndexBuffer(const Buffer &buffer, VkDeviceSize offset, VkIndexType indexType);
213 void bindVertexBuffers(uint32_t firstBinding,
214 uint32_t bindingCount,
215 const VkBuffer *buffers,
216 const VkDeviceSize *offsets);
217 void bindVertexBuffers2(uint32_t firstBinding,
218 uint32_t bindingCount,
219 const VkBuffer *buffers,
220 const VkDeviceSize *offsets,
221 const VkDeviceSize *sizes,
222 const VkDeviceSize *strides);
223
224 void blitImage(const Image &srcImage,
225 VkImageLayout srcImageLayout,
226 const Image &dstImage,
227 VkImageLayout dstImageLayout,
228 uint32_t regionCount,
229 const VkImageBlit *regions,
230 VkFilter filter);
231
232 void clearColorImage(const Image &image,
233 VkImageLayout imageLayout,
234 const VkClearColorValue &color,
235 uint32_t rangeCount,
236 const VkImageSubresourceRange *ranges);
237 void clearDepthStencilImage(const Image &image,
238 VkImageLayout imageLayout,
239 const VkClearDepthStencilValue &depthStencil,
240 uint32_t rangeCount,
241 const VkImageSubresourceRange *ranges);
242
243 void clearAttachments(uint32_t attachmentCount,
244 const VkClearAttachment *attachments,
245 uint32_t rectCount,
246 const VkClearRect *rects);
247
248 void copyBuffer(const Buffer &srcBuffer,
249 const Buffer &destBuffer,
250 uint32_t regionCount,
251 const VkBufferCopy *regions);
252
253 void copyBufferToImage(VkBuffer srcBuffer,
254 const Image &dstImage,
255 VkImageLayout dstImageLayout,
256 uint32_t regionCount,
257 const VkBufferImageCopy *regions);
258 void copyImageToBuffer(const Image &srcImage,
259 VkImageLayout srcImageLayout,
260 VkBuffer dstBuffer,
261 uint32_t regionCount,
262 const VkBufferImageCopy *regions);
263 void copyImage(const Image &srcImage,
264 VkImageLayout srcImageLayout,
265 const Image &dstImage,
266 VkImageLayout dstImageLayout,
267 uint32_t regionCount,
268 const VkImageCopy *regions);
269
270 void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
271 void dispatchIndirect(const Buffer &buffer, VkDeviceSize offset);
272
273 void draw(uint32_t vertexCount,
274 uint32_t instanceCount,
275 uint32_t firstVertex,
276 uint32_t firstInstance);
277 void drawIndexed(uint32_t indexCount,
278 uint32_t instanceCount,
279 uint32_t firstIndex,
280 int32_t vertexOffset,
281 uint32_t firstInstance);
282 void drawIndexedIndirect(const Buffer &buffer,
283 VkDeviceSize offset,
284 uint32_t drawCount,
285 uint32_t stride);
286 void drawIndirect(const Buffer &buffer,
287 VkDeviceSize offset,
288 uint32_t drawCount,
289 uint32_t stride);
290
291 VkResult end();
292 void endQuery(const QueryPool &queryPool, uint32_t query);
293 void endRenderPass();
294 void endRendering();
295 void executeCommands(uint32_t commandBufferCount, const CommandBuffer *commandBuffers);
296
297 void getMemoryUsageStats(size_t *usedMemoryOut, size_t *allocatedMemoryOut) const;
298
299 void fillBuffer(const Buffer &dstBuffer,
300 VkDeviceSize dstOffset,
301 VkDeviceSize size,
302 uint32_t data);
303
304 void imageBarrier(VkPipelineStageFlags srcStageMask,
305 VkPipelineStageFlags dstStageMask,
306 const VkImageMemoryBarrier &imageMemoryBarrier);
307
308 void imageBarrier2(const VkImageMemoryBarrier2 &imageMemoryBarrier2);
309
310 void imageWaitEvent(const VkEvent &event,
311 VkPipelineStageFlags srcStageMask,
312 VkPipelineStageFlags dstStageMask,
313 const VkImageMemoryBarrier &imageMemoryBarrier);
314
315 void nextSubpass(VkSubpassContents subpassContents);
316
317 void memoryBarrier(VkPipelineStageFlags srcStageMask,
318 VkPipelineStageFlags dstStageMask,
319 const VkMemoryBarrier &memoryBarrier);
320
321 void memoryBarrier2(const VkMemoryBarrier2 &memoryBarrier2);
322
323 void pipelineBarrier(VkPipelineStageFlags srcStageMask,
324 VkPipelineStageFlags dstStageMask,
325 VkDependencyFlags dependencyFlags,
326 uint32_t memoryBarrierCount,
327 const VkMemoryBarrier *memoryBarriers,
328 uint32_t bufferMemoryBarrierCount,
329 const VkBufferMemoryBarrier *bufferMemoryBarriers,
330 uint32_t imageMemoryBarrierCount,
331 const VkImageMemoryBarrier *imageMemoryBarriers);
332
333 void pipelineBarrier2(VkDependencyFlags dependencyFlags,
334 uint32_t memoryBarrierCount,
335 const VkMemoryBarrier2 *memoryBarriers2,
336 uint32_t bufferMemoryBarrierCount,
337 const VkBufferMemoryBarrier2 *bufferMemoryBarriers2,
338 uint32_t imageMemoryBarrierCount,
339 const VkImageMemoryBarrier2 *imageMemoryBarriers2);
340
341 void pushConstants(const PipelineLayout &layout,
342 VkShaderStageFlags flag,
343 uint32_t offset,
344 uint32_t size,
345 const void *data);
346
347 void setBlendConstants(const float blendConstants[4]);
348 void setCullMode(VkCullModeFlags cullMode);
349 void setDepthBias(float depthBiasConstantFactor,
350 float depthBiasClamp,
351 float depthBiasSlopeFactor);
352 void setDepthBiasEnable(VkBool32 depthBiasEnable);
353 void setDepthCompareOp(VkCompareOp depthCompareOp);
354 void setDepthTestEnable(VkBool32 depthTestEnable);
355 void setDepthWriteEnable(VkBool32 depthWriteEnable);
356 void setEvent(VkEvent event, VkPipelineStageFlags stageMask);
357 void setFragmentShadingRate(const VkExtent2D *fragmentSize,
358 VkFragmentShadingRateCombinerOpKHR ops[2]);
359 void setFrontFace(VkFrontFace frontFace);
360 void setLineWidth(float lineWidth);
361 void setLogicOp(VkLogicOp logicOp);
362 void setPrimitiveRestartEnable(VkBool32 primitiveRestartEnable);
363 void setRasterizerDiscardEnable(VkBool32 rasterizerDiscardEnable);
364 void setRenderingAttachmentLocations(const VkRenderingAttachmentLocationInfoKHR *info);
365 void setRenderingInputAttachmentIndicates(const VkRenderingInputAttachmentIndexInfoKHR *info);
366 void setScissor(uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *scissors);
367 void setStencilCompareMask(uint32_t compareFrontMask, uint32_t compareBackMask);
368 void setStencilOp(VkStencilFaceFlags faceMask,
369 VkStencilOp failOp,
370 VkStencilOp passOp,
371 VkStencilOp depthFailOp,
372 VkCompareOp compareOp);
373 void setStencilReference(uint32_t frontReference, uint32_t backReference);
374 void setStencilTestEnable(VkBool32 stencilTestEnable);
375 void setStencilWriteMask(uint32_t writeFrontMask, uint32_t writeBackMask);
376 void setVertexInput(uint32_t vertexBindingDescriptionCount,
377 const VkVertexInputBindingDescription2EXT *vertexBindingDescriptions,
378 uint32_t vertexAttributeDescriptionCount,
379 const VkVertexInputAttributeDescription2EXT *vertexAttributeDescriptions);
380 void setViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *viewports);
381 VkResult reset();
382 void resetEvent(VkEvent event, VkPipelineStageFlags stageMask);
383 void resetQueryPool(const QueryPool &queryPool, uint32_t firstQuery, uint32_t queryCount);
384 void resolveImage(const Image &srcImage,
385 VkImageLayout srcImageLayout,
386 const Image &dstImage,
387 VkImageLayout dstImageLayout,
388 uint32_t regionCount,
389 const VkImageResolve *regions);
390 void waitEvents(uint32_t eventCount,
391 const VkEvent *events,
392 VkPipelineStageFlags srcStageMask,
393 VkPipelineStageFlags dstStageMask,
394 uint32_t memoryBarrierCount,
395 const VkMemoryBarrier *memoryBarriers,
396 uint32_t bufferMemoryBarrierCount,
397 const VkBufferMemoryBarrier *bufferMemoryBarriers,
398 uint32_t imageMemoryBarrierCount,
399 const VkImageMemoryBarrier *imageMemoryBarriers);
400
401 void writeTimestamp(VkPipelineStageFlagBits pipelineStage,
402 const QueryPool &queryPool,
403 uint32_t query);
404
405 void writeTimestamp2(VkPipelineStageFlagBits2 pipelineStage,
406 const QueryPool &queryPool,
407 uint32_t query);
408
409 // VK_EXT_transform_feedback
410 void beginTransformFeedback(uint32_t firstCounterBuffer,
411 uint32_t counterBufferCount,
412 const VkBuffer *counterBuffers,
413 const VkDeviceSize *counterBufferOffsets);
414 void endTransformFeedback(uint32_t firstCounterBuffer,
415 uint32_t counterBufferCount,
416 const VkBuffer *counterBuffers,
417 const VkDeviceSize *counterBufferOffsets);
418 void bindTransformFeedbackBuffers(uint32_t firstBinding,
419 uint32_t bindingCount,
420 const VkBuffer *buffers,
421 const VkDeviceSize *offsets,
422 const VkDeviceSize *sizes);
423
424 // VK_EXT_debug_utils
425 void beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
426 void endDebugUtilsLabelEXT();
427 void insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
428 };
429 } // namespace priv
430
431 using PrimaryCommandBuffer = priv::CommandBuffer;
432
433 class Image final : public WrappedObject<Image, VkImage>
434 {
435 public:
436 Image() = default;
437
438 // Use this method if the lifetime of the image is not controlled by ANGLE. (SwapChain)
439 void setHandle(VkImage handle);
440
441 // Called on shutdown when the helper class *doesn't* own the handle to the image resource.
442 void reset();
443
444 // Called on shutdown when the helper class *does* own the handle to the image resource.
445 void destroy(VkDevice device);
446
447 VkResult init(VkDevice device, const VkImageCreateInfo &createInfo);
448
449 void getMemoryRequirements(VkDevice device, VkMemoryRequirements *requirementsOut) const;
450 VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);
451 VkResult bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo);
452
453 void getSubresourceLayout(VkDevice device,
454 VkImageAspectFlagBits aspectMask,
455 uint32_t mipLevel,
456 uint32_t arrayLayer,
457 VkSubresourceLayout *outSubresourceLayout) const;
458
459 private:
460 friend class ImageMemorySuballocator;
461 };
462
463 class ImageView final : public WrappedObject<ImageView, VkImageView>
464 {
465 public:
466 ImageView() = default;
467 void destroy(VkDevice device);
468
469 VkResult init(VkDevice device, const VkImageViewCreateInfo &createInfo);
470 };
471
472 class Semaphore final : public WrappedObject<Semaphore, VkSemaphore>
473 {
474 public:
475 Semaphore() = default;
476 void destroy(VkDevice device);
477
478 VkResult init(VkDevice device);
479 VkResult importFd(VkDevice device, const VkImportSemaphoreFdInfoKHR &importFdInfo) const;
480 };
481
482 class Framebuffer final : public WrappedObject<Framebuffer, VkFramebuffer>
483 {
484 public:
485 Framebuffer() = default;
486 void destroy(VkDevice device);
487
488 // Use this method only in necessary cases. (RenderPass)
489 void setHandle(VkFramebuffer handle);
490
491 VkResult init(VkDevice device, const VkFramebufferCreateInfo &createInfo);
492 };
493
494 class DeviceMemory final : public WrappedObject<DeviceMemory, VkDeviceMemory>
495 {
496 public:
497 DeviceMemory() = default;
498 void destroy(VkDevice device);
499
500 VkResult allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo);
501 VkResult map(VkDevice device,
502 VkDeviceSize offset,
503 VkDeviceSize size,
504 VkMemoryMapFlags flags,
505 uint8_t **mapPointer) const;
506 void unmap(VkDevice device) const;
507 void flush(VkDevice device, VkMappedMemoryRange &memRange);
508 void invalidate(VkDevice device, VkMappedMemoryRange &memRange);
509 };
510
511 class Allocator : public WrappedObject<Allocator, VmaAllocator>
512 {
513 public:
514 Allocator() = default;
515 void destroy();
516
517 VkResult init(VkPhysicalDevice physicalDevice,
518 VkDevice device,
519 VkInstance instance,
520 uint32_t apiVersion,
521 VkDeviceSize preferredLargeHeapBlockSize);
522
523 // Initializes the buffer handle and memory allocation.
524 VkResult createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
525 VkMemoryPropertyFlags requiredFlags,
526 VkMemoryPropertyFlags preferredFlags,
527 bool persistentlyMappedBuffers,
528 uint32_t *memoryTypeIndexOut,
529 Buffer *bufferOut,
530 Allocation *allocationOut) const;
531
532 void getMemoryTypeProperties(uint32_t memoryTypeIndex, VkMemoryPropertyFlags *flagsOut) const;
533 VkResult findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
534 VkMemoryPropertyFlags requiredFlags,
535 VkMemoryPropertyFlags preferredFlags,
536 bool persistentlyMappedBuffers,
537 uint32_t *memoryTypeIndexOut) const;
538
539 void buildStatsString(char **statsString, VkBool32 detailedMap);
540 void freeStatsString(char *statsString);
541 };
542
543 class Allocation final : public WrappedObject<Allocation, VmaAllocation>
544 {
545 public:
546 Allocation() = default;
547 void destroy(const Allocator &allocator);
548
549 VkResult map(const Allocator &allocator, uint8_t **mapPointer) const;
550 void unmap(const Allocator &allocator) const;
551 void flush(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;
552 void invalidate(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;
553
554 private:
555 friend class Allocator;
556 friend class ImageMemorySuballocator;
557 };
558
559 class RenderPass final : public WrappedObject<RenderPass, VkRenderPass>
560 {
561 public:
562 RenderPass() = default;
563 void destroy(VkDevice device);
564
565 VkResult init(VkDevice device, const VkRenderPassCreateInfo &createInfo);
566 VkResult init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo);
567 };
568
569 enum class StagingUsage
570 {
571 Read,
572 Write,
573 Both,
574 };
575
576 class Buffer final : public WrappedObject<Buffer, VkBuffer>
577 {
578 public:
579 Buffer() = default;
580 void destroy(VkDevice device);
581
582 VkResult init(VkDevice device, const VkBufferCreateInfo &createInfo);
583 VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory, VkDeviceSize offset);
584 void getMemoryRequirements(VkDevice device, VkMemoryRequirements *memoryRequirementsOut);
585
586 private:
587 friend class Allocator;
588 };
589
590 class BufferView final : public WrappedObject<BufferView, VkBufferView>
591 {
592 public:
593 BufferView() = default;
594 void destroy(VkDevice device);
595
596 VkResult init(VkDevice device, const VkBufferViewCreateInfo &createInfo);
597 };
598
599 class ShaderModule final : public WrappedObject<ShaderModule, VkShaderModule>
600 {
601 public:
602 ShaderModule() = default;
603 void destroy(VkDevice device);
604
605 VkResult init(VkDevice device, const VkShaderModuleCreateInfo &createInfo);
606 };
607
608 class PipelineLayout final : public WrappedObject<PipelineLayout, VkPipelineLayout>
609 {
610 public:
611 PipelineLayout() = default;
612 void destroy(VkDevice device);
613
614 VkResult init(VkDevice device, const VkPipelineLayoutCreateInfo &createInfo);
615 };
616
617 class PipelineCache final : public WrappedObject<PipelineCache, VkPipelineCache>
618 {
619 public:
620 PipelineCache() = default;
621 void destroy(VkDevice device);
622
623 VkResult init(VkDevice device, const VkPipelineCacheCreateInfo &createInfo);
624 VkResult getCacheData(VkDevice device, size_t *cacheSize, void *cacheData) const;
625 VkResult merge(VkDevice device, uint32_t srcCacheCount, const VkPipelineCache *srcCaches) const;
626 };
627
628 class DescriptorSetLayout final : public WrappedObject<DescriptorSetLayout, VkDescriptorSetLayout>
629 {
630 public:
631 DescriptorSetLayout() = default;
632 void destroy(VkDevice device);
633
634 VkResult init(VkDevice device, const VkDescriptorSetLayoutCreateInfo &createInfo);
635 };
636
637 class DescriptorPool final : public WrappedObject<DescriptorPool, VkDescriptorPool>
638 {
639 public:
640 DescriptorPool() = default;
641 void destroy(VkDevice device);
642
643 VkResult init(VkDevice device, const VkDescriptorPoolCreateInfo &createInfo);
644
645 VkResult allocateDescriptorSets(VkDevice device,
646 const VkDescriptorSetAllocateInfo &allocInfo,
647 VkDescriptorSet *descriptorSetsOut);
648 VkResult freeDescriptorSets(VkDevice device,
649 uint32_t descriptorSetCount,
650 const VkDescriptorSet *descriptorSets);
651 };
652
653 class Sampler final : public WrappedObject<Sampler, VkSampler>
654 {
655 public:
656 Sampler() = default;
657 void destroy(VkDevice device);
658 VkResult init(VkDevice device, const VkSamplerCreateInfo &createInfo);
659 };
660
661 class SamplerYcbcrConversion final
662 : public WrappedObject<SamplerYcbcrConversion, VkSamplerYcbcrConversion>
663 {
664 public:
665 SamplerYcbcrConversion() = default;
666 void destroy(VkDevice device);
667 VkResult init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo);
668 };
669
670 class Event final : public WrappedObject<Event, VkEvent>
671 {
672 public:
673 Event() = default;
674 void destroy(VkDevice device);
675 using WrappedObject::operator=;
676
677 VkResult init(VkDevice device, const VkEventCreateInfo &createInfo);
678 VkResult getStatus(VkDevice device) const;
679 VkResult set(VkDevice device) const;
680 VkResult reset(VkDevice device) const;
681 };
682
683 class Fence final : public WrappedObject<Fence, VkFence>
684 {
685 public:
686 Fence() = default;
687 void destroy(VkDevice device);
688 using WrappedObject::operator=;
689
690 VkResult init(VkDevice device, const VkFenceCreateInfo &createInfo);
691 VkResult reset(VkDevice device);
692 VkResult getStatus(VkDevice device) const;
693 VkResult wait(VkDevice device, uint64_t timeout) const;
694 VkResult importFd(VkDevice device, const VkImportFenceFdInfoKHR &importFenceFdInfo) const;
695 VkResult exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo, int *outFd) const;
696 };
697
698 class QueryPool final : public WrappedObject<QueryPool, VkQueryPool>
699 {
700 public:
701 QueryPool() = default;
702 void destroy(VkDevice device);
703
704 VkResult init(VkDevice device, const VkQueryPoolCreateInfo &createInfo);
705 VkResult getResults(VkDevice device,
706 uint32_t firstQuery,
707 uint32_t queryCount,
708 size_t dataSize,
709 void *data,
710 VkDeviceSize stride,
711 VkQueryResultFlags flags) const;
712 };
713
714 // VirtualBlock
715 class VirtualBlock final : public WrappedObject<VirtualBlock, VmaVirtualBlock>
716 {
717 public:
718 VirtualBlock() = default;
719 void destroy(VkDevice device);
720 VkResult init(VkDevice device, vma::VirtualBlockCreateFlags flags, VkDeviceSize size);
721
722 VkResult allocate(VkDeviceSize size,
723 VkDeviceSize alignment,
724 VmaVirtualAllocation *allocationOut,
725 VkDeviceSize *offsetOut);
726 void free(VmaVirtualAllocation allocation, VkDeviceSize offset);
727 void calculateStats(vma::StatInfo *pStatInfo) const;
728 };
729
730 // CommandPool implementation.
731 ANGLE_INLINE void CommandPool::destroy(VkDevice device)
732 {
733 if (valid())
734 {
735 vkDestroyCommandPool(device, mHandle, nullptr);
736 mHandle = VK_NULL_HANDLE;
737 }
738 }
739
740 ANGLE_INLINE VkResult CommandPool::reset(VkDevice device, VkCommandPoolResetFlags flags)
741 {
742 ASSERT(valid());
743 return vkResetCommandPool(device, mHandle, flags);
744 }
745
746 ANGLE_INLINE void CommandPool::freeCommandBuffers(VkDevice device,
747 uint32_t commandBufferCount,
748 const VkCommandBuffer *commandBuffers)
749 {
750 ASSERT(valid());
751 vkFreeCommandBuffers(device, mHandle, commandBufferCount, commandBuffers);
752 }
753
754 ANGLE_INLINE VkResult CommandPool::init(VkDevice device, const VkCommandPoolCreateInfo &createInfo)
755 {
756 ASSERT(!valid());
757 return vkCreateCommandPool(device, &createInfo, nullptr, &mHandle);
758 }
759
760 namespace priv
761 {
762
763 // CommandBuffer implementation.
764 ANGLE_INLINE VkCommandBuffer CommandBuffer::releaseHandle()
765 {
766 VkCommandBuffer handle = mHandle;
767 mHandle = nullptr;
768 return handle;
769 }
770
771 ANGLE_INLINE VkResult CommandBuffer::init(VkDevice device,
772 const VkCommandBufferAllocateInfo &createInfo)
773 {
774 ASSERT(!valid());
775 return vkAllocateCommandBuffers(device, &createInfo, &mHandle);
776 }
777
778 ANGLE_INLINE void CommandBuffer::blitImage(const Image &srcImage,
779 VkImageLayout srcImageLayout,
780 const Image &dstImage,
781 VkImageLayout dstImageLayout,
782 uint32_t regionCount,
783 const VkImageBlit *regions,
784 VkFilter filter)
785 {
786 ASSERT(valid() && srcImage.valid() && dstImage.valid());
787 ASSERT(regionCount == 1);
788 vkCmdBlitImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
789 dstImageLayout, 1, regions, filter);
790 }
791
792 ANGLE_INLINE VkResult CommandBuffer::begin(const VkCommandBufferBeginInfo &info)
793 {
794 ASSERT(valid());
795 return vkBeginCommandBuffer(mHandle, &info);
796 }
797
798 ANGLE_INLINE VkResult CommandBuffer::end()
799 {
800 ASSERT(valid());
801 return vkEndCommandBuffer(mHandle);
802 }
803
804 ANGLE_INLINE VkResult CommandBuffer::reset()
805 {
806 ASSERT(valid());
807 return vkResetCommandBuffer(mHandle, 0);
808 }
809
810 ANGLE_INLINE void CommandBuffer::nextSubpass(VkSubpassContents subpassContents)
811 {
812 ASSERT(valid());
813 vkCmdNextSubpass(mHandle, subpassContents);
814 }
815
816 ANGLE_INLINE void CommandBuffer::memoryBarrier(VkPipelineStageFlags srcStageMask,
817 VkPipelineStageFlags dstStageMask,
818 const VkMemoryBarrier &memoryBarrier)
819 {
820 ASSERT(valid());
821 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 1, &memoryBarrier, 0, nullptr, 0,
822 nullptr);
823 }
824
825 ANGLE_INLINE void CommandBuffer::memoryBarrier2(const VkMemoryBarrier2 &memoryBarrier2)
826 {
827 ASSERT(valid());
828 VkDependencyInfo pDependencyInfo = {};
829 pDependencyInfo.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
830 pDependencyInfo.memoryBarrierCount = 1;
831 pDependencyInfo.pMemoryBarriers = &memoryBarrier2;
832 pDependencyInfo.bufferMemoryBarrierCount = 0;
833 pDependencyInfo.pBufferMemoryBarriers = nullptr;
834 pDependencyInfo.imageMemoryBarrierCount = 0;
835 pDependencyInfo.pImageMemoryBarriers = nullptr;
836 vkCmdPipelineBarrier2KHR(mHandle, &pDependencyInfo);
837 }
838
839 ANGLE_INLINE void CommandBuffer::pipelineBarrier(VkPipelineStageFlags srcStageMask,
840 VkPipelineStageFlags dstStageMask,
841 VkDependencyFlags dependencyFlags,
842 uint32_t memoryBarrierCount,
843 const VkMemoryBarrier *memoryBarriers,
844 uint32_t bufferMemoryBarrierCount,
845 const VkBufferMemoryBarrier *bufferMemoryBarriers,
846 uint32_t imageMemoryBarrierCount,
847 const VkImageMemoryBarrier *imageMemoryBarriers)
848 {
849 ASSERT(valid());
850 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount,
851 memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
852 imageMemoryBarrierCount, imageMemoryBarriers);
853 }
854
855 ANGLE_INLINE void CommandBuffer::pipelineBarrier2(
856 VkDependencyFlags dependencyFlags,
857 uint32_t memoryBarrierCount,
858 const VkMemoryBarrier2 *memoryBarriers2,
859 uint32_t bufferMemoryBarrierCount,
860 const VkBufferMemoryBarrier2 *bufferMemoryBarriers2,
861 uint32_t imageMemoryBarrierCount,
862 const VkImageMemoryBarrier2 *imageMemoryBarriers2)
863 {
864 ASSERT(valid());
865 VkDependencyInfo dependencyInfo = {};
866 dependencyInfo.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
867 dependencyInfo.pNext = nullptr;
868 dependencyInfo.dependencyFlags = dependencyFlags;
869 dependencyInfo.memoryBarrierCount = memoryBarrierCount;
870 dependencyInfo.pMemoryBarriers = memoryBarriers2;
871 dependencyInfo.bufferMemoryBarrierCount = bufferMemoryBarrierCount;
872 dependencyInfo.pBufferMemoryBarriers = bufferMemoryBarriers2;
873 dependencyInfo.imageMemoryBarrierCount = imageMemoryBarrierCount;
874 dependencyInfo.pImageMemoryBarriers = imageMemoryBarriers2;
875 vkCmdPipelineBarrier2KHR(mHandle, &dependencyInfo);
876 }
877
878 ANGLE_INLINE void CommandBuffer::imageBarrier(VkPipelineStageFlags srcStageMask,
879 VkPipelineStageFlags dstStageMask,
880 const VkImageMemoryBarrier &imageMemoryBarrier)
881 {
882 ASSERT(valid());
883 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1,
884 &imageMemoryBarrier);
885 }
886
887 ANGLE_INLINE void CommandBuffer::imageBarrier2(const VkImageMemoryBarrier2 &imageMemoryBarrier2)
888 {
889 ASSERT(valid());
890
891 VkDependencyInfo pDependencyInfo = {};
892 pDependencyInfo.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
893 pDependencyInfo.memoryBarrierCount = 0;
894 pDependencyInfo.pMemoryBarriers = nullptr;
895 pDependencyInfo.bufferMemoryBarrierCount = 0;
896 pDependencyInfo.pBufferMemoryBarriers = nullptr;
897 pDependencyInfo.imageMemoryBarrierCount = 1;
898 pDependencyInfo.pImageMemoryBarriers = &imageMemoryBarrier2;
899 vkCmdPipelineBarrier2KHR(mHandle, &pDependencyInfo);
900 }
901
902 ANGLE_INLINE void CommandBuffer::imageWaitEvent(const VkEvent &event,
903 VkPipelineStageFlags srcStageMask,
904 VkPipelineStageFlags dstStageMask,
905 const VkImageMemoryBarrier &imageMemoryBarrier)
906 {
907 ASSERT(valid());
908 vkCmdWaitEvents(mHandle, 1, &event, srcStageMask, dstStageMask, 0, nullptr, 0, nullptr, 1,
909 &imageMemoryBarrier);
910 }
911
912 ANGLE_INLINE void CommandBuffer::destroy(VkDevice device)
913 {
914 // Note: do not add code that may access the pool in any way, because this method may be called
915 // without taking the pool mutex lock.
916 releaseHandle();
917 }
918
919 ANGLE_INLINE void CommandBuffer::destroy(VkDevice device, const vk::CommandPool &commandPool)
920 {
921 if (valid())
922 {
923 ASSERT(commandPool.valid());
924 vkFreeCommandBuffers(device, commandPool.getHandle(), 1, &mHandle);
925 mHandle = VK_NULL_HANDLE;
926 }
927 }
928
929 ANGLE_INLINE void CommandBuffer::copyBuffer(const Buffer &srcBuffer,
930 const Buffer &destBuffer,
931 uint32_t regionCount,
932 const VkBufferCopy *regions)
933 {
934 ASSERT(valid() && srcBuffer.valid() && destBuffer.valid());
935 vkCmdCopyBuffer(mHandle, srcBuffer.getHandle(), destBuffer.getHandle(), regionCount, regions);
936 }
937
938 ANGLE_INLINE void CommandBuffer::copyBufferToImage(VkBuffer srcBuffer,
939 const Image &dstImage,
940 VkImageLayout dstImageLayout,
941 uint32_t regionCount,
942 const VkBufferImageCopy *regions)
943 {
944 ASSERT(valid() && dstImage.valid());
945 ASSERT(srcBuffer != VK_NULL_HANDLE);
946 ASSERT(regionCount == 1);
947 vkCmdCopyBufferToImage(mHandle, srcBuffer, dstImage.getHandle(), dstImageLayout, 1, regions);
948 }
949
950 ANGLE_INLINE void CommandBuffer::copyImageToBuffer(const Image &srcImage,
951 VkImageLayout srcImageLayout,
952 VkBuffer dstBuffer,
953 uint32_t regionCount,
954 const VkBufferImageCopy *regions)
955 {
956 ASSERT(valid() && srcImage.valid());
957 ASSERT(dstBuffer != VK_NULL_HANDLE);
958 ASSERT(regionCount == 1);
959 vkCmdCopyImageToBuffer(mHandle, srcImage.getHandle(), srcImageLayout, dstBuffer, 1, regions);
960 }
961
962 ANGLE_INLINE void CommandBuffer::clearColorImage(const Image &image,
963 VkImageLayout imageLayout,
964 const VkClearColorValue &color,
965 uint32_t rangeCount,
966 const VkImageSubresourceRange *ranges)
967 {
968 ASSERT(valid());
969 ASSERT(rangeCount == 1);
970 vkCmdClearColorImage(mHandle, image.getHandle(), imageLayout, &color, 1, ranges);
971 }
972
973 ANGLE_INLINE void CommandBuffer::clearDepthStencilImage(
974 const Image &image,
975 VkImageLayout imageLayout,
976 const VkClearDepthStencilValue &depthStencil,
977 uint32_t rangeCount,
978 const VkImageSubresourceRange *ranges)
979 {
980 ASSERT(valid());
981 ASSERT(rangeCount == 1);
982 vkCmdClearDepthStencilImage(mHandle, image.getHandle(), imageLayout, &depthStencil, 1, ranges);
983 }
984
985 ANGLE_INLINE void CommandBuffer::clearAttachments(uint32_t attachmentCount,
986 const VkClearAttachment *attachments,
987 uint32_t rectCount,
988 const VkClearRect *rects)
989 {
990 ASSERT(valid());
991 vkCmdClearAttachments(mHandle, attachmentCount, attachments, rectCount, rects);
992 }
993
994 ANGLE_INLINE void CommandBuffer::copyImage(const Image &srcImage,
995 VkImageLayout srcImageLayout,
996 const Image &dstImage,
997 VkImageLayout dstImageLayout,
998 uint32_t regionCount,
999 const VkImageCopy *regions)
1000 {
1001 ASSERT(valid() && srcImage.valid() && dstImage.valid());
1002 ASSERT(regionCount == 1);
1003 vkCmdCopyImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
1004 dstImageLayout, 1, regions);
1005 }
1006
1007 ANGLE_INLINE void CommandBuffer::beginRenderPass(const VkRenderPassBeginInfo &beginInfo,
1008 VkSubpassContents subpassContents)
1009 {
1010 ASSERT(valid());
1011 vkCmdBeginRenderPass(mHandle, &beginInfo, subpassContents);
1012 }
1013
1014 ANGLE_INLINE void CommandBuffer::beginRendering(const VkRenderingInfo &beginInfo)
1015 {
1016 ASSERT(valid());
1017 vkCmdBeginRenderingKHR(mHandle, &beginInfo);
1018 }
1019
1020 ANGLE_INLINE void CommandBuffer::endRenderPass()
1021 {
1022 ASSERT(valid());
1023 vkCmdEndRenderPass(mHandle);
1024 }
1025
1026 ANGLE_INLINE void CommandBuffer::endRendering()
1027 {
1028 ASSERT(valid());
1029 vkCmdEndRenderingKHR(mHandle);
1030 }
1031
1032 ANGLE_INLINE void CommandBuffer::bindIndexBuffer(const Buffer &buffer,
1033 VkDeviceSize offset,
1034 VkIndexType indexType)
1035 {
1036 ASSERT(valid());
1037 vkCmdBindIndexBuffer(mHandle, buffer.getHandle(), offset, indexType);
1038 }
1039
1040 ANGLE_INLINE void CommandBuffer::bindDescriptorSets(const PipelineLayout &layout,
1041 VkPipelineBindPoint pipelineBindPoint,
1042 DescriptorSetIndex firstSet,
1043 uint32_t descriptorSetCount,
1044 const VkDescriptorSet *descriptorSets,
1045 uint32_t dynamicOffsetCount,
1046 const uint32_t *dynamicOffsets)
1047 {
1048 ASSERT(valid() && layout.valid());
1049 vkCmdBindDescriptorSets(this->mHandle, pipelineBindPoint, layout.getHandle(),
1050 ToUnderlying(firstSet), descriptorSetCount, descriptorSets,
1051 dynamicOffsetCount, dynamicOffsets);
1052 }
1053
1054 ANGLE_INLINE void CommandBuffer::executeCommands(uint32_t commandBufferCount,
1055 const CommandBuffer *commandBuffers)
1056 {
1057 ASSERT(valid());
1058 vkCmdExecuteCommands(mHandle, commandBufferCount, commandBuffers[0].ptr());
1059 }
1060
1061 ANGLE_INLINE void CommandBuffer::getMemoryUsageStats(size_t *usedMemoryOut,
1062 size_t *allocatedMemoryOut) const
1063 {
1064 // No data available.
1065 *usedMemoryOut = 0;
1066 *allocatedMemoryOut = 1;
1067 }
1068
1069 ANGLE_INLINE void CommandBuffer::fillBuffer(const Buffer &dstBuffer,
1070 VkDeviceSize dstOffset,
1071 VkDeviceSize size,
1072 uint32_t data)
1073 {
1074 ASSERT(valid());
1075 vkCmdFillBuffer(mHandle, dstBuffer.getHandle(), dstOffset, size, data);
1076 }
1077
1078 ANGLE_INLINE void CommandBuffer::pushConstants(const PipelineLayout &layout,
1079 VkShaderStageFlags flag,
1080 uint32_t offset,
1081 uint32_t size,
1082 const void *data)
1083 {
1084 ASSERT(valid() && layout.valid());
1085 ASSERT(offset == 0);
1086 vkCmdPushConstants(mHandle, layout.getHandle(), flag, 0, size, data);
1087 }
1088
1089 ANGLE_INLINE void CommandBuffer::setBlendConstants(const float blendConstants[4])
1090 {
1091 ASSERT(valid());
1092 vkCmdSetBlendConstants(mHandle, blendConstants);
1093 }
1094
1095 ANGLE_INLINE void CommandBuffer::setCullMode(VkCullModeFlags cullMode)
1096 {
1097 ASSERT(valid());
1098 vkCmdSetCullModeEXT(mHandle, cullMode);
1099 }
1100
1101 ANGLE_INLINE void CommandBuffer::setDepthBias(float depthBiasConstantFactor,
1102 float depthBiasClamp,
1103 float depthBiasSlopeFactor)
1104 {
1105 ASSERT(valid());
1106 vkCmdSetDepthBias(mHandle, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
1107 }
1108
1109 ANGLE_INLINE void CommandBuffer::setDepthBiasEnable(VkBool32 depthBiasEnable)
1110 {
1111 ASSERT(valid());
1112 vkCmdSetDepthBiasEnableEXT(mHandle, depthBiasEnable);
1113 }
1114
1115 ANGLE_INLINE void CommandBuffer::setDepthCompareOp(VkCompareOp depthCompareOp)
1116 {
1117 ASSERT(valid());
1118 vkCmdSetDepthCompareOpEXT(mHandle, depthCompareOp);
1119 }
1120
1121 ANGLE_INLINE void CommandBuffer::setDepthTestEnable(VkBool32 depthTestEnable)
1122 {
1123 ASSERT(valid());
1124 vkCmdSetDepthTestEnableEXT(mHandle, depthTestEnable);
1125 }
1126
1127 ANGLE_INLINE void CommandBuffer::setDepthWriteEnable(VkBool32 depthWriteEnable)
1128 {
1129 ASSERT(valid());
1130 vkCmdSetDepthWriteEnableEXT(mHandle, depthWriteEnable);
1131 }
1132
1133 ANGLE_INLINE void CommandBuffer::setEvent(VkEvent event, VkPipelineStageFlags stageMask)
1134 {
1135 ASSERT(valid() && event != VK_NULL_HANDLE);
1136 vkCmdSetEvent(mHandle, event, stageMask);
1137 }
1138
1139 ANGLE_INLINE void CommandBuffer::setFragmentShadingRate(const VkExtent2D *fragmentSize,
1140 VkFragmentShadingRateCombinerOpKHR ops[2])
1141 {
1142 ASSERT(valid() && fragmentSize != nullptr);
1143 vkCmdSetFragmentShadingRateKHR(mHandle, fragmentSize, ops);
1144 }
1145
1146 ANGLE_INLINE void CommandBuffer::setFrontFace(VkFrontFace frontFace)
1147 {
1148 ASSERT(valid());
1149 vkCmdSetFrontFaceEXT(mHandle, frontFace);
1150 }
1151
1152 ANGLE_INLINE void CommandBuffer::setLineWidth(float lineWidth)
1153 {
1154 ASSERT(valid());
1155 vkCmdSetLineWidth(mHandle, lineWidth);
1156 }
1157
1158 ANGLE_INLINE void CommandBuffer::setLogicOp(VkLogicOp logicOp)
1159 {
1160 ASSERT(valid());
1161 vkCmdSetLogicOpEXT(mHandle, logicOp);
1162 }
1163
1164 ANGLE_INLINE void CommandBuffer::setPrimitiveRestartEnable(VkBool32 primitiveRestartEnable)
1165 {
1166 ASSERT(valid());
1167 vkCmdSetPrimitiveRestartEnableEXT(mHandle, primitiveRestartEnable);
1168 }
1169
1170 ANGLE_INLINE void CommandBuffer::setRasterizerDiscardEnable(VkBool32 rasterizerDiscardEnable)
1171 {
1172 ASSERT(valid());
1173 vkCmdSetRasterizerDiscardEnableEXT(mHandle, rasterizerDiscardEnable);
1174 }
1175
1176 ANGLE_INLINE void CommandBuffer::setRenderingAttachmentLocations(
1177 const VkRenderingAttachmentLocationInfoKHR *info)
1178 {
1179 ASSERT(valid());
1180 vkCmdSetRenderingAttachmentLocationsKHR(mHandle, info);
1181 }
1182
1183 ANGLE_INLINE void CommandBuffer::setRenderingInputAttachmentIndicates(
1184 const VkRenderingInputAttachmentIndexInfoKHR *info)
1185 {
1186 ASSERT(valid());
1187 vkCmdSetRenderingInputAttachmentIndicesKHR(mHandle, info);
1188 }
1189
1190 ANGLE_INLINE void CommandBuffer::setScissor(uint32_t firstScissor,
1191 uint32_t scissorCount,
1192 const VkRect2D *scissors)
1193 {
1194 ASSERT(valid() && scissors != nullptr);
1195 vkCmdSetScissor(mHandle, firstScissor, scissorCount, scissors);
1196 }
1197
1198 ANGLE_INLINE void CommandBuffer::setStencilCompareMask(uint32_t compareFrontMask,
1199 uint32_t compareBackMask)
1200 {
1201 ASSERT(valid());
1202 vkCmdSetStencilCompareMask(mHandle, VK_STENCIL_FACE_FRONT_BIT, compareFrontMask);
1203 vkCmdSetStencilCompareMask(mHandle, VK_STENCIL_FACE_BACK_BIT, compareBackMask);
1204 }
1205
1206 ANGLE_INLINE void CommandBuffer::setStencilOp(VkStencilFaceFlags faceMask,
1207 VkStencilOp failOp,
1208 VkStencilOp passOp,
1209 VkStencilOp depthFailOp,
1210 VkCompareOp compareOp)
1211 {
1212 ASSERT(valid());
1213 vkCmdSetStencilOpEXT(mHandle, faceMask, failOp, passOp, depthFailOp, compareOp);
1214 }
1215
1216 ANGLE_INLINE void CommandBuffer::setStencilReference(uint32_t frontReference,
1217 uint32_t backReference)
1218 {
1219 ASSERT(valid());
1220 vkCmdSetStencilReference(mHandle, VK_STENCIL_FACE_FRONT_BIT, frontReference);
1221 vkCmdSetStencilReference(mHandle, VK_STENCIL_FACE_BACK_BIT, backReference);
1222 }
1223
1224 ANGLE_INLINE void CommandBuffer::setStencilTestEnable(VkBool32 stencilTestEnable)
1225 {
1226 ASSERT(valid());
1227 vkCmdSetStencilTestEnableEXT(mHandle, stencilTestEnable);
1228 }
1229
1230 ANGLE_INLINE void CommandBuffer::setStencilWriteMask(uint32_t writeFrontMask,
1231 uint32_t writeBackMask)
1232 {
1233 ASSERT(valid());
1234 vkCmdSetStencilWriteMask(mHandle, VK_STENCIL_FACE_FRONT_BIT, writeFrontMask);
1235 vkCmdSetStencilWriteMask(mHandle, VK_STENCIL_FACE_BACK_BIT, writeBackMask);
1236 }
1237
1238 ANGLE_INLINE void CommandBuffer::setVertexInput(
1239 uint32_t vertexBindingDescriptionCount,
1240 const VkVertexInputBindingDescription2EXT *VertexBindingDescriptions,
1241 uint32_t vertexAttributeDescriptionCount,
1242 const VkVertexInputAttributeDescription2EXT *VertexAttributeDescriptions)
1243 {
1244 ASSERT(valid());
1245 vkCmdSetVertexInputEXT(mHandle, vertexBindingDescriptionCount, VertexBindingDescriptions,
1246 vertexAttributeDescriptionCount, VertexAttributeDescriptions);
1247 }
1248
1249 ANGLE_INLINE void CommandBuffer::setViewport(uint32_t firstViewport,
1250 uint32_t viewportCount,
1251 const VkViewport *viewports)
1252 {
1253 ASSERT(valid() && viewports != nullptr);
1254 vkCmdSetViewport(mHandle, firstViewport, viewportCount, viewports);
1255 }
1256
1257 ANGLE_INLINE void CommandBuffer::resetEvent(VkEvent event, VkPipelineStageFlags stageMask)
1258 {
1259 ASSERT(valid() && event != VK_NULL_HANDLE);
1260 vkCmdResetEvent(mHandle, event, stageMask);
1261 }
1262
1263 ANGLE_INLINE void CommandBuffer::waitEvents(uint32_t eventCount,
1264 const VkEvent *events,
1265 VkPipelineStageFlags srcStageMask,
1266 VkPipelineStageFlags dstStageMask,
1267 uint32_t memoryBarrierCount,
1268 const VkMemoryBarrier *memoryBarriers,
1269 uint32_t bufferMemoryBarrierCount,
1270 const VkBufferMemoryBarrier *bufferMemoryBarriers,
1271 uint32_t imageMemoryBarrierCount,
1272 const VkImageMemoryBarrier *imageMemoryBarriers)
1273 {
1274 ASSERT(valid());
1275 vkCmdWaitEvents(mHandle, eventCount, events, srcStageMask, dstStageMask, memoryBarrierCount,
1276 memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
1277 imageMemoryBarrierCount, imageMemoryBarriers);
1278 }
1279
1280 ANGLE_INLINE void CommandBuffer::resetQueryPool(const QueryPool &queryPool,
1281 uint32_t firstQuery,
1282 uint32_t queryCount)
1283 {
1284 ASSERT(valid() && queryPool.valid());
1285 vkCmdResetQueryPool(mHandle, queryPool.getHandle(), firstQuery, queryCount);
1286 }
1287
1288 ANGLE_INLINE void CommandBuffer::resolveImage(const Image &srcImage,
1289 VkImageLayout srcImageLayout,
1290 const Image &dstImage,
1291 VkImageLayout dstImageLayout,
1292 uint32_t regionCount,
1293 const VkImageResolve *regions)
1294 {
1295 ASSERT(valid() && srcImage.valid() && dstImage.valid());
1296 vkCmdResolveImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
1297 dstImageLayout, regionCount, regions);
1298 }
1299
1300 ANGLE_INLINE void CommandBuffer::beginQuery(const QueryPool &queryPool,
1301 uint32_t query,
1302 VkQueryControlFlags flags)
1303 {
1304 ASSERT(valid() && queryPool.valid());
1305 vkCmdBeginQuery(mHandle, queryPool.getHandle(), query, flags);
1306 }
1307
1308 ANGLE_INLINE void CommandBuffer::endQuery(const QueryPool &queryPool, uint32_t query)
1309 {
1310 ASSERT(valid() && queryPool.valid());
1311 vkCmdEndQuery(mHandle, queryPool.getHandle(), query);
1312 }
1313
1314 ANGLE_INLINE void CommandBuffer::writeTimestamp(VkPipelineStageFlagBits pipelineStage,
1315 const QueryPool &queryPool,
1316 uint32_t query)
1317 {
1318 ASSERT(valid());
1319 vkCmdWriteTimestamp(mHandle, pipelineStage, queryPool.getHandle(), query);
1320 }
1321
1322 ANGLE_INLINE void CommandBuffer::writeTimestamp2(VkPipelineStageFlagBits2 pipelineStage,
1323 const QueryPool &queryPool,
1324 uint32_t query)
1325 {
1326 ASSERT(valid());
1327 vkCmdWriteTimestamp2KHR(mHandle, pipelineStage, queryPool.getHandle(), query);
1328 }
1329
1330 ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount,
1331 uint32_t instanceCount,
1332 uint32_t firstVertex,
1333 uint32_t firstInstance)
1334 {
1335 ASSERT(valid());
1336 vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
1337 }
1338
1339 ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount,
1340 uint32_t instanceCount,
1341 uint32_t firstIndex,
1342 int32_t vertexOffset,
1343 uint32_t firstInstance)
1344 {
1345 ASSERT(valid());
1346 vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
1347 }
1348
1349 ANGLE_INLINE void CommandBuffer::drawIndexedIndirect(const Buffer &buffer,
1350 VkDeviceSize offset,
1351 uint32_t drawCount,
1352 uint32_t stride)
1353 {
1354 ASSERT(valid());
1355 vkCmdDrawIndexedIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
1356 }
1357
1358 ANGLE_INLINE void CommandBuffer::drawIndirect(const Buffer &buffer,
1359 VkDeviceSize offset,
1360 uint32_t drawCount,
1361 uint32_t stride)
1362 {
1363 ASSERT(valid());
1364 vkCmdDrawIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
1365 }
1366
1367 ANGLE_INLINE void CommandBuffer::dispatch(uint32_t groupCountX,
1368 uint32_t groupCountY,
1369 uint32_t groupCountZ)
1370 {
1371 ASSERT(valid());
1372 vkCmdDispatch(mHandle, groupCountX, groupCountY, groupCountZ);
1373 }
1374
1375 ANGLE_INLINE void CommandBuffer::dispatchIndirect(const Buffer &buffer, VkDeviceSize offset)
1376 {
1377 ASSERT(valid());
1378 vkCmdDispatchIndirect(mHandle, buffer.getHandle(), offset);
1379 }
1380
1381 ANGLE_INLINE void CommandBuffer::bindPipeline(VkPipelineBindPoint pipelineBindPoint,
1382 const Pipeline &pipeline)
1383 {
1384 ASSERT(valid() && pipeline.valid());
1385 vkCmdBindPipeline(mHandle, pipelineBindPoint, pipeline.getHandle());
1386 }
1387
1388 ANGLE_INLINE void CommandBuffer::bindGraphicsPipeline(const Pipeline &pipeline)
1389 {
1390 ASSERT(valid() && pipeline.valid());
1391 vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.getHandle());
1392 }
1393
1394 ANGLE_INLINE void CommandBuffer::bindComputePipeline(const Pipeline &pipeline)
1395 {
1396 ASSERT(valid() && pipeline.valid());
1397 vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.getHandle());
1398 }
1399
1400 ANGLE_INLINE void CommandBuffer::bindVertexBuffers(uint32_t firstBinding,
1401 uint32_t bindingCount,
1402 const VkBuffer *buffers,
1403 const VkDeviceSize *offsets)
1404 {
1405 ASSERT(valid());
1406 vkCmdBindVertexBuffers(mHandle, firstBinding, bindingCount, buffers, offsets);
1407 }
1408
1409 ANGLE_INLINE void CommandBuffer::bindVertexBuffers2(uint32_t firstBinding,
1410 uint32_t bindingCount,
1411 const VkBuffer *buffers,
1412 const VkDeviceSize *offsets,
1413 const VkDeviceSize *sizes,
1414 const VkDeviceSize *strides)
1415 {
1416 ASSERT(valid());
1417 vkCmdBindVertexBuffers2EXT(mHandle, firstBinding, bindingCount, buffers, offsets, sizes,
1418 strides);
1419 }
1420
1421 ANGLE_INLINE void CommandBuffer::beginTransformFeedback(uint32_t firstCounterBuffer,
1422 uint32_t counterBufferCount,
1423 const VkBuffer *counterBuffers,
1424 const VkDeviceSize *counterBufferOffsets)
1425 {
1426 ASSERT(valid());
1427 ASSERT(vkCmdBeginTransformFeedbackEXT);
1428 vkCmdBeginTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
1429 counterBufferOffsets);
1430 }
1431
1432 ANGLE_INLINE void CommandBuffer::endTransformFeedback(uint32_t firstCounterBuffer,
1433 uint32_t counterBufferCount,
1434 const VkBuffer *counterBuffers,
1435 const VkDeviceSize *counterBufferOffsets)
1436 {
1437 ASSERT(valid());
1438 ASSERT(vkCmdEndTransformFeedbackEXT);
1439 vkCmdEndTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
1440 counterBufferOffsets);
1441 }
1442
1443 ANGLE_INLINE void CommandBuffer::bindTransformFeedbackBuffers(uint32_t firstBinding,
1444 uint32_t bindingCount,
1445 const VkBuffer *buffers,
1446 const VkDeviceSize *offsets,
1447 const VkDeviceSize *sizes)
1448 {
1449 ASSERT(valid());
1450 ASSERT(vkCmdBindTransformFeedbackBuffersEXT);
1451 vkCmdBindTransformFeedbackBuffersEXT(mHandle, firstBinding, bindingCount, buffers, offsets,
1452 sizes);
1453 }
1454
1455 ANGLE_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
1456 {
1457 ASSERT(valid());
1458 {
1459 #if !defined(ANGLE_SHARED_LIBVULKAN)
1460 // When the vulkan-loader is statically linked, we need to use the extension
1461 // functions defined in ANGLE's rx namespace. When it's dynamically linked
1462 // with volk, this will default to the function definitions with no namespace
1463 using rx::vkCmdBeginDebugUtilsLabelEXT;
1464 #endif // !defined(ANGLE_SHARED_LIBVULKAN)
1465 ASSERT(vkCmdBeginDebugUtilsLabelEXT);
1466 vkCmdBeginDebugUtilsLabelEXT(mHandle, &labelInfo);
1467 }
1468 }
1469
1470 ANGLE_INLINE void CommandBuffer::endDebugUtilsLabelEXT()
1471 {
1472 ASSERT(valid());
1473 ASSERT(vkCmdEndDebugUtilsLabelEXT);
1474 vkCmdEndDebugUtilsLabelEXT(mHandle);
1475 }
1476
1477 ANGLE_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
1478 {
1479 ASSERT(valid());
1480 ASSERT(vkCmdInsertDebugUtilsLabelEXT);
1481 vkCmdInsertDebugUtilsLabelEXT(mHandle, &labelInfo);
1482 }
1483 } // namespace priv
1484
1485 // Image implementation.
1486 ANGLE_INLINE void Image::setHandle(VkImage handle)
1487 {
1488 mHandle = handle;
1489 }
1490
1491 ANGLE_INLINE void Image::reset()
1492 {
1493 mHandle = VK_NULL_HANDLE;
1494 }
1495
1496 ANGLE_INLINE void Image::destroy(VkDevice device)
1497 {
1498 if (valid())
1499 {
1500 vkDestroyImage(device, mHandle, nullptr);
1501 mHandle = VK_NULL_HANDLE;
1502 }
1503 }
1504
1505 ANGLE_INLINE VkResult Image::init(VkDevice device, const VkImageCreateInfo &createInfo)
1506 {
1507 ASSERT(!valid());
1508 return vkCreateImage(device, &createInfo, nullptr, &mHandle);
1509 }
1510
1511 ANGLE_INLINE void Image::getMemoryRequirements(VkDevice device,
1512 VkMemoryRequirements *requirementsOut) const
1513 {
1514 ASSERT(valid());
1515 vkGetImageMemoryRequirements(device, mHandle, requirementsOut);
1516 }
1517
1518 ANGLE_INLINE VkResult Image::bindMemory(VkDevice device, const vk::DeviceMemory &deviceMemory)
1519 {
1520 ASSERT(valid() && deviceMemory.valid());
1521 return vkBindImageMemory(device, mHandle, deviceMemory.getHandle(), 0);
1522 }
1523
1524 ANGLE_INLINE VkResult Image::bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo)
1525 {
1526 ASSERT(valid());
1527 return vkBindImageMemory2(device, 1, &bindInfo);
1528 }
1529
1530 ANGLE_INLINE void Image::getSubresourceLayout(VkDevice device,
1531 VkImageAspectFlagBits aspectMask,
1532 uint32_t mipLevel,
1533 uint32_t arrayLayer,
1534 VkSubresourceLayout *outSubresourceLayout) const
1535 {
1536 VkImageSubresource subresource = {};
1537 subresource.aspectMask = aspectMask;
1538 subresource.mipLevel = mipLevel;
1539 subresource.arrayLayer = arrayLayer;
1540
1541 vkGetImageSubresourceLayout(device, getHandle(), &subresource, outSubresourceLayout);
1542 }
1543
1544 // ImageView implementation.
1545 ANGLE_INLINE void ImageView::destroy(VkDevice device)
1546 {
1547 if (valid())
1548 {
1549 vkDestroyImageView(device, mHandle, nullptr);
1550 mHandle = VK_NULL_HANDLE;
1551 }
1552 }
1553
1554 ANGLE_INLINE VkResult ImageView::init(VkDevice device, const VkImageViewCreateInfo &createInfo)
1555 {
1556 return vkCreateImageView(device, &createInfo, nullptr, &mHandle);
1557 }
1558
1559 // Semaphore implementation.
1560 ANGLE_INLINE void Semaphore::destroy(VkDevice device)
1561 {
1562 if (valid())
1563 {
1564 vkDestroySemaphore(device, mHandle, nullptr);
1565 mHandle = VK_NULL_HANDLE;
1566 }
1567 }
1568
1569 ANGLE_INLINE VkResult Semaphore::init(VkDevice device)
1570 {
1571 ASSERT(!valid());
1572
1573 VkSemaphoreCreateInfo semaphoreInfo = {};
1574 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
1575 semaphoreInfo.flags = 0;
1576
1577 return vkCreateSemaphore(device, &semaphoreInfo, nullptr, &mHandle);
1578 }
1579
1580 ANGLE_INLINE VkResult Semaphore::importFd(VkDevice device,
1581 const VkImportSemaphoreFdInfoKHR &importFdInfo) const
1582 {
1583 ASSERT(valid());
1584 return vkImportSemaphoreFdKHR(device, &importFdInfo);
1585 }
1586
1587 // Framebuffer implementation.
1588 ANGLE_INLINE void Framebuffer::destroy(VkDevice device)
1589 {
1590 if (valid())
1591 {
1592 vkDestroyFramebuffer(device, mHandle, nullptr);
1593 mHandle = VK_NULL_HANDLE;
1594 }
1595 }
1596
1597 ANGLE_INLINE VkResult Framebuffer::init(VkDevice device, const VkFramebufferCreateInfo &createInfo)
1598 {
1599 ASSERT(!valid());
1600 return vkCreateFramebuffer(device, &createInfo, nullptr, &mHandle);
1601 }
1602
1603 ANGLE_INLINE void Framebuffer::setHandle(VkFramebuffer handle)
1604 {
1605 mHandle = handle;
1606 }
1607
1608 // DeviceMemory implementation.
1609 ANGLE_INLINE void DeviceMemory::destroy(VkDevice device)
1610 {
1611 if (valid())
1612 {
1613 vkFreeMemory(device, mHandle, nullptr);
1614 mHandle = VK_NULL_HANDLE;
1615 }
1616 }
1617
1618 ANGLE_INLINE VkResult DeviceMemory::allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo)
1619 {
1620 ASSERT(!valid());
1621 return vkAllocateMemory(device, &allocInfo, nullptr, &mHandle);
1622 }
1623
1624 ANGLE_INLINE VkResult DeviceMemory::map(VkDevice device,
1625 VkDeviceSize offset,
1626 VkDeviceSize size,
1627 VkMemoryMapFlags flags,
1628 uint8_t **mapPointer) const
1629 {
1630 ASSERT(valid());
1631 return vkMapMemory(device, mHandle, offset, size, flags, reinterpret_cast<void **>(mapPointer));
1632 }
1633
1634 ANGLE_INLINE void DeviceMemory::unmap(VkDevice device) const
1635 {
1636 ASSERT(valid());
1637 vkUnmapMemory(device, mHandle);
1638 }
1639
1640 ANGLE_INLINE void DeviceMemory::flush(VkDevice device, VkMappedMemoryRange &memRange)
1641 {
1642 vkFlushMappedMemoryRanges(device, 1, &memRange);
1643 }
1644
1645 ANGLE_INLINE void DeviceMemory::invalidate(VkDevice device, VkMappedMemoryRange &memRange)
1646 {
1647 vkInvalidateMappedMemoryRanges(device, 1, &memRange);
1648 }
1649
1650 // Allocator implementation.
1651 ANGLE_INLINE void Allocator::destroy()
1652 {
1653 if (valid())
1654 {
1655 vma::DestroyAllocator(mHandle);
1656 mHandle = VK_NULL_HANDLE;
1657 }
1658 }
1659
1660 ANGLE_INLINE VkResult Allocator::init(VkPhysicalDevice physicalDevice,
1661 VkDevice device,
1662 VkInstance instance,
1663 uint32_t apiVersion,
1664 VkDeviceSize preferredLargeHeapBlockSize)
1665 {
1666 ASSERT(!valid());
1667 return vma::InitAllocator(physicalDevice, device, instance, apiVersion,
1668 preferredLargeHeapBlockSize, &mHandle);
1669 }
1670
1671 ANGLE_INLINE VkResult Allocator::createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
1672 VkMemoryPropertyFlags requiredFlags,
1673 VkMemoryPropertyFlags preferredFlags,
1674 bool persistentlyMappedBuffers,
1675 uint32_t *memoryTypeIndexOut,
1676 Buffer *bufferOut,
1677 Allocation *allocationOut) const
1678 {
1679 ASSERT(valid());
1680 ASSERT(bufferOut && !bufferOut->valid());
1681 ASSERT(allocationOut && !allocationOut->valid());
1682 return vma::CreateBuffer(mHandle, &bufferCreateInfo, requiredFlags, preferredFlags,
1683 persistentlyMappedBuffers, memoryTypeIndexOut, &bufferOut->mHandle,
1684 &allocationOut->mHandle);
1685 }

ANGLE_INLINE void Allocator::getMemoryTypeProperties(uint32_t memoryTypeIndex,
                                                     VkMemoryPropertyFlags *flagsOut) const
{
    ASSERT(valid());
    vma::GetMemoryTypeProperties(mHandle, memoryTypeIndex, flagsOut);
}

ANGLE_INLINE VkResult
Allocator::findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                            VkMemoryPropertyFlags requiredFlags,
                                            VkMemoryPropertyFlags preferredFlags,
                                            bool persistentlyMappedBuffers,
                                            uint32_t *memoryTypeIndexOut) const
{
    ASSERT(valid());
    return vma::FindMemoryTypeIndexForBufferInfo(mHandle, &bufferCreateInfo, requiredFlags,
                                                 preferredFlags, persistentlyMappedBuffers,
                                                 memoryTypeIndexOut);
}

ANGLE_INLINE void Allocator::buildStatsString(char **statsString, VkBool32 detailedMap)
{
    ASSERT(valid());
    vma::BuildStatsString(mHandle, statsString, detailedMap);
}

ANGLE_INLINE void Allocator::freeStatsString(char *statsString)
{
    ASSERT(valid());
    vma::FreeStatsString(mHandle, statsString);
}

// Allocation implementation.
ANGLE_INLINE void Allocation::destroy(const Allocator &allocator)
{
    if (valid())
    {
        vma::FreeMemory(allocator.getHandle(), mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Allocation::map(const Allocator &allocator, uint8_t **mapPointer) const
{
    ASSERT(valid());
    return vma::MapMemory(allocator.getHandle(), mHandle, reinterpret_cast<void **>(mapPointer));
}

ANGLE_INLINE void Allocation::unmap(const Allocator &allocator) const
{
    ASSERT(valid());
    vma::UnmapMemory(allocator.getHandle(), mHandle);
}

ANGLE_INLINE void Allocation::flush(const Allocator &allocator,
                                    VkDeviceSize offset,
                                    VkDeviceSize size) const
{
    ASSERT(valid());
    vma::FlushAllocation(allocator.getHandle(), mHandle, offset, size);
}

ANGLE_INLINE void Allocation::invalidate(const Allocator &allocator,
                                         VkDeviceSize offset,
                                         VkDeviceSize size) const
{
    ASSERT(valid());
    vma::InvalidateAllocation(allocator.getHandle(), mHandle, offset, size);
}
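
// Illustrative usage sketch (hypothetical code, not part of ANGLE; |allocator| and |size| are
// assumed to exist).  flush()/invalidate() are only needed for memory types that lack
// VK_MEMORY_PROPERTY_HOST_COHERENT_BIT:
//
//   uint8_t *mapped = nullptr;
//   if (allocation.map(allocator, &mapped) == VK_SUCCESS)
//   {
//       // ... write |size| bytes through |mapped| ...
//       allocation.flush(allocator, 0, size);
//       allocation.unmap(allocator);
//   }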

// RenderPass implementation.
ANGLE_INLINE void RenderPass::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyRenderPass(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult RenderPass::init(VkDevice device, const VkRenderPassCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass(device, &createInfo, nullptr, &mHandle);
}

// Note: vkCreateRenderPass2KHR is provided by the VK_KHR_create_renderpass2 extension
// (promoted to core in Vulkan 1.2); callers are responsible for ensuring it is available.
ANGLE_INLINE VkResult RenderPass::init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass2KHR(device, &createInfo, nullptr, &mHandle);
}

// Buffer implementation.
ANGLE_INLINE void Buffer::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBuffer(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Buffer::init(VkDevice device, const VkBufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBuffer(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult Buffer::bindMemory(VkDevice device,
                                         const DeviceMemory &deviceMemory,
                                         VkDeviceSize offset)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindBufferMemory(device, mHandle, deviceMemory.getHandle(), offset);
}

ANGLE_INLINE void Buffer::getMemoryRequirements(VkDevice device,
                                                VkMemoryRequirements *memoryRequirementsOut)
{
    ASSERT(valid());
    vkGetBufferMemoryRequirements(device, mHandle, memoryRequirementsOut);
}
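
// Illustrative usage sketch of creating a buffer and backing it with memory (hypothetical code,
// not part of ANGLE; |bufferCreateInfo| and |memoryTypeIndex| are assumed to exist):
//
//   vk::Buffer buffer;
//   if (buffer.init(device, bufferCreateInfo) == VK_SUCCESS)
//   {
//       VkMemoryRequirements requirements = {};
//       buffer.getMemoryRequirements(device, &requirements);
//
//       VkMemoryAllocateInfo allocInfo = {};
//       allocInfo.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
//       allocInfo.allocationSize  = requirements.size;
//       allocInfo.memoryTypeIndex = memoryTypeIndex;  // chosen from requirements.memoryTypeBits
//
//       vk::DeviceMemory memory;
//       if (memory.allocate(device, allocInfo) == VK_SUCCESS)
//       {
//           (void)buffer.bindMemory(device, memory, 0);
//       }
//   }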

// BufferView implementation.
ANGLE_INLINE void BufferView::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBufferView(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult BufferView::init(VkDevice device, const VkBufferViewCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBufferView(device, &createInfo, nullptr, &mHandle);
}

// ShaderModule implementation.
ANGLE_INLINE void ShaderModule::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyShaderModule(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult ShaderModule::init(VkDevice device,
                                         const VkShaderModuleCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateShaderModule(device, &createInfo, nullptr, &mHandle);
}

// PipelineLayout implementation.
ANGLE_INLINE void PipelineLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult PipelineLayout::init(VkDevice device,
                                           const VkPipelineLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreatePipelineLayout(device, &createInfo, nullptr, &mHandle);
}

// PipelineCache implementation.
ANGLE_INLINE void PipelineCache::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineCache(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult PipelineCache::init(VkDevice device,
                                          const VkPipelineCacheCreateInfo &createInfo)
{
    ASSERT(!valid());
    // Note: if the memory usage of this cache becomes a concern, we should give it custom
    // allocators. Also, failure of this function is of little consequence.
    return vkCreatePipelineCache(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult PipelineCache::merge(VkDevice device,
                                           uint32_t srcCacheCount,
                                           const VkPipelineCache *srcCaches) const
{
    ASSERT(valid());
    return vkMergePipelineCaches(device, mHandle, srcCacheCount, srcCaches);
}

ANGLE_INLINE VkResult PipelineCache::getCacheData(VkDevice device,
                                                  size_t *cacheSize,
                                                  void *cacheData) const
{
    ASSERT(valid());

    // Note: vkGetPipelineCacheData can return VK_INCOMPLETE if *cacheSize is smaller than the
    // actual size. There are two usages of this function: one with *cacheSize == 0 (and
    // cacheData == nullptr) to query the size of the cache, and one with a buffer of that size
    // to retrieve the cache contents. VK_INCOMPLETE is an expected result in the first case. In
    // the second case, VK_INCOMPLETE is also acceptable and, per the spec, the returned buffer
    // still contains valid data. ANGLE currently ensures *cacheSize is either 0 or large
    // enough, so VK_INCOMPLETE is not expected.
    return vkGetPipelineCacheData(device, mHandle, cacheSize, cacheData);
}
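
// Illustrative sketch of the two-call pattern described above (hypothetical code, not part of
// ANGLE):
//
//   size_t cacheSize = 0;
//   if (cache.getCacheData(device, &cacheSize, nullptr) == VK_SUCCESS && cacheSize > 0)
//   {
//       std::vector<uint8_t> cacheData(cacheSize);
//       VkResult result = cache.getCacheData(device, &cacheSize, cacheData.data());
//   }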

// Pipeline implementation.
ANGLE_INLINE void Pipeline::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipeline(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Pipeline::initGraphics(VkDevice device,
                                             const VkGraphicsPipelineCreateInfo &createInfo,
                                             const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateGraphicsPipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                     &mHandle);
}

ANGLE_INLINE VkResult Pipeline::initCompute(VkDevice device,
                                            const VkComputePipelineCreateInfo &createInfo,
                                            const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateComputePipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                    &mHandle);
}
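
// Illustrative usage sketch of initCompute() (hypothetical code, not part of ANGLE;
// |shaderStageInfo|, |pipelineLayout| and |pipelineCache| are assumed to exist):
//
//   VkComputePipelineCreateInfo computeInfo = {};
//   computeInfo.sType  = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
//   computeInfo.stage  = shaderStageInfo;  // VkPipelineShaderStageCreateInfo
//   computeInfo.layout = pipelineLayout.getHandle();
//
//   vk::Pipeline pipeline;
//   VkResult result = pipeline.initCompute(device, computeInfo, pipelineCache);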

// DescriptorSetLayout implementation.
ANGLE_INLINE void DescriptorSetLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorSetLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult DescriptorSetLayout::init(VkDevice device,
                                                const VkDescriptorSetLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorSetLayout(device, &createInfo, nullptr, &mHandle);
}

// DescriptorPool implementation.
ANGLE_INLINE void DescriptorPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult DescriptorPool::init(VkDevice device,
                                           const VkDescriptorPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorPool(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult
DescriptorPool::allocateDescriptorSets(VkDevice device,
                                       const VkDescriptorSetAllocateInfo &allocInfo,
                                       VkDescriptorSet *descriptorSetsOut)
{
    ASSERT(valid());
    return vkAllocateDescriptorSets(device, &allocInfo, descriptorSetsOut);
}

ANGLE_INLINE VkResult DescriptorPool::freeDescriptorSets(VkDevice device,
                                                         uint32_t descriptorSetCount,
                                                         const VkDescriptorSet *descriptorSets)
{
    ASSERT(valid());
    ASSERT(descriptorSetCount > 0);
    return vkFreeDescriptorSets(device, mHandle, descriptorSetCount, descriptorSets);
}
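
// Illustrative usage sketch (hypothetical code, not part of ANGLE; |setLayoutHandle| is assumed
// to exist).  Note that freeDescriptorSets() is only legal if the pool was created with
// VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT:
//
//   VkDescriptorSetAllocateInfo allocInfo = {};
//   allocInfo.sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
//   allocInfo.descriptorPool     = pool.getHandle();
//   allocInfo.descriptorSetCount = 1;
//   allocInfo.pSetLayouts        = &setLayoutHandle;
//
//   VkDescriptorSet descriptorSet = VK_NULL_HANDLE;
//   VkResult result = pool.allocateDescriptorSets(device, allocInfo, &descriptorSet);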

// Sampler implementation.
ANGLE_INLINE void Sampler::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySampler(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Sampler::init(VkDevice device, const VkSamplerCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSampler(device, &createInfo, nullptr, &mHandle);
}

// SamplerYcbcrConversion implementation.
ANGLE_INLINE void SamplerYcbcrConversion::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySamplerYcbcrConversion(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult
SamplerYcbcrConversion::init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSamplerYcbcrConversion(device, &createInfo, nullptr, &mHandle);
}

// Event implementation.
ANGLE_INLINE void Event::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyEvent(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Event::init(VkDevice device, const VkEventCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateEvent(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult Event::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetEventStatus(device, mHandle);
}

ANGLE_INLINE VkResult Event::set(VkDevice device) const
{
    ASSERT(valid());
    return vkSetEvent(device, mHandle);
}

ANGLE_INLINE VkResult Event::reset(VkDevice device) const
{
    ASSERT(valid());
    return vkResetEvent(device, mHandle);
}
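
// Illustrative usage sketch (hypothetical code, not part of ANGLE).  Note that getStatus()
// returns VK_EVENT_SET or VK_EVENT_RESET rather than VK_SUCCESS:
//
//   vk::Event event;
//   VkEventCreateInfo eventInfo = {};
//   eventInfo.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
//   if (event.init(device, eventInfo) == VK_SUCCESS)
//   {
//       (void)event.set(device);
//       const bool signaled = (event.getStatus(device) == VK_EVENT_SET);
//       event.destroy(device);
//   }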

// Fence implementation.
ANGLE_INLINE void Fence::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyFence(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Fence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFence(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult Fence::reset(VkDevice device)
{
    ASSERT(valid());
    return vkResetFences(device, 1, &mHandle);
}

ANGLE_INLINE VkResult Fence::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetFenceStatus(device, mHandle);
}

ANGLE_INLINE VkResult Fence::wait(VkDevice device, uint64_t timeout) const
{
    ASSERT(valid());
    return vkWaitForFences(device, 1, &mHandle, VK_TRUE, timeout);
}

ANGLE_INLINE VkResult Fence::importFd(VkDevice device,
                                      const VkImportFenceFdInfoKHR &importFenceFdInfo) const
{
    ASSERT(valid());
    return vkImportFenceFdKHR(device, &importFenceFdInfo);
}

ANGLE_INLINE VkResult Fence::exportFd(VkDevice device,
                                      const VkFenceGetFdInfoKHR &fenceGetFdInfo,
                                      int *fdOut) const
{
    ASSERT(valid());
    return vkGetFenceFdKHR(device, &fenceGetFdInfo, fdOut);
}
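
// Illustrative usage sketch (hypothetical code, not part of ANGLE).  wait() returns VK_SUCCESS
// once the fence is signaled, or VK_TIMEOUT if |timeout| nanoseconds elapse first:
//
//   vk::Fence fence;
//   VkFenceCreateInfo fenceInfo = {};
//   fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
//   if (fence.init(device, fenceInfo) == VK_SUCCESS)
//   {
//       // ... submit work that signals |fence| ...
//       VkResult result = fence.wait(device, 1000000000);  // one second, in nanoseconds
//       (void)fence.reset(device);
//       fence.destroy(device);
//   }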

// QueryPool implementation.
ANGLE_INLINE void QueryPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyQueryPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult QueryPool::init(VkDevice device, const VkQueryPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateQueryPool(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult QueryPool::getResults(VkDevice device,
                                            uint32_t firstQuery,
                                            uint32_t queryCount,
                                            size_t dataSize,
                                            void *data,
                                            VkDeviceSize stride,
                                            VkQueryResultFlags flags) const
{
    ASSERT(valid());
    return vkGetQueryPoolResults(device, mHandle, firstQuery, queryCount, dataSize, data, stride,
                                 flags);
}
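
// Illustrative usage sketch of getResults() (hypothetical code, not part of ANGLE; |queryPool|
// holds a pool whose query 0 has already been written on the GPU):
//
//   uint64_t timestamp = 0;
//   VkResult result =
//       queryPool.getResults(device, 0, 1, sizeof(timestamp), &timestamp, sizeof(timestamp),
//                            VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);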

// VirtualBlock implementation.
ANGLE_INLINE void VirtualBlock::destroy(VkDevice device)
{
    if (valid())
    {
        vma::DestroyVirtualBlock(mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}

// Note: |device| is currently unused; VMA virtual blocks are CPU-side sub-allocation metadata
// and do not create any Vulkan objects.
ANGLE_INLINE VkResult VirtualBlock::init(VkDevice device,
                                         vma::VirtualBlockCreateFlags flags,
                                         VkDeviceSize size)
{
    return vma::CreateVirtualBlock(size, flags, &mHandle);
}

ANGLE_INLINE VkResult VirtualBlock::allocate(VkDeviceSize size,
                                             VkDeviceSize alignment,
                                             VmaVirtualAllocation *allocationOut,
                                             VkDeviceSize *offsetOut)
{
    return vma::VirtualAllocate(mHandle, size, alignment, allocationOut, offsetOut);
}

ANGLE_INLINE void VirtualBlock::free(VmaVirtualAllocation allocation, VkDeviceSize offset)
{
    vma::VirtualFree(mHandle, allocation, offset);
}
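
// Illustrative usage sketch of allocate()/free() above (hypothetical code, not part of ANGLE;
// |block|, |kSize| and |kAlignment| are assumed to exist).  Virtual blocks only track
// sub-ranges; the caller maps |offset| onto its own backing buffer:
//
//   VmaVirtualAllocation allocation = VK_NULL_HANDLE;
//   VkDeviceSize offset             = 0;
//   if (block.allocate(kSize, kAlignment, &allocation, &offset) == VK_SUCCESS)
//   {
//       // ... use [offset, offset + kSize) of the backing buffer ...
//       block.free(allocation, offset);
//   }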

ANGLE_INLINE void VirtualBlock::calculateStats(vma::StatInfo *pStatInfo) const
{
    vma::CalculateVirtualBlockStats(mHandle, pStatInfo);
}
}  // namespace vk
}  // namespace rx

#endif  // LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_