1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_wrapper:
7 // Wrapper classes around Vulkan objects. In an ideal world we could generate this
8 // from vk.xml. Or reuse the generator in the vkhpp tool. For now this is manually
9 // generated and we must add missing functions and objects as we need them.
10
11 #ifndef LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
12 #define LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
13
14 #include "common/vulkan/vk_headers.h"
15 #include "libANGLE/renderer/renderer_utils.h"
16 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
17 #include "libANGLE/trace.h"
18
19 namespace rx
20 {
21 enum class DescriptorSetIndex : uint32_t;
22
23 namespace vk
24 {
25 // Helper macros that apply to all the wrapped object types.
26 // Unimplemented handle types:
27 // Instance
28 // PhysicalDevice
29 // Device
30 // Queue
31 // DescriptorSet
32
// X-macro listing every wrapped non-dispatchable handle type. Each FUNC(TYPE)
// expansion generates per-type code below (enum entries, forward declarations,
// HandleTypeHelper specializations). Keep alphabetized when adding new types.
#define ANGLE_HANDLE_TYPES_X(FUNC) \
    FUNC(Allocation)               \
    FUNC(Allocator)                \
    FUNC(Buffer)                   \
    FUNC(BufferBlock)              \
    FUNC(BufferView)               \
    FUNC(CommandPool)              \
    FUNC(DescriptorPool)           \
    FUNC(DescriptorSetLayout)      \
    FUNC(DeviceMemory)             \
    FUNC(Event)                    \
    FUNC(Fence)                    \
    FUNC(Framebuffer)              \
    FUNC(Image)                    \
    FUNC(ImageView)                \
    FUNC(Pipeline)                 \
    FUNC(PipelineCache)            \
    FUNC(PipelineLayout)           \
    FUNC(QueryPool)                \
    FUNC(RenderPass)               \
    FUNC(Sampler)                  \
    FUNC(SamplerYcbcrConversion)   \
    FUNC(Semaphore)                \
    FUNC(ShaderModule)
57
#define ANGLE_COMMA_SEP_FUNC(TYPE) TYPE,

// Runtime tag identifying which wrapper class a handle belongs to.
// CommandBuffer is listed explicitly because it lives in namespace priv and is
// not part of the X-macro list; EnumCount must remain the last entry.
enum class HandleType
{
    Invalid,
    CommandBuffer,
    ANGLE_HANDLE_TYPES_X(ANGLE_COMMA_SEP_FUNC) EnumCount
};

#undef ANGLE_COMMA_SEP_FUNC
68
// Forward-declare one wrapper class per handle type. Note: the X-macro
// invocation below must appear exactly once (the previous revision contained a
// duplicated/garbled line here).
#define ANGLE_PRE_DECLARE_CLASS_FUNC(TYPE) class TYPE;
ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)
namespace priv
{
class CommandBuffer;
}  // namespace priv
#undef ANGLE_PRE_DECLARE_CLASS_FUNC
76
// Returns the HandleType of a Vk Handle.
// Primary template is intentionally left undefined: using an unlisted type is a
// compile error.
template <typename T>
struct HandleTypeHelper;

// Generates a HandleTypeHelper specialization mapping each wrapper class to its
// HandleType enum value at compile time.
#define ANGLE_HANDLE_TYPE_HELPER_FUNC(TYPE)                         \
    template <>                                                     \
    struct HandleTypeHelper<TYPE>                                   \
    {                                                               \
        constexpr static HandleType kHandleType = HandleType::TYPE; \
    };

ANGLE_HANDLE_TYPES_X(ANGLE_HANDLE_TYPE_HELPER_FUNC)
// priv::CommandBuffer is outside the X-macro list, so specialize it manually.
template <>
struct HandleTypeHelper<priv::CommandBuffer>
{
    constexpr static HandleType kHandleType = HandleType::CommandBuffer;
};

#undef ANGLE_HANDLE_TYPE_HELPER_FUNC
96
97 // Base class for all wrapped vulkan objects. Implements several common helper routines.
98 template <typename DerivedT, typename HandleT>
99 class WrappedObject : angle::NonCopyable
100 {
101 public:
102 HandleT getHandle() const { return mHandle; }
103 void setHandle(HandleT handle) { mHandle = handle; }
104 bool valid() const { return (mHandle != VK_NULL_HANDLE); }
105
106 const HandleT *ptr() const { return &mHandle; }
107
108 HandleT release()
109 {
110 HandleT handle = mHandle;
111 mHandle = VK_NULL_HANDLE;
112 return handle;
113 }
114
115 protected:
116 WrappedObject() : mHandle(VK_NULL_HANDLE) {}
117 ~WrappedObject() { ASSERT(!valid()); }
118
119 WrappedObject(WrappedObject &&other) : mHandle(other.mHandle)
120 {
121 other.mHandle = VK_NULL_HANDLE;
122 }
123
124 // Only works to initialize empty objects, since we don't have the device handle.
125 WrappedObject &operator=(WrappedObject &&other)
126 {
127 ASSERT(!valid());
128 std::swap(mHandle, other.mHandle);
129 return *this;
130 }
131
132 HandleT mHandle;
133 };
134
// Wraps VkCommandPool. The owner must call destroy() before this object is
// destructed (enforced by ~WrappedObject).
class CommandPool final : public WrappedObject<CommandPool, VkCommandPool>
{
  public:
    CommandPool() = default;

    void destroy(VkDevice device);
    VkResult reset(VkDevice device, VkCommandPoolResetFlags flags);
    // Returns the given command buffers to this pool.
    void freeCommandBuffers(VkDevice device,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer *commandBuffers);

    VkResult init(VkDevice device, const VkCommandPoolCreateInfo &createInfo);
};
148
// Wraps VkPipeline. Initialized either as a graphics or a compute pipeline,
// optionally seeded from a pipeline cache.
class Pipeline final : public WrappedObject<Pipeline, VkPipeline>
{
  public:
    Pipeline() = default;
    void destroy(VkDevice device);

    VkResult initGraphics(VkDevice device,
                          const VkGraphicsPipelineCreateInfo &createInfo,
                          const PipelineCache &pipelineCacheVk);
    VkResult initCompute(VkDevice device,
                         const VkComputePipelineCreateInfo &createInfo,
                         const PipelineCache &pipelineCacheVk);
};
162
163 namespace priv
164 {
165
166 // Helper class that wraps a Vulkan command buffer.
// Helper class that wraps a Vulkan command buffer.
// Thin inline shims over vkCmd* entry points; most helpers only ASSERT validity
// and forward. Several single-region helpers hard-code a count of 1 (see the
// implementations below).
class CommandBuffer : public WrappedObject<CommandBuffer, VkCommandBuffer>
{
  public:
    CommandBuffer() = default;

    // Relinquishes ownership of the VkCommandBuffer without freeing it.
    VkCommandBuffer releaseHandle();

    // This is used for normal pool allocated command buffers. It reset the handle.
    void destroy(VkDevice device);

    // This is used in conjunction with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT.
    void destroy(VkDevice device, const CommandPool &commandPool);

    VkResult init(VkDevice device, const VkCommandBufferAllocateInfo &createInfo);

    using WrappedObject::operator=;

    // Whether queries can be used from this (secondary-style) command buffer;
    // requires the inheritedQueries device feature.
    static bool SupportsQueries(const VkPhysicalDeviceFeatures &features)
    {
        return (features.inheritedQueries == VK_TRUE);
    }

    // Vulkan command buffers are executed as secondary command buffers within a primary command
    // buffer.
    static constexpr bool ExecutesInline() { return false; }

    VkResult begin(const VkCommandBufferBeginInfo &info);

    void beginQuery(const QueryPool &queryPool, uint32_t query, VkQueryControlFlags flags);

    void beginRenderPass(const VkRenderPassBeginInfo &beginInfo, VkSubpassContents subpassContents);

    void bindDescriptorSets(const PipelineLayout &layout,
                            VkPipelineBindPoint pipelineBindPoint,
                            DescriptorSetIndex firstSet,
                            uint32_t descriptorSetCount,
                            const VkDescriptorSet *descriptorSets,
                            uint32_t dynamicOffsetCount,
                            const uint32_t *dynamicOffsets);
    void bindGraphicsPipeline(const Pipeline &pipeline);
    void bindComputePipeline(const Pipeline &pipeline);
    void bindPipeline(VkPipelineBindPoint pipelineBindPoint, const Pipeline &pipeline);

    void bindIndexBuffer(const Buffer &buffer, VkDeviceSize offset, VkIndexType indexType);
    void bindVertexBuffers(uint32_t firstBinding,
                           uint32_t bindingCount,
                           const VkBuffer *buffers,
                           const VkDeviceSize *offsets);
    void bindVertexBuffers2(uint32_t firstBinding,
                            uint32_t bindingCount,
                            const VkBuffer *buffers,
                            const VkDeviceSize *offsets,
                            const VkDeviceSize *sizes,
                            const VkDeviceSize *strides);

    void blitImage(const Image &srcImage,
                   VkImageLayout srcImageLayout,
                   const Image &dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageBlit *regions,
                   VkFilter filter);

    void clearColorImage(const Image &image,
                         VkImageLayout imageLayout,
                         const VkClearColorValue &color,
                         uint32_t rangeCount,
                         const VkImageSubresourceRange *ranges);
    void clearDepthStencilImage(const Image &image,
                                VkImageLayout imageLayout,
                                const VkClearDepthStencilValue &depthStencil,
                                uint32_t rangeCount,
                                const VkImageSubresourceRange *ranges);

    void clearAttachments(uint32_t attachmentCount,
                          const VkClearAttachment *attachments,
                          uint32_t rectCount,
                          const VkClearRect *rects);

    void copyBuffer(const Buffer &srcBuffer,
                    const Buffer &destBuffer,
                    uint32_t regionCount,
                    const VkBufferCopy *regions);

    void copyBufferToImage(VkBuffer srcBuffer,
                           const Image &dstImage,
                           VkImageLayout dstImageLayout,
                           uint32_t regionCount,
                           const VkBufferImageCopy *regions);
    void copyImageToBuffer(const Image &srcImage,
                           VkImageLayout srcImageLayout,
                           VkBuffer dstBuffer,
                           uint32_t regionCount,
                           const VkBufferImageCopy *regions);
    void copyImage(const Image &srcImage,
                   VkImageLayout srcImageLayout,
                   const Image &dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageCopy *regions);

    void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
    void dispatchIndirect(const Buffer &buffer, VkDeviceSize offset);

    void draw(uint32_t vertexCount,
              uint32_t instanceCount,
              uint32_t firstVertex,
              uint32_t firstInstance);
    void drawIndexed(uint32_t indexCount,
                     uint32_t instanceCount,
                     uint32_t firstIndex,
                     int32_t vertexOffset,
                     uint32_t firstInstance);
    void drawIndexedIndirect(const Buffer &buffer,
                             VkDeviceSize offset,
                             uint32_t drawCount,
                             uint32_t stride);
    void drawIndirect(const Buffer &buffer,
                      VkDeviceSize offset,
                      uint32_t drawCount,
                      uint32_t stride);

    VkResult end();
    void endQuery(const QueryPool &queryPool, uint32_t query);
    void endRenderPass();
    void executeCommands(uint32_t commandBufferCount, const CommandBuffer *commandBuffers);

    // No real tracking for primary buffers; returns placeholder values (see impl).
    void getMemoryUsageStats(size_t *usedMemoryOut, size_t *allocatedMemoryOut) const;

    void fillBuffer(const Buffer &dstBuffer,
                    VkDeviceSize dstOffset,
                    VkDeviceSize size,
                    uint32_t data);

    // Single-image-barrier convenience over vkCmdPipelineBarrier.
    void imageBarrier(VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      const VkImageMemoryBarrier &imageMemoryBarrier);

    // Single-event wait with one image barrier (vkCmdWaitEvents).
    void imageWaitEvent(const VkEvent &event,
                        VkPipelineStageFlags srcStageMask,
                        VkPipelineStageFlags dstStageMask,
                        const VkImageMemoryBarrier &imageMemoryBarrier);

    void nextSubpass(VkSubpassContents subpassContents);

    // Single-memory-barrier convenience over vkCmdPipelineBarrier.
    void memoryBarrier(VkPipelineStageFlags srcStageMask,
                       VkPipelineStageFlags dstStageMask,
                       const VkMemoryBarrier &memoryBarrier);

    void pipelineBarrier(VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         VkDependencyFlags dependencyFlags,
                         uint32_t memoryBarrierCount,
                         const VkMemoryBarrier *memoryBarriers,
                         uint32_t bufferMemoryBarrierCount,
                         const VkBufferMemoryBarrier *bufferMemoryBarriers,
                         uint32_t imageMemoryBarrierCount,
                         const VkImageMemoryBarrier *imageMemoryBarriers);

    // Note: offset must be 0 (asserted in the implementation).
    void pushConstants(const PipelineLayout &layout,
                       VkShaderStageFlags flag,
                       uint32_t offset,
                       uint32_t size,
                       const void *data);

    // Dynamic-state setters (some map to EXT entry points; see implementations).
    void setBlendConstants(const float blendConstants[4]);
    void setCullMode(VkCullModeFlags cullMode);
    void setDepthBias(float depthBiasConstantFactor,
                      float depthBiasClamp,
                      float depthBiasSlopeFactor);
    void setDepthBiasEnable(VkBool32 depthBiasEnable);
    void setDepthCompareOp(VkCompareOp depthCompareOp);
    void setDepthTestEnable(VkBool32 depthTestEnable);
    void setDepthWriteEnable(VkBool32 depthWriteEnable);
    void setEvent(VkEvent event, VkPipelineStageFlags stageMask);
    void setFragmentShadingRate(const VkExtent2D *fragmentSize,
                                VkFragmentShadingRateCombinerOpKHR ops[2]);
    void setFrontFace(VkFrontFace frontFace);
    void setLineWidth(float lineWidth);
    void setLogicOp(VkLogicOp logicOp);
    void setPrimitiveRestartEnable(VkBool32 primitiveRestartEnable);
    void setRasterizerDiscardEnable(VkBool32 rasterizerDiscardEnable);
    void setScissor(uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *scissors);
    void setStencilCompareMask(uint32_t compareFrontMask, uint32_t compareBackMask);
    void setStencilOp(VkStencilFaceFlags faceMask,
                      VkStencilOp failOp,
                      VkStencilOp passOp,
                      VkStencilOp depthFailOp,
                      VkCompareOp compareOp);
    void setStencilReference(uint32_t frontReference, uint32_t backReference);
    void setStencilTestEnable(VkBool32 stencilTestEnable);
    void setStencilWriteMask(uint32_t writeFrontMask, uint32_t writeBackMask);
    void setVertexInput(uint32_t vertexBindingDescriptionCount,
                        const VkVertexInputBindingDescription2EXT *vertexBindingDescriptions,
                        uint32_t vertexAttributeDescriptionCount,
                        const VkVertexInputAttributeDescription2EXT *vertexAttributeDescriptions);
    void setViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *viewports);
    VkResult reset();
    void resetEvent(VkEvent event, VkPipelineStageFlags stageMask);
    void resetQueryPool(const QueryPool &queryPool, uint32_t firstQuery, uint32_t queryCount);
    void resolveImage(const Image &srcImage,
                      VkImageLayout srcImageLayout,
                      const Image &dstImage,
                      VkImageLayout dstImageLayout,
                      uint32_t regionCount,
                      const VkImageResolve *regions);
    void waitEvents(uint32_t eventCount,
                    const VkEvent *events,
                    VkPipelineStageFlags srcStageMask,
                    VkPipelineStageFlags dstStageMask,
                    uint32_t memoryBarrierCount,
                    const VkMemoryBarrier *memoryBarriers,
                    uint32_t bufferMemoryBarrierCount,
                    const VkBufferMemoryBarrier *bufferMemoryBarriers,
                    uint32_t imageMemoryBarrierCount,
                    const VkImageMemoryBarrier *imageMemoryBarriers);

    void writeTimestamp(VkPipelineStageFlagBits pipelineStage,
                        const QueryPool &queryPool,
                        uint32_t query);

    // VK_EXT_transform_feedback
    void beginTransformFeedback(uint32_t firstCounterBuffer,
                                uint32_t counterBufferCount,
                                const VkBuffer *counterBuffers,
                                const VkDeviceSize *counterBufferOffsets);
    void endTransformFeedback(uint32_t firstCounterBuffer,
                              uint32_t counterBufferCount,
                              const VkBuffer *counterBuffers,
                              const VkDeviceSize *counterBufferOffsets);
    void bindTransformFeedbackBuffers(uint32_t firstBinding,
                                      uint32_t bindingCount,
                                      const VkBuffer *buffers,
                                      const VkDeviceSize *offsets,
                                      const VkDeviceSize *sizes);

    // VK_EXT_debug_utils
    void beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
    void endDebugUtilsLabelEXT();
    void insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
};
408 } // namespace priv
409
410 using PrimaryCommandBuffer = priv::CommandBuffer;
411
// Wraps VkImage. Supports both owned images (init/destroy) and externally owned
// images such as swapchain images (setHandle/reset).
class Image final : public WrappedObject<Image, VkImage>
{
  public:
    Image() = default;

    // Use this method if the lifetime of the image is not controlled by ANGLE. (SwapChain)
    void setHandle(VkImage handle);

    // Called on shutdown when the helper class *doesn't* own the handle to the image resource.
    void reset();

    // Called on shutdown when the helper class *does* own the handle to the image resource.
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkImageCreateInfo &createInfo);

    void getMemoryRequirements(VkDevice device, VkMemoryRequirements *requirementsOut) const;
    VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);
    VkResult bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo);

    void getSubresourceLayout(VkDevice device,
                              VkImageAspectFlagBits aspectMask,
                              uint32_t mipLevel,
                              uint32_t arrayLayer,
                              VkSubresourceLayout *outSubresourceLayout) const;

  private:
    // Allowed to bind suballocated memory directly to mHandle.
    friend class ImageMemorySuballocator;
};
441
// Wraps VkImageView.
class ImageView final : public WrappedObject<ImageView, VkImageView>
{
  public:
    ImageView() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkImageViewCreateInfo &createInfo);
};
450
// Wraps VkSemaphore. importFd supports external semaphore import
// (VK_KHR_external_semaphore_fd).
class Semaphore final : public WrappedObject<Semaphore, VkSemaphore>
{
  public:
    Semaphore() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device);
    VkResult importFd(VkDevice device, const VkImportSemaphoreFdInfoKHR &importFdInfo) const;
};
460
// Wraps VkFramebuffer.
class Framebuffer final : public WrappedObject<Framebuffer, VkFramebuffer>
{
  public:
    Framebuffer() = default;
    void destroy(VkDevice device);

    // Use this method only in necessary cases. (RenderPass)
    void setHandle(VkFramebuffer handle);

    VkResult init(VkDevice device, const VkFramebufferCreateInfo &createInfo);
};
472
// Wraps VkDeviceMemory. map() yields a CPU pointer into the allocation;
// flush/invalidate handle non-coherent memory ranges.
class DeviceMemory final : public WrappedObject<DeviceMemory, VkDeviceMemory>
{
  public:
    DeviceMemory() = default;
    void destroy(VkDevice device);

    VkResult allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo);
    VkResult map(VkDevice device,
                 VkDeviceSize offset,
                 VkDeviceSize size,
                 VkMemoryMapFlags flags,
                 uint8_t **mapPointer) const;
    void unmap(VkDevice device) const;
    void flush(VkDevice device, VkMappedMemoryRange &memRange);
    void invalidate(VkDevice device, VkMappedMemoryRange &memRange);
};
489
// Wraps a VmaAllocator (Vulkan Memory Allocator) instance. Unlike the other
// wrappers this owns a VMA object, not a raw Vulkan handle.
class Allocator : public WrappedObject<Allocator, VmaAllocator>
{
  public:
    Allocator() = default;
    void destroy();

    VkResult init(VkPhysicalDevice physicalDevice,
                  VkDevice device,
                  VkInstance instance,
                  uint32_t apiVersion,
                  VkDeviceSize preferredLargeHeapBlockSize);

    // Initializes the buffer handle and memory allocation.
    VkResult createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                          VkMemoryPropertyFlags requiredFlags,
                          VkMemoryPropertyFlags preferredFlags,
                          bool persistentlyMappedBuffers,
                          uint32_t *memoryTypeIndexOut,
                          Buffer *bufferOut,
                          Allocation *allocationOut) const;

    void getMemoryTypeProperties(uint32_t memoryTypeIndex, VkMemoryPropertyFlags *flagsOut) const;
    VkResult findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut) const;

    // Stats string is allocated by VMA; release it with freeStatsString.
    void buildStatsString(char **statsString, VkBool32 detailedMap);
    void freeStatsString(char *statsString);
};
521
// Wraps a VmaAllocation (a suballocation made by the Allocator). All operations
// require the Allocator that produced it.
class Allocation final : public WrappedObject<Allocation, VmaAllocation>
{
  public:
    Allocation() = default;
    void destroy(const Allocator &allocator);

    VkResult map(const Allocator &allocator, uint8_t **mapPointer) const;
    void unmap(const Allocator &allocator) const;
    void flush(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;
    void invalidate(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;

  private:
    // These classes create Allocations and need write access to mHandle.
    friend class Allocator;
    friend class ImageMemorySuballocator;
};
537
// Wraps VkRenderPass. init2 uses the VkRenderPassCreateInfo2 path
// (core 1.2 / VK_KHR_create_renderpass2).
class RenderPass final : public WrappedObject<RenderPass, VkRenderPass>
{
  public:
    RenderPass() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkRenderPassCreateInfo &createInfo);
    VkResult init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo);
};
547
// Intended access direction for a staging resource.
enum class StagingUsage
{
    Read,
    Write,
    Both,
};
554
// Wraps VkBuffer.
class Buffer final : public WrappedObject<Buffer, VkBuffer>
{
  public:
    Buffer() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkBufferCreateInfo &createInfo);
    VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory, VkDeviceSize offset);
    void getMemoryRequirements(VkDevice device, VkMemoryRequirements *memoryRequirementsOut);

  private:
    // Allocator::createBuffer writes the created handle directly.
    friend class Allocator;
};
568
// Wraps VkBufferView.
class BufferView final : public WrappedObject<BufferView, VkBufferView>
{
  public:
    BufferView() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkBufferViewCreateInfo &createInfo);
};
577
// Wraps VkShaderModule.
class ShaderModule final : public WrappedObject<ShaderModule, VkShaderModule>
{
  public:
    ShaderModule() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkShaderModuleCreateInfo &createInfo);
};
586
// Wraps VkPipelineLayout.
class PipelineLayout final : public WrappedObject<PipelineLayout, VkPipelineLayout>
{
  public:
    PipelineLayout() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkPipelineLayoutCreateInfo &createInfo);
};
595
// Wraps VkPipelineCache.
class PipelineCache final : public WrappedObject<PipelineCache, VkPipelineCache>
{
  public:
    PipelineCache() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkPipelineCacheCreateInfo &createInfo);
    // Standard two-call pattern: pass null cacheData to query the size first.
    VkResult getCacheData(VkDevice device, size_t *cacheSize, void *cacheData) const;
    VkResult merge(VkDevice device, uint32_t srcCacheCount, const VkPipelineCache *srcCaches) const;
};
606
// Wraps VkDescriptorSetLayout.
class DescriptorSetLayout final : public WrappedObject<DescriptorSetLayout, VkDescriptorSetLayout>
{
  public:
    DescriptorSetLayout() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkDescriptorSetLayoutCreateInfo &createInfo);
};
615
// Wraps VkDescriptorPool. Descriptor sets allocated from the pool are not
// wrapped (see the "Unimplemented handle types" note at the top of the file).
class DescriptorPool final : public WrappedObject<DescriptorPool, VkDescriptorPool>
{
  public:
    DescriptorPool() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkDescriptorPoolCreateInfo &createInfo);

    VkResult allocateDescriptorSets(VkDevice device,
                                    const VkDescriptorSetAllocateInfo &allocInfo,
                                    VkDescriptorSet *descriptorSetsOut);
    VkResult freeDescriptorSets(VkDevice device,
                                uint32_t descriptorSetCount,
                                const VkDescriptorSet *descriptorSets);
};
631
// Wraps VkSampler.
class Sampler final : public WrappedObject<Sampler, VkSampler>
{
  public:
    Sampler() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, const VkSamplerCreateInfo &createInfo);
};
639
// Wraps VkSamplerYcbcrConversion (YCbCr sampler conversions, core 1.1).
class SamplerYcbcrConversion final
    : public WrappedObject<SamplerYcbcrConversion, VkSamplerYcbcrConversion>
{
  public:
    SamplerYcbcrConversion() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo);
};
648
// Wraps VkEvent.
class Event final : public WrappedObject<Event, VkEvent>
{
  public:
    Event() = default;
    void destroy(VkDevice device);
    // Expose the base move-assignment (handles are freely movable when empty).
    using WrappedObject::operator=;

    VkResult init(VkDevice device, const VkEventCreateInfo &createInfo);
    VkResult getStatus(VkDevice device) const;
    VkResult set(VkDevice device) const;
    VkResult reset(VkDevice device) const;
};
661
// Wraps VkFence. import/export of fence payloads via file descriptors is
// supported (VK_KHR_external_fence_fd).
class Fence final : public WrappedObject<Fence, VkFence>
{
  public:
    Fence() = default;
    void destroy(VkDevice device);
    // Expose the base move-assignment (handles are freely movable when empty).
    using WrappedObject::operator=;

    VkResult init(VkDevice device, const VkFenceCreateInfo &createInfo);
    VkResult reset(VkDevice device);
    VkResult getStatus(VkDevice device) const;
    VkResult wait(VkDevice device, uint64_t timeout) const;
    VkResult importFd(VkDevice device, const VkImportFenceFdInfoKHR &importFenceFdInfo) const;
    VkResult exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo, int *outFd) const;
};
676
// Wraps VkQueryPool.
class QueryPool final : public WrappedObject<QueryPool, VkQueryPool>
{
  public:
    QueryPool() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkQueryPoolCreateInfo &createInfo);
    VkResult getResults(VkDevice device,
                        uint32_t firstQuery,
                        uint32_t queryCount,
                        size_t dataSize,
                        void *data,
                        VkDeviceSize stride,
                        VkQueryResultFlags flags) const;
};
692
// VirtualBlock
// Wraps VmaVirtualBlock: CPU-side suballocation bookkeeping over an
// externally-provided range (no actual GPU memory is owned here).
class VirtualBlock final : public WrappedObject<VirtualBlock, VmaVirtualBlock>
{
  public:
    VirtualBlock() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, vma::VirtualBlockCreateFlags flags, VkDeviceSize size);

    VkResult allocate(VkDeviceSize size,
                      VkDeviceSize alignment,
                      VmaVirtualAllocation *allocationOut,
                      VkDeviceSize *offsetOut);
    void free(VmaVirtualAllocation allocation, VkDeviceSize offset);
    void calculateStats(vma::StatInfo *pStatInfo) const;
};
708
709 // CommandPool implementation.
710 ANGLE_INLINE void CommandPool::destroy(VkDevice device)
711 {
712 if (valid())
713 {
714 vkDestroyCommandPool(device, mHandle, nullptr);
715 mHandle = VK_NULL_HANDLE;
716 }
717 }
718
// Resets all command buffers allocated from this pool (vkResetCommandPool).
ANGLE_INLINE VkResult CommandPool::reset(VkDevice device, VkCommandPoolResetFlags flags)
{
    ASSERT(valid());
    return vkResetCommandPool(device, mHandle, flags);
}
724
// Returns command buffers to this pool; the VkCommandBuffer handles become invalid.
ANGLE_INLINE void CommandPool::freeCommandBuffers(VkDevice device,
                                                  uint32_t commandBufferCount,
                                                  const VkCommandBuffer *commandBuffers)
{
    ASSERT(valid());
    vkFreeCommandBuffers(device, mHandle, commandBufferCount, commandBuffers);
}
732
// Creates the pool; must be called on an empty wrapper.
ANGLE_INLINE VkResult CommandPool::init(VkDevice device, const VkCommandPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateCommandPool(device, &createInfo, nullptr, &mHandle);
}
738
739 namespace priv
740 {
741
742 // CommandBuffer implementation.
743 ANGLE_INLINE VkCommandBuffer CommandBuffer::releaseHandle()
744 {
745 VkCommandBuffer handle = mHandle;
746 mHandle = nullptr;
747 return handle;
748 }
749
// Allocates the command buffer from the pool named in createInfo.
ANGLE_INLINE VkResult CommandBuffer::init(VkDevice device,
                                          const VkCommandBufferAllocateInfo &createInfo)
{
    ASSERT(!valid());
    return vkAllocateCommandBuffers(device, &createInfo, &mHandle);
}
756
// Records a blit. Only a single region is supported: regionCount is asserted
// to be 1 and the call passes a literal 1.
ANGLE_INLINE void CommandBuffer::blitImage(const Image &srcImage,
                                           VkImageLayout srcImageLayout,
                                           const Image &dstImage,
                                           VkImageLayout dstImageLayout,
                                           uint32_t regionCount,
                                           const VkImageBlit *regions,
                                           VkFilter filter)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    ASSERT(regionCount == 1);
    vkCmdBlitImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                   dstImageLayout, 1, regions, filter);
}
770
// Begins recording into the (already allocated) command buffer.
ANGLE_INLINE VkResult CommandBuffer::begin(const VkCommandBufferBeginInfo &info)
{
    ASSERT(valid());
    return vkBeginCommandBuffer(mHandle, &info);
}
776
// Finishes recording; the buffer can then be submitted.
ANGLE_INLINE VkResult CommandBuffer::end()
{
    ASSERT(valid());
    return vkEndCommandBuffer(mHandle);
}
782
// Resets the command buffer to the initial state (no release-resources flag).
ANGLE_INLINE VkResult CommandBuffer::reset()
{
    ASSERT(valid());
    return vkResetCommandBuffer(mHandle, 0);
}
788
// Advances to the next subpass of the current render pass.
ANGLE_INLINE void CommandBuffer::nextSubpass(VkSubpassContents subpassContents)
{
    ASSERT(valid());
    vkCmdNextSubpass(mHandle, subpassContents);
}
794
// Records a pipeline barrier with exactly one global memory barrier and no
// buffer/image barriers.
ANGLE_INLINE void CommandBuffer::memoryBarrier(VkPipelineStageFlags srcStageMask,
                                               VkPipelineStageFlags dstStageMask,
                                               const VkMemoryBarrier &memoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 1, &memoryBarrier, 0, nullptr, 0,
                         nullptr);
}
803
// Fully general pipeline barrier; forwards all three barrier arrays unchanged.
ANGLE_INLINE void CommandBuffer::pipelineBarrier(VkPipelineStageFlags srcStageMask,
                                                 VkPipelineStageFlags dstStageMask,
                                                 VkDependencyFlags dependencyFlags,
                                                 uint32_t memoryBarrierCount,
                                                 const VkMemoryBarrier *memoryBarriers,
                                                 uint32_t bufferMemoryBarrierCount,
                                                 const VkBufferMemoryBarrier *bufferMemoryBarriers,
                                                 uint32_t imageMemoryBarrierCount,
                                                 const VkImageMemoryBarrier *imageMemoryBarriers)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount,
                         memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
                         imageMemoryBarrierCount, imageMemoryBarriers);
}
819
// Records a pipeline barrier with exactly one image memory barrier.
ANGLE_INLINE void CommandBuffer::imageBarrier(VkPipelineStageFlags srcStageMask,
                                              VkPipelineStageFlags dstStageMask,
                                              const VkImageMemoryBarrier &imageMemoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1,
                         &imageMemoryBarrier);
}
828
// Waits on a single event, applying exactly one image memory barrier.
ANGLE_INLINE void CommandBuffer::imageWaitEvent(const VkEvent &event,
                                                VkPipelineStageFlags srcStageMask,
                                                VkPipelineStageFlags dstStageMask,
                                                const VkImageMemoryBarrier &imageMemoryBarrier)
{
    ASSERT(valid());
    vkCmdWaitEvents(mHandle, 1, &event, srcStageMask, dstStageMask, 0, nullptr, 0, nullptr, 1,
                    &imageMemoryBarrier);
}
838
// Pool-allocated buffers are reclaimed when the pool is destroyed/reset, so
// this only drops the handle; 'device' is intentionally unused.
ANGLE_INLINE void CommandBuffer::destroy(VkDevice device)
{
    releaseHandle();
}
843
844 ANGLE_INLINE void CommandBuffer::destroy(VkDevice device, const vk::CommandPool &commandPool)
845 {
846 if (valid())
847 {
848 ASSERT(commandPool.valid());
849 vkFreeCommandBuffers(device, commandPool.getHandle(), 1, &mHandle);
850 mHandle = VK_NULL_HANDLE;
851 }
852 }
853
// Records a buffer-to-buffer copy; unlike the image copies, all regions are
// forwarded.
ANGLE_INLINE void CommandBuffer::copyBuffer(const Buffer &srcBuffer,
                                            const Buffer &destBuffer,
                                            uint32_t regionCount,
                                            const VkBufferCopy *regions)
{
    ASSERT(valid() && srcBuffer.valid() && destBuffer.valid());
    vkCmdCopyBuffer(mHandle, srcBuffer.getHandle(), destBuffer.getHandle(), regionCount, regions);
}
862
// Buffer-to-image copy; only a single region is supported (asserted, literal 1
// passed).
ANGLE_INLINE void CommandBuffer::copyBufferToImage(VkBuffer srcBuffer,
                                                   const Image &dstImage,
                                                   VkImageLayout dstImageLayout,
                                                   uint32_t regionCount,
                                                   const VkBufferImageCopy *regions)
{
    ASSERT(valid() && dstImage.valid());
    ASSERT(srcBuffer != VK_NULL_HANDLE);
    ASSERT(regionCount == 1);
    vkCmdCopyBufferToImage(mHandle, srcBuffer, dstImage.getHandle(), dstImageLayout, 1, regions);
}
874
// Image-to-buffer copy; only a single region is supported (asserted, literal 1
// passed).
ANGLE_INLINE void CommandBuffer::copyImageToBuffer(const Image &srcImage,
                                                   VkImageLayout srcImageLayout,
                                                   VkBuffer dstBuffer,
                                                   uint32_t regionCount,
                                                   const VkBufferImageCopy *regions)
{
    ASSERT(valid() && srcImage.valid());
    ASSERT(dstBuffer != VK_NULL_HANDLE);
    ASSERT(regionCount == 1);
    vkCmdCopyImageToBuffer(mHandle, srcImage.getHandle(), srcImageLayout, dstBuffer, 1, regions);
}
886
// Clears color image subresources; only a single range is supported (asserted).
ANGLE_INLINE void CommandBuffer::clearColorImage(const Image &image,
                                                 VkImageLayout imageLayout,
                                                 const VkClearColorValue &color,
                                                 uint32_t rangeCount,
                                                 const VkImageSubresourceRange *ranges)
{
    ASSERT(valid());
    ASSERT(rangeCount == 1);
    vkCmdClearColorImage(mHandle, image.getHandle(), imageLayout, &color, 1, ranges);
}
897
// Clears depth/stencil image subresources; only a single range is supported
// (asserted).
ANGLE_INLINE void CommandBuffer::clearDepthStencilImage(
    const Image &image,
    VkImageLayout imageLayout,
    const VkClearDepthStencilValue &depthStencil,
    uint32_t rangeCount,
    const VkImageSubresourceRange *ranges)
{
    ASSERT(valid());
    ASSERT(rangeCount == 1);
    vkCmdClearDepthStencilImage(mHandle, image.getHandle(), imageLayout, &depthStencil, 1, ranges);
}
909
// Clears regions of the current render pass attachments.
ANGLE_INLINE void CommandBuffer::clearAttachments(uint32_t attachmentCount,
                                                  const VkClearAttachment *attachments,
                                                  uint32_t rectCount,
                                                  const VkClearRect *rects)
{
    ASSERT(valid());
    vkCmdClearAttachments(mHandle, attachmentCount, attachments, rectCount, rects);
}
918
// Image-to-image copy; only a single region is supported (asserted, literal 1
// passed).
ANGLE_INLINE void CommandBuffer::copyImage(const Image &srcImage,
                                           VkImageLayout srcImageLayout,
                                           const Image &dstImage,
                                           VkImageLayout dstImageLayout,
                                           uint32_t regionCount,
                                           const VkImageCopy *regions)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    ASSERT(regionCount == 1);
    vkCmdCopyImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                   dstImageLayout, 1, regions);
}
931
// Begins a render pass instance within this command buffer.
ANGLE_INLINE void CommandBuffer::beginRenderPass(const VkRenderPassBeginInfo &beginInfo,
                                                 VkSubpassContents subpassContents)
{
    ASSERT(valid());
    vkCmdBeginRenderPass(mHandle, &beginInfo, subpassContents);
}
938
939 ANGLE_INLINE void CommandBuffer::endRenderPass()
940 {
941 ASSERT(mHandle != VK_NULL_HANDLE);
942 vkCmdEndRenderPass(mHandle);
943 }
944
// Binds the index buffer for subsequent indexed draws.
ANGLE_INLINE void CommandBuffer::bindIndexBuffer(const Buffer &buffer,
                                                 VkDeviceSize offset,
                                                 VkIndexType indexType)
{
    ASSERT(valid());
    vkCmdBindIndexBuffer(mHandle, buffer.getHandle(), offset, indexType);
}
952
// Binds descriptor sets; firstSet is the typed DescriptorSetIndex and is
// converted to its underlying integer for the Vulkan call.
ANGLE_INLINE void CommandBuffer::bindDescriptorSets(const PipelineLayout &layout,
                                                    VkPipelineBindPoint pipelineBindPoint,
                                                    DescriptorSetIndex firstSet,
                                                    uint32_t descriptorSetCount,
                                                    const VkDescriptorSet *descriptorSets,
                                                    uint32_t dynamicOffsetCount,
                                                    const uint32_t *dynamicOffsets)
{
    ASSERT(valid() && layout.valid());
    vkCmdBindDescriptorSets(this->mHandle, pipelineBindPoint, layout.getHandle(),
                            ToUnderlying(firstSet), descriptorSetCount, descriptorSets,
                            dynamicOffsetCount, dynamicOffsets);
}
966
// Executes secondary command buffers. Relies on CommandBuffer wrapping exactly
// one VkCommandBuffer, so the wrapper array's first handle pointer doubles as a
// contiguous VkCommandBuffer array.
ANGLE_INLINE void CommandBuffer::executeCommands(uint32_t commandBufferCount,
                                                 const CommandBuffer *commandBuffers)
{
    ASSERT(valid());
    vkCmdExecuteCommands(mHandle, commandBufferCount, commandBuffers[0].ptr());
}
973
974 ANGLE_INLINE void CommandBuffer::getMemoryUsageStats(size_t *usedMemoryOut,
975 size_t *allocatedMemoryOut) const
976 {
977 // No data available.
978 *usedMemoryOut = 0;
979 *allocatedMemoryOut = 1;
980 }
981
982 ANGLE_INLINE void CommandBuffer::fillBuffer(const Buffer &dstBuffer,
983 VkDeviceSize dstOffset,
984 VkDeviceSize size,
985 uint32_t data)
986 {
987 ASSERT(valid());
988 vkCmdFillBuffer(mHandle, dstBuffer.getHandle(), dstOffset, size, data);
989 }
990
991 ANGLE_INLINE void CommandBuffer::pushConstants(const PipelineLayout &layout,
992 VkShaderStageFlags flag,
993 uint32_t offset,
994 uint32_t size,
995 const void *data)
996 {
997 ASSERT(valid() && layout.valid());
998 ASSERT(offset == 0);
999 vkCmdPushConstants(mHandle, layout.getHandle(), flag, 0, size, data);
1000 }
1001
1002 ANGLE_INLINE void CommandBuffer::setBlendConstants(const float blendConstants[4])
1003 {
1004 ASSERT(valid());
1005 vkCmdSetBlendConstants(mHandle, blendConstants);
1006 }
1007
// Dynamic state setters.  The *EXT entry points below come from
// VK_EXT_extended_dynamic_state(2); callers are expected to have verified the
// extension is enabled before recording these commands.
ANGLE_INLINE void CommandBuffer::setCullMode(VkCullModeFlags cullMode)
{
    ASSERT(valid());
    vkCmdSetCullModeEXT(mHandle, cullMode);
}

// Sets dynamic depth-bias parameters.
ANGLE_INLINE void CommandBuffer::setDepthBias(float depthBiasConstantFactor,
                                              float depthBiasClamp,
                                              float depthBiasSlopeFactor)
{
    ASSERT(valid());
    vkCmdSetDepthBias(mHandle, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
}

// Enables/disables depth bias dynamically.
ANGLE_INLINE void CommandBuffer::setDepthBiasEnable(VkBool32 depthBiasEnable)
{
    ASSERT(valid());
    vkCmdSetDepthBiasEnableEXT(mHandle, depthBiasEnable);
}

// Sets the dynamic depth compare op.
ANGLE_INLINE void CommandBuffer::setDepthCompareOp(VkCompareOp depthCompareOp)
{
    ASSERT(valid());
    vkCmdSetDepthCompareOpEXT(mHandle, depthCompareOp);
}

// Enables/disables the depth test dynamically.
ANGLE_INLINE void CommandBuffer::setDepthTestEnable(VkBool32 depthTestEnable)
{
    ASSERT(valid());
    vkCmdSetDepthTestEnableEXT(mHandle, depthTestEnable);
}

// Enables/disables depth writes dynamically.
ANGLE_INLINE void CommandBuffer::setDepthWriteEnable(VkBool32 depthWriteEnable)
{
    ASSERT(valid());
    vkCmdSetDepthWriteEnableEXT(mHandle, depthWriteEnable);
}

// Signals |event| when all prior commands reach |stageMask|.
ANGLE_INLINE void CommandBuffer::setEvent(VkEvent event, VkPipelineStageFlags stageMask)
{
    ASSERT(valid() && event != VK_NULL_HANDLE);
    vkCmdSetEvent(mHandle, event, stageMask);
}

// Sets the fragment shading rate (VK_KHR_fragment_shading_rate).
ANGLE_INLINE void CommandBuffer::setFragmentShadingRate(const VkExtent2D *fragmentSize,
                                                        VkFragmentShadingRateCombinerOpKHR ops[2])
{
    ASSERT(valid() && fragmentSize != nullptr);
    vkCmdSetFragmentShadingRateKHR(mHandle, fragmentSize, ops);
}

// Sets the dynamic front-face winding.
ANGLE_INLINE void CommandBuffer::setFrontFace(VkFrontFace frontFace)
{
    ASSERT(valid());
    vkCmdSetFrontFaceEXT(mHandle, frontFace);
}

// Sets the dynamic line width.
ANGLE_INLINE void CommandBuffer::setLineWidth(float lineWidth)
{
    ASSERT(valid());
    vkCmdSetLineWidth(mHandle, lineWidth);
}

// Sets the dynamic logic op (VK_EXT_extended_dynamic_state2).
ANGLE_INLINE void CommandBuffer::setLogicOp(VkLogicOp logicOp)
{
    ASSERT(valid());
    vkCmdSetLogicOpEXT(mHandle, logicOp);
}

// Enables/disables primitive restart dynamically.
ANGLE_INLINE void CommandBuffer::setPrimitiveRestartEnable(VkBool32 primitiveRestartEnable)
{
    ASSERT(valid());
    vkCmdSetPrimitiveRestartEnableEXT(mHandle, primitiveRestartEnable);
}

// Enables/disables rasterizer discard dynamically.
ANGLE_INLINE void CommandBuffer::setRasterizerDiscardEnable(VkBool32 rasterizerDiscardEnable)
{
    ASSERT(valid());
    vkCmdSetRasterizerDiscardEnableEXT(mHandle, rasterizerDiscardEnable);
}

// Sets |scissorCount| scissor rectangles starting at index |firstScissor|.
ANGLE_INLINE void CommandBuffer::setScissor(uint32_t firstScissor,
                                            uint32_t scissorCount,
                                            const VkRect2D *scissors)
{
    ASSERT(valid() && scissors != nullptr);
    vkCmdSetScissor(mHandle, firstScissor, scissorCount, scissors);
}
1096
// Sets separate front/back stencil compare masks with two Vulkan calls, one per face.
ANGLE_INLINE void CommandBuffer::setStencilCompareMask(uint32_t compareFrontMask,
                                                       uint32_t compareBackMask)
{
    ASSERT(valid());
    vkCmdSetStencilCompareMask(mHandle, VK_STENCIL_FACE_FRONT_BIT, compareFrontMask);
    vkCmdSetStencilCompareMask(mHandle, VK_STENCIL_FACE_BACK_BIT, compareBackMask);
}

// Sets the dynamic stencil ops for the faces selected by |faceMask|
// (VK_EXT_extended_dynamic_state).
ANGLE_INLINE void CommandBuffer::setStencilOp(VkStencilFaceFlags faceMask,
                                              VkStencilOp failOp,
                                              VkStencilOp passOp,
                                              VkStencilOp depthFailOp,
                                              VkCompareOp compareOp)
{
    ASSERT(valid());
    vkCmdSetStencilOpEXT(mHandle, faceMask, failOp, passOp, depthFailOp, compareOp);
}

// Sets separate front/back stencil reference values, one Vulkan call per face.
ANGLE_INLINE void CommandBuffer::setStencilReference(uint32_t frontReference,
                                                     uint32_t backReference)
{
    ASSERT(valid());
    vkCmdSetStencilReference(mHandle, VK_STENCIL_FACE_FRONT_BIT, frontReference);
    vkCmdSetStencilReference(mHandle, VK_STENCIL_FACE_BACK_BIT, backReference);
}

// Enables/disables the stencil test dynamically.
ANGLE_INLINE void CommandBuffer::setStencilTestEnable(VkBool32 stencilTestEnable)
{
    ASSERT(valid());
    vkCmdSetStencilTestEnableEXT(mHandle, stencilTestEnable);
}

// Sets separate front/back stencil write masks, one Vulkan call per face.
ANGLE_INLINE void CommandBuffer::setStencilWriteMask(uint32_t writeFrontMask,
                                                     uint32_t writeBackMask)
{
    ASSERT(valid());
    vkCmdSetStencilWriteMask(mHandle, VK_STENCIL_FACE_FRONT_BIT, writeFrontMask);
    vkCmdSetStencilWriteMask(mHandle, VK_STENCIL_FACE_BACK_BIT, writeBackMask);
}
1136
// Sets dynamic vertex input state (VK_EXT_vertex_input_dynamic_state).
ANGLE_INLINE void CommandBuffer::setVertexInput(
    uint32_t vertexBindingDescriptionCount,
    const VkVertexInputBindingDescription2EXT *VertexBindingDescriptions,
    uint32_t vertexAttributeDescriptionCount,
    const VkVertexInputAttributeDescription2EXT *VertexAttributeDescriptions)
{
    ASSERT(valid());
    vkCmdSetVertexInputEXT(mHandle, vertexBindingDescriptionCount, VertexBindingDescriptions,
                           vertexAttributeDescriptionCount, VertexAttributeDescriptions);
}

// Sets |viewportCount| viewports starting at index |firstViewport|.
ANGLE_INLINE void CommandBuffer::setViewport(uint32_t firstViewport,
                                             uint32_t viewportCount,
                                             const VkViewport *viewports)
{
    ASSERT(valid() && viewports != nullptr);
    vkCmdSetViewport(mHandle, firstViewport, viewportCount, viewports);
}

// Resets |event| to the unsignaled state at |stageMask|.
ANGLE_INLINE void CommandBuffer::resetEvent(VkEvent event, VkPipelineStageFlags stageMask)
{
    ASSERT(valid() && event != VK_NULL_HANDLE);
    vkCmdResetEvent(mHandle, event, stageMask);
}
1161
// Waits for |eventCount| events and applies the supplied memory / buffer / image
// barriers (vkCmdWaitEvents).
ANGLE_INLINE void CommandBuffer::waitEvents(uint32_t eventCount,
                                            const VkEvent *events,
                                            VkPipelineStageFlags srcStageMask,
                                            VkPipelineStageFlags dstStageMask,
                                            uint32_t memoryBarrierCount,
                                            const VkMemoryBarrier *memoryBarriers,
                                            uint32_t bufferMemoryBarrierCount,
                                            const VkBufferMemoryBarrier *bufferMemoryBarriers,
                                            uint32_t imageMemoryBarrierCount,
                                            const VkImageMemoryBarrier *imageMemoryBarriers)
{
    ASSERT(valid());
    vkCmdWaitEvents(mHandle, eventCount, events, srcStageMask, dstStageMask, memoryBarrierCount,
                    memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
                    imageMemoryBarrierCount, imageMemoryBarriers);
}

// Resets |queryCount| queries in |queryPool| starting at |firstQuery|.
ANGLE_INLINE void CommandBuffer::resetQueryPool(const QueryPool &queryPool,
                                                uint32_t firstQuery,
                                                uint32_t queryCount)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdResetQueryPool(mHandle, queryPool.getHandle(), firstQuery, queryCount);
}

// Resolves a multisampled image into a single-sample image (vkCmdResolveImage).
ANGLE_INLINE void CommandBuffer::resolveImage(const Image &srcImage,
                                              VkImageLayout srcImageLayout,
                                              const Image &dstImage,
                                              VkImageLayout dstImageLayout,
                                              uint32_t regionCount,
                                              const VkImageResolve *regions)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    vkCmdResolveImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                      dstImageLayout, regionCount, regions);
}

// Begins query |query| in |queryPool|.
ANGLE_INLINE void CommandBuffer::beginQuery(const QueryPool &queryPool,
                                            uint32_t query,
                                            VkQueryControlFlags flags)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdBeginQuery(mHandle, queryPool.getHandle(), query, flags);
}

// Ends query |query| in |queryPool|.
ANGLE_INLINE void CommandBuffer::endQuery(const QueryPool &queryPool, uint32_t query)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdEndQuery(mHandle, queryPool.getHandle(), query);
}
1212
1213 ANGLE_INLINE void CommandBuffer::writeTimestamp(VkPipelineStageFlagBits pipelineStage,
1214 const QueryPool &queryPool,
1215 uint32_t query)
1216 {
1217 ASSERT(valid());
1218 vkCmdWriteTimestamp(mHandle, pipelineStage, queryPool.getHandle(), query);
1219 }
1220
// Records a non-indexed draw.
ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount,
                                      uint32_t instanceCount,
                                      uint32_t firstVertex,
                                      uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
}

// Records an indexed draw.
ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount,
                                             uint32_t instanceCount,
                                             uint32_t firstIndex,
                                             int32_t vertexOffset,
                                             uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}

// Records an indexed indirect draw; parameters are read from |buffer| at |offset|.
ANGLE_INLINE void CommandBuffer::drawIndexedIndirect(const Buffer &buffer,
                                                     VkDeviceSize offset,
                                                     uint32_t drawCount,
                                                     uint32_t stride)
{
    ASSERT(valid());
    vkCmdDrawIndexedIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
}

// Records a non-indexed indirect draw; parameters are read from |buffer| at |offset|.
ANGLE_INLINE void CommandBuffer::drawIndirect(const Buffer &buffer,
                                              VkDeviceSize offset,
                                              uint32_t drawCount,
                                              uint32_t stride)
{
    ASSERT(valid());
    vkCmdDrawIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
}

// Dispatches a compute workload of the given workgroup counts.
ANGLE_INLINE void CommandBuffer::dispatch(uint32_t groupCountX,
                                          uint32_t groupCountY,
                                          uint32_t groupCountZ)
{
    ASSERT(valid());
    vkCmdDispatch(mHandle, groupCountX, groupCountY, groupCountZ);
}

// Dispatches a compute workload whose group counts are read from |buffer| at |offset|.
ANGLE_INLINE void CommandBuffer::dispatchIndirect(const Buffer &buffer, VkDeviceSize offset)
{
    ASSERT(valid());
    vkCmdDispatchIndirect(mHandle, buffer.getHandle(), offset);
}

// Binds |pipeline| at the caller-specified bind point.
ANGLE_INLINE void CommandBuffer::bindPipeline(VkPipelineBindPoint pipelineBindPoint,
                                              const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, pipelineBindPoint, pipeline.getHandle());
}

// Convenience wrapper: binds |pipeline| at the graphics bind point.
ANGLE_INLINE void CommandBuffer::bindGraphicsPipeline(const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.getHandle());
}

// Convenience wrapper: binds |pipeline| at the compute bind point.
ANGLE_INLINE void CommandBuffer::bindComputePipeline(const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.getHandle());
}

// Binds |bindingCount| vertex buffers starting at |firstBinding|.
ANGLE_INLINE void CommandBuffer::bindVertexBuffers(uint32_t firstBinding,
                                                   uint32_t bindingCount,
                                                   const VkBuffer *buffers,
                                                   const VkDeviceSize *offsets)
{
    ASSERT(valid());
    vkCmdBindVertexBuffers(mHandle, firstBinding, bindingCount, buffers, offsets);
}
1299
// Binds vertex buffers with per-binding sizes and strides
// (vkCmdBindVertexBuffers2EXT, VK_EXT_extended_dynamic_state).
ANGLE_INLINE void CommandBuffer::bindVertexBuffers2(uint32_t firstBinding,
                                                    uint32_t bindingCount,
                                                    const VkBuffer *buffers,
                                                    const VkDeviceSize *offsets,
                                                    const VkDeviceSize *sizes,
                                                    const VkDeviceSize *strides)
{
    ASSERT(valid());
    vkCmdBindVertexBuffers2EXT(mHandle, firstBinding, bindingCount, buffers, offsets, sizes,
                               strides);
}

// Begins transform feedback (VK_EXT_transform_feedback).  The extra assert on the
// entry point guards against the extension function not having been loaded.
ANGLE_INLINE void CommandBuffer::beginTransformFeedback(uint32_t firstCounterBuffer,
                                                        uint32_t counterBufferCount,
                                                        const VkBuffer *counterBuffers,
                                                        const VkDeviceSize *counterBufferOffsets)
{
    ASSERT(valid());
    ASSERT(vkCmdBeginTransformFeedbackEXT);
    vkCmdBeginTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
                                   counterBufferOffsets);
}

// Ends transform feedback, writing counter values into the counter buffers.
ANGLE_INLINE void CommandBuffer::endTransformFeedback(uint32_t firstCounterBuffer,
                                                      uint32_t counterBufferCount,
                                                      const VkBuffer *counterBuffers,
                                                      const VkDeviceSize *counterBufferOffsets)
{
    ASSERT(valid());
    ASSERT(vkCmdEndTransformFeedbackEXT);
    vkCmdEndTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
                                 counterBufferOffsets);
}

// Binds transform feedback capture buffers.
ANGLE_INLINE void CommandBuffer::bindTransformFeedbackBuffers(uint32_t firstBinding,
                                                              uint32_t bindingCount,
                                                              const VkBuffer *buffers,
                                                              const VkDeviceSize *offsets,
                                                              const VkDeviceSize *sizes)
{
    ASSERT(valid());
    ASSERT(vkCmdBindTransformFeedbackBuffersEXT);
    vkCmdBindTransformFeedbackBuffersEXT(mHandle, firstBinding, bindingCount, buffers, offsets,
                                         sizes);
}
1345
// Opens a debug label region (VK_EXT_debug_utils).  The inner scope selects between
// ANGLE's statically-linked function pointer (rx namespace) and the volk-provided
// global symbol, depending on how the Vulkan loader is linked.
ANGLE_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
{
    ASSERT(valid());
    {
#if !defined(ANGLE_SHARED_LIBVULKAN)
        // When the vulkan-loader is statically linked, we need to use the extension
        // functions defined in ANGLE's rx namespace. When it's dynamically linked
        // with volk, this will default to the function definitions with no namespace
        using rx::vkCmdBeginDebugUtilsLabelEXT;
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
        ASSERT(vkCmdBeginDebugUtilsLabelEXT);
        vkCmdBeginDebugUtilsLabelEXT(mHandle, &labelInfo);
    }
}

// Closes the most recently opened debug label region.
ANGLE_INLINE void CommandBuffer::endDebugUtilsLabelEXT()
{
    ASSERT(valid());
    ASSERT(vkCmdEndDebugUtilsLabelEXT);
    vkCmdEndDebugUtilsLabelEXT(mHandle);
}

// Inserts a single (non-region) debug label into the command stream.
ANGLE_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
{
    ASSERT(valid());
    ASSERT(vkCmdInsertDebugUtilsLabelEXT);
    vkCmdInsertDebugUtilsLabelEXT(mHandle, &labelInfo);
}
1374 } // namespace priv
1375
1376 // Image implementation.
// Adopts an externally created VkImage handle without taking any other action.
ANGLE_INLINE void Image::setHandle(VkImage handle)
{
    mHandle = handle;
}

// Drops the handle WITHOUT destroying it — used when ownership lives elsewhere
// (e.g. swapchain images).  Use destroy() to actually release the image.
ANGLE_INLINE void Image::reset()
{
    mHandle = VK_NULL_HANDLE;
}

// Destroys the image if this wrapper owns a live handle; safe to call when invalid.
ANGLE_INLINE void Image::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyImage(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the image; must not already hold a handle.
ANGLE_INLINE VkResult Image::init(VkDevice device, const VkImageCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateImage(device, &createInfo, nullptr, &mHandle);
}

// Queries size/alignment/memory-type requirements for this image.
ANGLE_INLINE void Image::getMemoryRequirements(VkDevice device,
                                               VkMemoryRequirements *requirementsOut) const
{
    ASSERT(valid());
    vkGetImageMemoryRequirements(device, mHandle, requirementsOut);
}

// Binds |deviceMemory| to this image at offset 0.
ANGLE_INLINE VkResult Image::bindMemory(VkDevice device, const vk::DeviceMemory &deviceMemory)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindImageMemory(device, mHandle, deviceMemory.getHandle(), 0);
}

// Binds memory via the extensible vkBindImageMemory2 path (single bind info).
// NOTE(review): bindInfo is expected to reference this image's handle — the wrapper
// does not check that; verify at the call site.
ANGLE_INLINE VkResult Image::bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo)
{
    ASSERT(valid());
    return vkBindImageMemory2(device, 1, &bindInfo);
}

// Retrieves the layout (offset/pitch) of one subresource of a linear image.
ANGLE_INLINE void Image::getSubresourceLayout(VkDevice device,
                                              VkImageAspectFlagBits aspectMask,
                                              uint32_t mipLevel,
                                              uint32_t arrayLayer,
                                              VkSubresourceLayout *outSubresourceLayout) const
{
    VkImageSubresource subresource = {};
    subresource.aspectMask         = aspectMask;
    subresource.mipLevel           = mipLevel;
    subresource.arrayLayer         = arrayLayer;

    vkGetImageSubresourceLayout(device, getHandle(), &subresource, outSubresourceLayout);
}
1434
1435 // ImageView implementation.
// Destroys the image view if live; safe to call when invalid.
ANGLE_INLINE void ImageView::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyImageView(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the image view.
// NOTE(review): unlike the other init() wrappers in this file, this one does not
// ASSERT(!valid()) — confirm whether re-init without destroy is intentional here.
ANGLE_INLINE VkResult ImageView::init(VkDevice device, const VkImageViewCreateInfo &createInfo)
{
    return vkCreateImageView(device, &createInfo, nullptr, &mHandle);
}
1449
1450 // Semaphore implementation.
// Destroys the semaphore if live; safe to call when invalid.
ANGLE_INLINE void Semaphore::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySemaphore(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates a plain binary semaphore (no pNext, no flags).
ANGLE_INLINE VkResult Semaphore::init(VkDevice device)
{
    ASSERT(!valid());

    VkSemaphoreCreateInfo semaphoreInfo = {};
    semaphoreInfo.sType                 = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.flags                 = 0;

    return vkCreateSemaphore(device, &semaphoreInfo, nullptr, &mHandle);
}

// Imports an external semaphore payload from a POSIX fd (VK_KHR_external_semaphore_fd).
ANGLE_INLINE VkResult Semaphore::importFd(VkDevice device,
                                          const VkImportSemaphoreFdInfoKHR &importFdInfo) const
{
    ASSERT(valid());
    return vkImportSemaphoreFdKHR(device, &importFdInfo);
}

// Destroys the framebuffer if live; safe to call when invalid.
ANGLE_INLINE void Framebuffer::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyFramebuffer(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the framebuffer; must not already hold a handle.
ANGLE_INLINE VkResult Framebuffer::init(VkDevice device, const VkFramebufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFramebuffer(device, &createInfo, nullptr, &mHandle);
}

// Adopts an externally created framebuffer handle without validation.
ANGLE_INLINE void Framebuffer::setHandle(VkFramebuffer handle)
{
    mHandle = handle;
}
1498
1499 // DeviceMemory implementation.
// Frees the device memory if live; safe to call when invalid.
ANGLE_INLINE void DeviceMemory::destroy(VkDevice device)
{
    if (valid())
    {
        vkFreeMemory(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Allocates device memory; must not already hold a handle.
ANGLE_INLINE VkResult DeviceMemory::allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo)
{
    ASSERT(!valid());
    return vkAllocateMemory(device, &allocInfo, nullptr, &mHandle);
}

// Maps |size| bytes at |offset| into host address space, returning the pointer as
// uint8_t* for convenient byte arithmetic at the call sites.
ANGLE_INLINE VkResult DeviceMemory::map(VkDevice device,
                                        VkDeviceSize offset,
                                        VkDeviceSize size,
                                        VkMemoryMapFlags flags,
                                        uint8_t **mapPointer) const
{
    ASSERT(valid());
    return vkMapMemory(device, mHandle, offset, size, flags, reinterpret_cast<void **>(mapPointer));
}

// Unmaps a previously mapped range.
ANGLE_INLINE void DeviceMemory::unmap(VkDevice device) const
{
    ASSERT(valid());
    vkUnmapMemory(device, mHandle);
}

// Flushes a mapped range to the device (for non-coherent memory).
// NOTE(review): operates purely on |memRange| and does not touch or validate mHandle.
ANGLE_INLINE void DeviceMemory::flush(VkDevice device, VkMappedMemoryRange &memRange)
{
    vkFlushMappedMemoryRanges(device, 1, &memRange);
}

// Invalidates a mapped range so host reads see device writes (non-coherent memory).
ANGLE_INLINE void DeviceMemory::invalidate(VkDevice device, VkMappedMemoryRange &memRange)
{
    vkInvalidateMappedMemoryRanges(device, 1, &memRange);
}
1540
1541 // Allocator implementation.
// Tears down the VMA allocator if live; safe to call when invalid.
ANGLE_INLINE void Allocator::destroy()
{
    if (valid())
    {
        vma::DestroyAllocator(mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the VMA allocator for the given instance/device pair.
ANGLE_INLINE VkResult Allocator::init(VkPhysicalDevice physicalDevice,
                                      VkDevice device,
                                      VkInstance instance,
                                      uint32_t apiVersion,
                                      VkDeviceSize preferredLargeHeapBlockSize)
{
    ASSERT(!valid());
    return vma::InitAllocator(physicalDevice, device, instance, apiVersion,
                              preferredLargeHeapBlockSize, &mHandle);
}

// Creates a buffer and backing allocation in one VMA call.  Writes the raw handles
// directly into the output wrappers (both must be empty on entry) and reports the
// chosen memory type via |memoryTypeIndexOut|.
ANGLE_INLINE VkResult Allocator::createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut,
                                              Buffer *bufferOut,
                                              Allocation *allocationOut) const
{
    ASSERT(valid());
    ASSERT(bufferOut && !bufferOut->valid());
    ASSERT(allocationOut && !allocationOut->valid());
    return vma::CreateBuffer(mHandle, &bufferCreateInfo, requiredFlags, preferredFlags,
                             persistentlyMappedBuffers, memoryTypeIndexOut, &bufferOut->mHandle,
                             &allocationOut->mHandle);
}

// Returns the property flags of a memory type index.
ANGLE_INLINE void Allocator::getMemoryTypeProperties(uint32_t memoryTypeIndex,
                                                     VkMemoryPropertyFlags *flagsOut) const
{
    ASSERT(valid());
    vma::GetMemoryTypeProperties(mHandle, memoryTypeIndex, flagsOut);
}

// Asks VMA which memory type index suits a buffer with the given create info and flags.
ANGLE_INLINE VkResult
Allocator::findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                            VkMemoryPropertyFlags requiredFlags,
                                            VkMemoryPropertyFlags preferredFlags,
                                            bool persistentlyMappedBuffers,
                                            uint32_t *memoryTypeIndexOut) const
{
    ASSERT(valid());
    return vma::FindMemoryTypeIndexForBufferInfo(mHandle, &bufferCreateInfo, requiredFlags,
                                                 preferredFlags, persistentlyMappedBuffers,
                                                 memoryTypeIndexOut);
}

// Builds a VMA statistics string; caller must release it with freeStatsString().
ANGLE_INLINE void Allocator::buildStatsString(char **statsString, VkBool32 detailedMap)
{
    ASSERT(valid());
    vma::BuildStatsString(mHandle, statsString, detailedMap);
}

// Releases a string produced by buildStatsString().
ANGLE_INLINE void Allocator::freeStatsString(char *statsString)
{
    ASSERT(valid());
    vma::FreeStatsString(mHandle, statsString);
}
1609
1610 // Allocation implementation.
// Frees the VMA allocation if live; safe to call when invalid.
ANGLE_INLINE void Allocation::destroy(const Allocator &allocator)
{
    if (valid())
    {
        vma::FreeMemory(allocator.getHandle(), mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}
1619
1620 ANGLE_INLINE VkResult Allocation::map(const Allocator &allocator, uint8_t **mapPointer) const
1621 {
1622 ASSERT(valid());
1623 return vma::MapMemory(allocator.getHandle(), mHandle, (void **)mapPointer);
1624 }
1625
// Unmaps a previously mapped allocation.
ANGLE_INLINE void Allocation::unmap(const Allocator &allocator) const
{
    ASSERT(valid());
    vma::UnmapMemory(allocator.getHandle(), mHandle);
}

// Flushes a sub-range of the allocation to the device (non-coherent memory).
ANGLE_INLINE void Allocation::flush(const Allocator &allocator,
                                    VkDeviceSize offset,
                                    VkDeviceSize size) const
{
    ASSERT(valid());
    vma::FlushAllocation(allocator.getHandle(), mHandle, offset, size);
}

// Invalidates a sub-range so host reads see device writes (non-coherent memory).
ANGLE_INLINE void Allocation::invalidate(const Allocator &allocator,
                                         VkDeviceSize offset,
                                         VkDeviceSize size) const
{
    ASSERT(valid());
    vma::InvalidateAllocation(allocator.getHandle(), mHandle, offset, size);
}

// Destroys the render pass if live; safe to call when invalid.
ANGLE_INLINE void RenderPass::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyRenderPass(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates a render pass via the core Vulkan 1.0 entry point.
ANGLE_INLINE VkResult RenderPass::init(VkDevice device, const VkRenderPassCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass(device, &createInfo, nullptr, &mHandle);
}

// Creates a render pass via the VK_KHR_create_renderpass2 entry point; the caller is
// responsible for only using this when the extension entry point is available.
ANGLE_INLINE VkResult RenderPass::init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass2KHR(device, &createInfo, nullptr, &mHandle);
}
1669
1670 // Buffer implementation.
// Destroys the buffer if live; safe to call when invalid.
ANGLE_INLINE void Buffer::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBuffer(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the buffer; must not already hold a handle.
ANGLE_INLINE VkResult Buffer::init(VkDevice device, const VkBufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBuffer(device, &createInfo, nullptr, &mHandle);
}

// Binds |deviceMemory| to this buffer at |offset|.
ANGLE_INLINE VkResult Buffer::bindMemory(VkDevice device,
                                         const DeviceMemory &deviceMemory,
                                         VkDeviceSize offset)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindBufferMemory(device, mHandle, deviceMemory.getHandle(), offset);
}

// Queries size/alignment/memory-type requirements for this buffer.
ANGLE_INLINE void Buffer::getMemoryRequirements(VkDevice device,
                                                VkMemoryRequirements *memoryRequirementsOut)
{
    ASSERT(valid());
    vkGetBufferMemoryRequirements(device, mHandle, memoryRequirementsOut);
}

// Destroys the buffer view if live; safe to call when invalid.
ANGLE_INLINE void BufferView::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBufferView(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the buffer view; must not already hold a handle.
ANGLE_INLINE VkResult BufferView::init(VkDevice device, const VkBufferViewCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBufferView(device, &createInfo, nullptr, &mHandle);
}
1716
1717 // ShaderModule implementation.
1718 ANGLE_INLINE void ShaderModule::destroy(VkDevice device)
1719 {
1720 if (mHandle != VK_NULL_HANDLE)
1721 {
1722 vkDestroyShaderModule(device, mHandle, nullptr);
1723 mHandle = VK_NULL_HANDLE;
1724 }
1725 }
1726
// Creates the shader module; must not already hold a handle.
ANGLE_INLINE VkResult ShaderModule::init(VkDevice device,
                                         const VkShaderModuleCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateShaderModule(device, &createInfo, nullptr, &mHandle);
}

// Destroys the pipeline layout if live; safe to call when invalid.
ANGLE_INLINE void PipelineLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the pipeline layout; must not already hold a handle.
ANGLE_INLINE VkResult PipelineLayout::init(VkDevice device,
                                           const VkPipelineLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreatePipelineLayout(device, &createInfo, nullptr, &mHandle);
}

// Destroys the pipeline cache if live; safe to call when invalid.
ANGLE_INLINE void PipelineCache::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineCache(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the pipeline cache; must not already hold a handle.
ANGLE_INLINE VkResult PipelineCache::init(VkDevice device,
                                          const VkPipelineCacheCreateInfo &createInfo)
{
    ASSERT(!valid());
    // Note: if we are concerned with memory usage of this cache, we should give it custom
    // allocators. Also, failure of this function is of little importance.
    return vkCreatePipelineCache(device, &createInfo, nullptr, &mHandle);
}

// Merges |srcCacheCount| source caches into this cache.
ANGLE_INLINE VkResult PipelineCache::merge(VkDevice device,
                                           uint32_t srcCacheCount,
                                           const VkPipelineCache *srcCaches) const
{
    ASSERT(valid());
    return vkMergePipelineCaches(device, mHandle, srcCacheCount, srcCaches);
}

// Retrieves serialized cache data.  With *cacheSize == 0, queries the required size;
// otherwise fills |cacheData| up to *cacheSize bytes.
ANGLE_INLINE VkResult PipelineCache::getCacheData(VkDevice device,
                                                  size_t *cacheSize,
                                                  void *cacheData) const
{
    ASSERT(valid());

    // Note: vkGetPipelineCacheData can return VK_INCOMPLETE if cacheSize is smaller than actual
    // size. There are two usages of this function. One is with *cacheSize == 0 to query the size
    // of the cache, and one is with an appropriate buffer to retrieve the cache contents.
    // VK_INCOMPLETE in the first case is an expected output. In the second case, VK_INCOMPLETE is
    // also acceptable and the resulting buffer will contain valid value by spec. Angle currently
    // ensures *cacheSize to be either 0 or of enough size, therefore VK_INCOMPLETE is not expected.
    return vkGetPipelineCacheData(device, mHandle, cacheSize, cacheData);
}
1792
1793 // Pipeline implementation.
// Destroys the pipeline if live; safe to call when invalid.
ANGLE_INLINE void Pipeline::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipeline(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates a single graphics pipeline, using |pipelineCacheVk| for reuse.
ANGLE_INLINE VkResult Pipeline::initGraphics(VkDevice device,
                                             const VkGraphicsPipelineCreateInfo &createInfo,
                                             const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateGraphicsPipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                     &mHandle);
}

// Creates a single compute pipeline, using |pipelineCacheVk| for reuse.
ANGLE_INLINE VkResult Pipeline::initCompute(VkDevice device,
                                            const VkComputePipelineCreateInfo &createInfo,
                                            const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateComputePipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                    &mHandle);
}

// Destroys the descriptor set layout if live; safe to call when invalid.
ANGLE_INLINE void DescriptorSetLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorSetLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the descriptor set layout; must not already hold a handle.
ANGLE_INLINE VkResult DescriptorSetLayout::init(VkDevice device,
                                                const VkDescriptorSetLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorSetLayout(device, &createInfo, nullptr, &mHandle);
}

// Destroys the descriptor pool (and implicitly all sets allocated from it) if live.
ANGLE_INLINE void DescriptorPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the descriptor pool; must not already hold a handle.
ANGLE_INLINE VkResult DescriptorPool::init(VkDevice device,
                                           const VkDescriptorPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorPool(device, &createInfo, nullptr, &mHandle);
}

// Allocates descriptor sets described by |allocInfo|.
// NOTE(review): |allocInfo| carries its own descriptorPool field; this wrapper does not
// check that it refers to mHandle — verify at the call site.
ANGLE_INLINE VkResult
DescriptorPool::allocateDescriptorSets(VkDevice device,
                                       const VkDescriptorSetAllocateInfo &allocInfo,
                                       VkDescriptorSet *descriptorSetsOut)
{
    ASSERT(valid());
    return vkAllocateDescriptorSets(device, &allocInfo, descriptorSetsOut);
}

// Returns descriptor sets to this pool; requires at least one set.
ANGLE_INLINE VkResult DescriptorPool::freeDescriptorSets(VkDevice device,
                                                         uint32_t descriptorSetCount,
                                                         const VkDescriptorSet *descriptorSets)
{
    ASSERT(valid());
    ASSERT(descriptorSetCount > 0);
    return vkFreeDescriptorSets(device, mHandle, descriptorSetCount, descriptorSets);
}
1872
1873 // Sampler implementation.
// Destroys the sampler if live; safe to call when invalid.
ANGLE_INLINE void Sampler::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySampler(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the sampler; must not already hold a handle.
ANGLE_INLINE VkResult Sampler::init(VkDevice device, const VkSamplerCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSampler(device, &createInfo, nullptr, &mHandle);
}

// Destroys the Y'CbCr conversion object if live; safe to call when invalid.
ANGLE_INLINE void SamplerYcbcrConversion::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySamplerYcbcrConversion(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates a sampler Y'CbCr conversion; must not already hold a handle.
ANGLE_INLINE VkResult
SamplerYcbcrConversion::init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSamplerYcbcrConversion(device, &createInfo, nullptr, &mHandle);
}

// Destroys the event if live; safe to call when invalid.
ANGLE_INLINE void Event::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyEvent(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the event; must not already hold a handle.
ANGLE_INLINE VkResult Event::init(VkDevice device, const VkEventCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateEvent(device, &createInfo, nullptr, &mHandle);
}

// Returns VK_EVENT_SET or VK_EVENT_RESET (or an error) for this event.
ANGLE_INLINE VkResult Event::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetEventStatus(device, mHandle);
}

// Sets the event to signaled from the host.
ANGLE_INLINE VkResult Event::set(VkDevice device) const
{
    ASSERT(valid());
    return vkSetEvent(device, mHandle);
}

// Resets the event to unsignaled from the host.
ANGLE_INLINE VkResult Event::reset(VkDevice device) const
{
    ASSERT(valid());
    return vkResetEvent(device, mHandle);
}
1939
1940 // Fence implementation.
// Destroys the fence if live; safe to call when invalid.
ANGLE_INLINE void Fence::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyFence(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the fence; must not already hold a handle.
ANGLE_INLINE VkResult Fence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFence(device, &createInfo, nullptr, &mHandle);
}

// Resets this fence to the unsignaled state.
ANGLE_INLINE VkResult Fence::reset(VkDevice device)
{
    ASSERT(valid());
    return vkResetFences(device, 1, &mHandle);
}

// Returns VK_SUCCESS if signaled, VK_NOT_READY if not (or an error).
ANGLE_INLINE VkResult Fence::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetFenceStatus(device, mHandle);
}
1967
1968 ANGLE_INLINE VkResult Fence::wait(VkDevice device, uint64_t timeout) const
1969 {
1970 ASSERT(valid());
1971 return vkWaitForFences(device, 1, &mHandle, true, timeout);
1972 }
1973
1974 ANGLE_INLINE VkResult Fence::importFd(VkDevice device,
1975 const VkImportFenceFdInfoKHR &importFenceFdInfo) const
1976 {
1977 ASSERT(valid());
1978 return vkImportFenceFdKHR(device, &importFenceFdInfo);
1979 }
1980
1981 ANGLE_INLINE VkResult Fence::exportFd(VkDevice device,
1982 const VkFenceGetFdInfoKHR &fenceGetFdInfo,
1983 int *fdOut) const
1984 {
1985 ASSERT(valid());
1986 return vkGetFenceFdKHR(device, &fenceGetFdInfo, fdOut);
1987 }
1988
1989 // QueryPool implementation.
1990 ANGLE_INLINE void QueryPool::destroy(VkDevice device)
1991 {
1992 if (valid())
1993 {
1994 vkDestroyQueryPool(device, mHandle, nullptr);
1995 mHandle = VK_NULL_HANDLE;
1996 }
1997 }
1998
1999 ANGLE_INLINE VkResult QueryPool::init(VkDevice device, const VkQueryPoolCreateInfo &createInfo)
2000 {
2001 ASSERT(!valid());
2002 return vkCreateQueryPool(device, &createInfo, nullptr, &mHandle);
2003 }
2004
2005 ANGLE_INLINE VkResult QueryPool::getResults(VkDevice device,
2006 uint32_t firstQuery,
2007 uint32_t queryCount,
2008 size_t dataSize,
2009 void *data,
2010 VkDeviceSize stride,
2011 VkQueryResultFlags flags) const
2012 {
2013 ASSERT(valid());
2014 return vkGetQueryPoolResults(device, mHandle, firstQuery, queryCount, dataSize, data, stride,
2015 flags);
2016 }
2017
2018 // VirtualBlock implementation.
2019 ANGLE_INLINE void VirtualBlock::destroy(VkDevice device)
2020 {
2021 if (valid())
2022 {
2023 vma::DestroyVirtualBlock(mHandle);
2024 mHandle = VK_NULL_HANDLE;
2025 }
2026 }
2027
2028 ANGLE_INLINE VkResult VirtualBlock::init(VkDevice device,
2029 vma::VirtualBlockCreateFlags flags,
2030 VkDeviceSize size)
2031 {
2032 return vma::CreateVirtualBlock(size, flags, &mHandle);
2033 }
2034
2035 ANGLE_INLINE VkResult VirtualBlock::allocate(VkDeviceSize size,
2036 VkDeviceSize alignment,
2037 VmaVirtualAllocation *allocationOut,
2038 VkDeviceSize *offsetOut)
2039 {
2040 return vma::VirtualAllocate(mHandle, size, alignment, allocationOut, offsetOut);
2041 }
2042
2043 ANGLE_INLINE void VirtualBlock::free(VmaVirtualAllocation allocation, VkDeviceSize offset)
2044 {
2045 vma::VirtualFree(mHandle, allocation, offset);
2046 }
2047
2048 ANGLE_INLINE void VirtualBlock::calculateStats(vma::StatInfo *pStatInfo) const
2049 {
2050 vma::CalculateVirtualBlockStats(mHandle, pStatInfo);
2051 }
2052 } // namespace vk
2053 } // namespace rx
2054
2055 #endif // LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
2056