1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_wrapper:
7 // Wrapper classes around Vulkan objects. In an ideal world we could generate this
8 // from vk.xml. Or reuse the generator in the vkhpp tool. For now this is manually
9 // generated and we must add missing functions and objects as we need them.
10
11 #ifndef LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
12 #define LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
13
14 #include "common/vulkan/vk_headers.h"
15 #include "libANGLE/renderer/renderer_utils.h"
16 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
17 #include "libANGLE/trace.h"
18
19 namespace rx
20 {
21 enum class DescriptorSetIndex : uint32_t;
22
23 namespace vk
24 {
25 // Helper macros that apply to all the wrapped object types.
26 // Unimplemented handle types:
27 // Instance
28 // PhysicalDevice
29 // Device
30 // Queue
31 // DescriptorSet
32
// X-macro: expands FUNC once per wrapped handle type. Used below to generate the
// HandleType enum, forward declarations, and HandleTypeHelper specializations.
#define ANGLE_HANDLE_TYPES_X(FUNC) \
    FUNC(Allocation)               \
    FUNC(Allocator)                \
    FUNC(Buffer)                   \
    FUNC(BufferBlock)              \
    FUNC(BufferView)               \
    FUNC(CommandPool)              \
    FUNC(DescriptorPool)           \
    FUNC(DescriptorSetLayout)      \
    FUNC(DeviceMemory)             \
    FUNC(Event)                    \
    FUNC(Fence)                    \
    FUNC(Framebuffer)              \
    FUNC(Image)                    \
    FUNC(ImageView)                \
    FUNC(Pipeline)                 \
    FUNC(PipelineCache)            \
    FUNC(PipelineLayout)           \
    FUNC(QueryPool)                \
    FUNC(RenderPass)               \
    FUNC(Sampler)                  \
    FUNC(SamplerYcbcrConversion)   \
    FUNC(Semaphore)                \
    FUNC(ShaderModule)
57
#define ANGLE_COMMA_SEP_FUNC(TYPE) TYPE,

// Runtime identifier for each wrapped handle type. CommandBuffer is listed explicitly
// because it lives in namespace priv and is not part of the X-macro list.
enum class HandleType
{
    Invalid,
    CommandBuffer,
    ANGLE_HANDLE_TYPES_X(ANGLE_COMMA_SEP_FUNC) EnumCount
};

#undef ANGLE_COMMA_SEP_FUNC
68
// Forward-declare every wrapper class so the declarations below may reference each other.
#define ANGLE_PRE_DECLARE_CLASS_FUNC(TYPE) class TYPE;
ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)
namespace priv
{
class CommandBuffer;
}  // namespace priv
#undef ANGLE_PRE_DECLARE_CLASS_FUNC
76
// Returns the HandleType of a Vk Handle.
// Maps a wrapper class to its HandleType enum value at compile time via specialization.
template <typename T>
struct HandleTypeHelper;

#define ANGLE_HANDLE_TYPE_HELPER_FUNC(TYPE)                          \
    template <>                                                      \
    struct HandleTypeHelper<TYPE>                                    \
    {                                                                \
        constexpr static HandleType kHandleType = HandleType::TYPE;  \
    };

ANGLE_HANDLE_TYPES_X(ANGLE_HANDLE_TYPE_HELPER_FUNC)
// priv::CommandBuffer is not in the X-macro list; specialize it by hand.
template <>
struct HandleTypeHelper<priv::CommandBuffer>
{
    constexpr static HandleType kHandleType = HandleType::CommandBuffer;
};

#undef ANGLE_HANDLE_TYPE_HELPER_FUNC
96
97 // Base class for all wrapped vulkan objects. Implements several common helper routines.
98 template <typename DerivedT, typename HandleT>
99 class WrappedObject : angle::NonCopyable
100 {
101 public:
102 HandleT getHandle() const { return mHandle; }
103 void setHandle(HandleT handle) { mHandle = handle; }
104 bool valid() const { return (mHandle != VK_NULL_HANDLE); }
105
106 const HandleT *ptr() const { return &mHandle; }
107
108 HandleT release()
109 {
110 HandleT handle = mHandle;
111 mHandle = VK_NULL_HANDLE;
112 return handle;
113 }
114
115 protected:
116 WrappedObject() : mHandle(VK_NULL_HANDLE) {}
117 ~WrappedObject() { ASSERT(!valid()); }
118
119 WrappedObject(WrappedObject &&other) : mHandle(other.mHandle)
120 {
121 other.mHandle = VK_NULL_HANDLE;
122 }
123
124 // Only works to initialize empty objects, since we don't have the device handle.
125 WrappedObject &operator=(WrappedObject &&other)
126 {
127 ASSERT(!valid());
128 std::swap(mHandle, other.mHandle);
129 return *this;
130 }
131
132 HandleT mHandle;
133 };
134
// Wraps VkCommandPool. destroy() must be called before this wrapper is destructed.
class CommandPool final : public WrappedObject<CommandPool, VkCommandPool>
{
  public:
    CommandPool() = default;

    void destroy(VkDevice device);
    // Resets the pool, recycling all command buffers allocated from it.
    VkResult reset(VkDevice device, VkCommandPoolResetFlags flags);
    void freeCommandBuffers(VkDevice device,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer *commandBuffers);

    VkResult init(VkDevice device, const VkCommandPoolCreateInfo &createInfo);
};
148
// Wraps VkPipeline; can be initialized as either a graphics or a compute pipeline.
class Pipeline final : public WrappedObject<Pipeline, VkPipeline>
{
  public:
    Pipeline() = default;
    void destroy(VkDevice device);

    VkResult initGraphics(VkDevice device,
                          const VkGraphicsPipelineCreateInfo &createInfo,
                          const PipelineCache &pipelineCacheVk);
    VkResult initCompute(VkDevice device,
                         const VkComputePipelineCreateInfo &createInfo,
                         const PipelineCache &pipelineCacheVk);
};
162
163 namespace priv
164 {
165
166 // Helper class that wraps a Vulkan command buffer.
// Helper class that wraps a Vulkan command buffer.
// Thin, mostly-inline wrappers over the vkCmd* entry points; every call asserts valid().
class CommandBuffer : public WrappedObject<CommandBuffer, VkCommandBuffer>
{
  public:
    CommandBuffer() = default;

    // Relinquishes ownership of the VkCommandBuffer handle and returns it.
    VkCommandBuffer releaseHandle();

    // This is used for normal pool allocated command buffers. It resets the handle.
    void destroy(VkDevice device);

    // This is used in conjunction with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT.
    void destroy(VkDevice device, const CommandPool &commandPool);

    // Allocates the command buffer from the pool named in createInfo.
    VkResult init(VkDevice device, const VkCommandBufferAllocateInfo &createInfo);

    using WrappedObject::operator=;

    // Whether queries can be inherited from the primary command buffer.
    static bool SupportsQueries(const VkPhysicalDeviceFeatures &features)
    {
        return (features.inheritedQueries == VK_TRUE);
    }

    // Vulkan command buffers are executed as secondary command buffers within a primary command
    // buffer.
    static constexpr bool ExecutesInline() { return false; }

    // Recording control.
    VkResult begin(const VkCommandBufferBeginInfo &info);

    void beginQuery(const QueryPool &queryPool, uint32_t query, VkQueryControlFlags flags);

    void beginRenderPass(const VkRenderPassBeginInfo &beginInfo, VkSubpassContents subpassContents);

    // State binding.
    void bindDescriptorSets(const PipelineLayout &layout,
                            VkPipelineBindPoint pipelineBindPoint,
                            DescriptorSetIndex firstSet,
                            uint32_t descriptorSetCount,
                            const VkDescriptorSet *descriptorSets,
                            uint32_t dynamicOffsetCount,
                            const uint32_t *dynamicOffsets);
    void bindGraphicsPipeline(const Pipeline &pipeline);
    void bindComputePipeline(const Pipeline &pipeline);
    void bindPipeline(VkPipelineBindPoint pipelineBindPoint, const Pipeline &pipeline);

    void bindIndexBuffer(const Buffer &buffer, VkDeviceSize offset, VkIndexType indexType);
    void bindVertexBuffers(uint32_t firstBinding,
                           uint32_t bindingCount,
                           const VkBuffer *buffers,
                           const VkDeviceSize *offsets);
    void bindVertexBuffers2(uint32_t firstBinding,
                            uint32_t bindingCount,
                            const VkBuffer *buffers,
                            const VkDeviceSize *offsets,
                            const VkDeviceSize *sizes,
                            const VkDeviceSize *strides);

    // Transfer and clear operations.
    void blitImage(const Image &srcImage,
                   VkImageLayout srcImageLayout,
                   const Image &dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageBlit *regions,
                   VkFilter filter);

    void clearColorImage(const Image &image,
                         VkImageLayout imageLayout,
                         const VkClearColorValue &color,
                         uint32_t rangeCount,
                         const VkImageSubresourceRange *ranges);
    void clearDepthStencilImage(const Image &image,
                                VkImageLayout imageLayout,
                                const VkClearDepthStencilValue &depthStencil,
                                uint32_t rangeCount,
                                const VkImageSubresourceRange *ranges);

    void clearAttachments(uint32_t attachmentCount,
                          const VkClearAttachment *attachments,
                          uint32_t rectCount,
                          const VkClearRect *rects);

    void copyBuffer(const Buffer &srcBuffer,
                    const Buffer &destBuffer,
                    uint32_t regionCount,
                    const VkBufferCopy *regions);

    void copyBufferToImage(VkBuffer srcBuffer,
                           const Image &dstImage,
                           VkImageLayout dstImageLayout,
                           uint32_t regionCount,
                           const VkBufferImageCopy *regions);
    void copyImageToBuffer(const Image &srcImage,
                           VkImageLayout srcImageLayout,
                           VkBuffer dstBuffer,
                           uint32_t regionCount,
                           const VkBufferImageCopy *regions);
    void copyImage(const Image &srcImage,
                   VkImageLayout srcImageLayout,
                   const Image &dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageCopy *regions);

    // Dispatch and draw.
    void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
    void dispatchIndirect(const Buffer &buffer, VkDeviceSize offset);

    void draw(uint32_t vertexCount,
              uint32_t instanceCount,
              uint32_t firstVertex,
              uint32_t firstInstance);
    void drawIndexed(uint32_t indexCount,
                     uint32_t instanceCount,
                     uint32_t firstIndex,
                     int32_t vertexOffset,
                     uint32_t firstInstance);
    void drawIndexedIndirect(const Buffer &buffer,
                             VkDeviceSize offset,
                             uint32_t drawCount,
                             uint32_t stride);
    void drawIndirect(const Buffer &buffer,
                      VkDeviceSize offset,
                      uint32_t drawCount,
                      uint32_t stride);

    VkResult end();
    void endQuery(const QueryPool &queryPool, uint32_t query);
    void endRenderPass();
    void executeCommands(uint32_t commandBufferCount, const CommandBuffer *commandBuffers);

    void getMemoryUsageStats(size_t *usedMemoryOut, size_t *allocatedMemoryOut) const;

    void fillBuffer(const Buffer &dstBuffer,
                    VkDeviceSize dstOffset,
                    VkDeviceSize size,
                    uint32_t data);

    // Synchronization.
    void imageBarrier(VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      const VkImageMemoryBarrier &imageMemoryBarrier);

    void nextSubpass(VkSubpassContents subpassContents);

    void memoryBarrier(VkPipelineStageFlags srcStageMask,
                       VkPipelineStageFlags dstStageMask,
                       const VkMemoryBarrier *memoryBarrier);

    void pipelineBarrier(VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         VkDependencyFlags dependencyFlags,
                         uint32_t memoryBarrierCount,
                         const VkMemoryBarrier *memoryBarriers,
                         uint32_t bufferMemoryBarrierCount,
                         const VkBufferMemoryBarrier *bufferMemoryBarriers,
                         uint32_t imageMemoryBarrierCount,
                         const VkImageMemoryBarrier *imageMemoryBarriers);

    void pushConstants(const PipelineLayout &layout,
                       VkShaderStageFlags flag,
                       uint32_t offset,
                       uint32_t size,
                       const void *data);

    // Dynamic state.
    void setBlendConstants(const float blendConstants[4]);
    void setCullMode(VkCullModeFlags cullMode);
    void setDepthBias(float depthBiasConstantFactor,
                      float depthBiasClamp,
                      float depthBiasSlopeFactor);
    void setDepthBiasEnable(VkBool32 depthBiasEnable);
    void setDepthCompareOp(VkCompareOp depthCompareOp);
    void setDepthTestEnable(VkBool32 depthTestEnable);
    void setDepthWriteEnable(VkBool32 depthWriteEnable);
    void setEvent(VkEvent event, VkPipelineStageFlags stageMask);
    void setFragmentShadingRate(const VkExtent2D *fragmentSize,
                                VkFragmentShadingRateCombinerOpKHR ops[2]);
    void setFrontFace(VkFrontFace frontFace);
    void setLineWidth(float lineWidth);
    void setLogicOp(VkLogicOp logicOp);
    void setPrimitiveRestartEnable(VkBool32 primitiveRestartEnable);
    void setRasterizerDiscardEnable(VkBool32 rasterizerDiscardEnable);
    void setScissor(uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *scissors);
    void setStencilCompareMask(uint32_t compareFrontMask, uint32_t compareBackMask);
    void setStencilOp(VkStencilFaceFlags faceMask,
                      VkStencilOp failOp,
                      VkStencilOp passOp,
                      VkStencilOp depthFailOp,
                      VkCompareOp compareOp);
    void setStencilReference(uint32_t frontReference, uint32_t backReference);
    void setStencilTestEnable(VkBool32 stencilTestEnable);
    void setStencilWriteMask(uint32_t writeFrontMask, uint32_t writeBackMask);
    void setViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *viewports);
    VkResult reset();
    void resetEvent(VkEvent event, VkPipelineStageFlags stageMask);
    void resetQueryPool(const QueryPool &queryPool, uint32_t firstQuery, uint32_t queryCount);
    void resolveImage(const Image &srcImage,
                      VkImageLayout srcImageLayout,
                      const Image &dstImage,
                      VkImageLayout dstImageLayout,
                      uint32_t regionCount,
                      const VkImageResolve *regions);
    void waitEvents(uint32_t eventCount,
                    const VkEvent *events,
                    VkPipelineStageFlags srcStageMask,
                    VkPipelineStageFlags dstStageMask,
                    uint32_t memoryBarrierCount,
                    const VkMemoryBarrier *memoryBarriers,
                    uint32_t bufferMemoryBarrierCount,
                    const VkBufferMemoryBarrier *bufferMemoryBarriers,
                    uint32_t imageMemoryBarrierCount,
                    const VkImageMemoryBarrier *imageMemoryBarriers);

    void writeTimestamp(VkPipelineStageFlagBits pipelineStage,
                        const QueryPool &queryPool,
                        uint32_t query);

    // VK_EXT_transform_feedback
    void beginTransformFeedback(uint32_t firstCounterBuffer,
                                uint32_t counterBufferCount,
                                const VkBuffer *counterBuffers,
                                const VkDeviceSize *counterBufferOffsets);
    void endTransformFeedback(uint32_t firstCounterBuffer,
                              uint32_t counterBufferCount,
                              const VkBuffer *counterBuffers,
                              const VkDeviceSize *counterBufferOffsets);
    void bindTransformFeedbackBuffers(uint32_t firstBinding,
                                      uint32_t bindingCount,
                                      const VkBuffer *buffers,
                                      const VkDeviceSize *offsets,
                                      const VkDeviceSize *sizes);

    // VK_EXT_debug_utils
    void beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
    void endDebugUtilsLabelEXT();
    void insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
};
399 } // namespace priv
400
// Primary command buffers are always real Vulkan command buffers (the priv wrapper).
using PrimaryCommandBuffer = priv::CommandBuffer;
402
// Wraps VkImage. May either own the image handle or merely observe one owned elsewhere
// (e.g. swapchain images); reset() vs destroy() below reflect that distinction.
class Image final : public WrappedObject<Image, VkImage>
{
  public:
    Image() = default;

    // Use this method if the lifetime of the image is not controlled by ANGLE. (SwapChain)
    void setHandle(VkImage handle);

    // Called on shutdown when the helper class *doesn't* own the handle to the image resource.
    void reset();

    // Called on shutdown when the helper class *does* own the handle to the image resource.
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkImageCreateInfo &createInfo);

    void getMemoryRequirements(VkDevice device, VkMemoryRequirements *requirementsOut) const;
    VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);
    VkResult bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo);

    void getSubresourceLayout(VkDevice device,
                              VkImageAspectFlagBits aspectMask,
                              uint32_t mipLevel,
                              uint32_t arrayLayer,
                              VkSubresourceLayout *outSubresourceLayout) const;

  private:
    friend class ImageMemorySuballocator;
};
432
// Wraps VkImageView.
class ImageView final : public WrappedObject<ImageView, VkImageView>
{
  public:
    ImageView() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkImageViewCreateInfo &createInfo);
};
441
// Wraps VkSemaphore. importFd supports external semaphore handles (VK_KHR_external_semaphore_fd).
class Semaphore final : public WrappedObject<Semaphore, VkSemaphore>
{
  public:
    Semaphore() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device);
    VkResult importFd(VkDevice device, const VkImportSemaphoreFdInfoKHR &importFdInfo) const;
};
451
// Wraps VkFramebuffer.
class Framebuffer final : public WrappedObject<Framebuffer, VkFramebuffer>
{
  public:
    Framebuffer() = default;
    void destroy(VkDevice device);

    // Use this method only in necessary cases. (RenderPass)
    void setHandle(VkFramebuffer handle);

    VkResult init(VkDevice device, const VkFramebufferCreateInfo &createInfo);
};
463
// Wraps VkDeviceMemory: raw device memory allocation plus map/flush/invalidate helpers.
class DeviceMemory final : public WrappedObject<DeviceMemory, VkDeviceMemory>
{
  public:
    DeviceMemory() = default;
    void destroy(VkDevice device);

    VkResult allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo);
    // Maps [offset, offset + size) and returns the CPU pointer in *mapPointer.
    VkResult map(VkDevice device,
                 VkDeviceSize offset,
                 VkDeviceSize size,
                 VkMemoryMapFlags flags,
                 uint8_t **mapPointer) const;
    void unmap(VkDevice device) const;
    void flush(VkDevice device, VkMappedMemoryRange &memRange);
    void invalidate(VkDevice device, VkMappedMemoryRange &memRange);
};
480
// Wraps VmaAllocator (Vulkan Memory Allocator). Note: not `final`, unlike most wrappers here.
class Allocator : public WrappedObject<Allocator, VmaAllocator>
{
  public:
    Allocator() = default;
    void destroy();

    VkResult init(VkPhysicalDevice physicalDevice,
                  VkDevice device,
                  VkInstance instance,
                  uint32_t apiVersion,
                  VkDeviceSize preferredLargeHeapBlockSize);

    // Initializes the buffer handle and memory allocation.
    VkResult createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                          VkMemoryPropertyFlags requiredFlags,
                          VkMemoryPropertyFlags preferredFlags,
                          bool persistentlyMappedBuffers,
                          uint32_t *memoryTypeIndexOut,
                          Buffer *bufferOut,
                          Allocation *allocationOut) const;

    void getMemoryTypeProperties(uint32_t memoryTypeIndex, VkMemoryPropertyFlags *flagsOut) const;
    VkResult findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut) const;

    // Debug helpers; the string returned by buildStatsString must be released
    // with freeStatsString.
    void buildStatsString(char **statsString, VkBool32 detailedMap);
    void freeStatsString(char *statsString);
};
512
// Wraps VmaAllocation: a sub-allocation owned by a VMA Allocator (not by a VkDevice).
class Allocation final : public WrappedObject<Allocation, VmaAllocation>
{
  public:
    Allocation() = default;
    void destroy(const Allocator &allocator);

    VkResult map(const Allocator &allocator, uint8_t **mapPointer) const;
    void unmap(const Allocator &allocator) const;
    void flush(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;
    void invalidate(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;

  private:
    friend class Allocator;
    friend class ImageMemorySuballocator;
};
528
// Wraps VkRenderPass; init2 uses the VkRenderPassCreateInfo2 path.
class RenderPass final : public WrappedObject<RenderPass, VkRenderPass>
{
  public:
    RenderPass() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkRenderPassCreateInfo &createInfo);
    VkResult init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo);
};
538
// Direction of a staging-buffer transfer relative to the CPU.
enum class StagingUsage
{
    Read,
    Write,
    Both,
};
545
// Wraps VkBuffer.
class Buffer final : public WrappedObject<Buffer, VkBuffer>
{
  public:
    Buffer() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkBufferCreateInfo &createInfo);
    VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory, VkDeviceSize offset);
    void getMemoryRequirements(VkDevice device, VkMemoryRequirements *memoryRequirementsOut);

  private:
    // Allocator::createBuffer writes the handle directly.
    friend class Allocator;
};
559
// Wraps VkBufferView.
class BufferView final : public WrappedObject<BufferView, VkBufferView>
{
  public:
    BufferView() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkBufferViewCreateInfo &createInfo);
};
568
// Wraps VkShaderModule.
class ShaderModule final : public WrappedObject<ShaderModule, VkShaderModule>
{
  public:
    ShaderModule() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkShaderModuleCreateInfo &createInfo);
};
577
// Wraps VkPipelineLayout.
class PipelineLayout final : public WrappedObject<PipelineLayout, VkPipelineLayout>
{
  public:
    PipelineLayout() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkPipelineLayoutCreateInfo &createInfo);
};
586
// Wraps VkPipelineCache, including serialization (getCacheData) and cache merging.
class PipelineCache final : public WrappedObject<PipelineCache, VkPipelineCache>
{
  public:
    PipelineCache() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkPipelineCacheCreateInfo &createInfo);
    VkResult getCacheData(VkDevice device, size_t *cacheSize, void *cacheData) const;
    VkResult merge(VkDevice device, uint32_t srcCacheCount, const VkPipelineCache *srcCaches) const;
};
597
// Wraps VkDescriptorSetLayout.
class DescriptorSetLayout final : public WrappedObject<DescriptorSetLayout, VkDescriptorSetLayout>
{
  public:
    DescriptorSetLayout() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkDescriptorSetLayoutCreateInfo &createInfo);
};
606
// Wraps VkDescriptorPool. Descriptor sets themselves are not wrapped (see the
// "Unimplemented handle types" note at the top of this file).
class DescriptorPool final : public WrappedObject<DescriptorPool, VkDescriptorPool>
{
  public:
    DescriptorPool() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkDescriptorPoolCreateInfo &createInfo);

    VkResult allocateDescriptorSets(VkDevice device,
                                    const VkDescriptorSetAllocateInfo &allocInfo,
                                    VkDescriptorSet *descriptorSetsOut);
    VkResult freeDescriptorSets(VkDevice device,
                                uint32_t descriptorSetCount,
                                const VkDescriptorSet *descriptorSets);
};
622
// Wraps VkSampler.
class Sampler final : public WrappedObject<Sampler, VkSampler>
{
  public:
    Sampler() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, const VkSamplerCreateInfo &createInfo);
};
630
// Wraps VkSamplerYcbcrConversion (YUV sampler conversion objects).
class SamplerYcbcrConversion final
    : public WrappedObject<SamplerYcbcrConversion, VkSamplerYcbcrConversion>
{
  public:
    SamplerYcbcrConversion() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo);
};
639
// Wraps VkEvent, with host-side set/reset/status queries.
class Event final : public WrappedObject<Event, VkEvent>
{
  public:
    Event() = default;
    void destroy(VkDevice device);
    // Allow move-assignment from the base (only valid into an empty wrapper).
    using WrappedObject::operator=;

    VkResult init(VkDevice device, const VkEventCreateInfo &createInfo);
    VkResult getStatus(VkDevice device) const;
    VkResult set(VkDevice device) const;
    VkResult reset(VkDevice device) const;
};
652
// Wraps VkFence, including external fence FD import/export (VK_KHR_external_fence_fd).
class Fence final : public WrappedObject<Fence, VkFence>
{
  public:
    Fence() = default;
    void destroy(VkDevice device);
    // Allow move-assignment from the base (only valid into an empty wrapper).
    using WrappedObject::operator=;

    VkResult init(VkDevice device, const VkFenceCreateInfo &createInfo);
    VkResult reset(VkDevice device);
    VkResult getStatus(VkDevice device) const;
    VkResult wait(VkDevice device, uint64_t timeout) const;
    VkResult importFd(VkDevice device, const VkImportFenceFdInfoKHR &importFenceFdInfo) const;
    VkResult exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo, int *outFd) const;
};
667
// Wraps VkQueryPool.
class QueryPool final : public WrappedObject<QueryPool, VkQueryPool>
{
  public:
    QueryPool() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkQueryPoolCreateInfo &createInfo);
    VkResult getResults(VkDevice device,
                        uint32_t firstQuery,
                        uint32_t queryCount,
                        size_t dataSize,
                        void *data,
                        VkDeviceSize stride,
                        VkQueryResultFlags flags) const;
};
683
// VirtualBlock
// Wraps VmaVirtualBlock: CPU-side sub-allocation bookkeeping over a memory range.
class VirtualBlock final : public WrappedObject<VirtualBlock, VmaVirtualBlock>
{
  public:
    VirtualBlock() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, vma::VirtualBlockCreateFlags flags, VkDeviceSize size);

    VkResult allocate(VkDeviceSize size,
                      VkDeviceSize alignment,
                      VmaVirtualAllocation *allocationOut,
                      VkDeviceSize *offsetOut);
    void free(VmaVirtualAllocation allocation, VkDeviceSize offset);
    void calculateStats(vma::StatInfo *pStatInfo) const;
};
699
700 // CommandPool implementation.
701 ANGLE_INLINE void CommandPool::destroy(VkDevice device)
702 {
703 if (valid())
704 {
705 vkDestroyCommandPool(device, mHandle, nullptr);
706 mHandle = VK_NULL_HANDLE;
707 }
708 }
709
// Thin wrapper over vkResetCommandPool; the pool must already be initialized.
ANGLE_INLINE VkResult CommandPool::reset(VkDevice device, VkCommandPoolResetFlags flags)
{
    ASSERT(valid());
    return vkResetCommandPool(device, mHandle, flags);
}
715
// Returns the given command buffers to this pool (vkFreeCommandBuffers).
ANGLE_INLINE void CommandPool::freeCommandBuffers(VkDevice device,
                                                  uint32_t commandBufferCount,
                                                  const VkCommandBuffer *commandBuffers)
{
    ASSERT(valid());
    vkFreeCommandBuffers(device, mHandle, commandBufferCount, commandBuffers);
}
723
// Creates the pool; must be called on an empty wrapper.
ANGLE_INLINE VkResult CommandPool::init(VkDevice device, const VkCommandPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateCommandPool(device, &createInfo, nullptr, &mHandle);
}
729
730 namespace priv
731 {
732
733 // CommandBuffer implementation.
// CommandBuffer implementation.
// Relinquishes ownership: returns the handle and leaves this wrapper empty.
ANGLE_INLINE VkCommandBuffer CommandBuffer::releaseHandle()
{
    VkCommandBuffer handle = mHandle;
    mHandle                = nullptr;
    return handle;
}
740
// Allocates the command buffer from the pool specified in createInfo; wrapper must be empty.
ANGLE_INLINE VkResult CommandBuffer::init(VkDevice device,
                                          const VkCommandBufferAllocateInfo &createInfo)
{
    ASSERT(!valid());
    return vkAllocateCommandBuffers(device, &createInfo, &mHandle);
}
747
// Records vkCmdBlitImage. Only a single region is supported: regionCount is asserted to
// be 1 and a literal 1 is forwarded to Vulkan.
ANGLE_INLINE void CommandBuffer::blitImage(const Image &srcImage,
                                           VkImageLayout srcImageLayout,
                                           const Image &dstImage,
                                           VkImageLayout dstImageLayout,
                                           uint32_t regionCount,
                                           const VkImageBlit *regions,
                                           VkFilter filter)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    ASSERT(regionCount == 1);
    vkCmdBlitImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                   dstImageLayout, 1, regions, filter);
}
761
// Starts recording into an already-allocated command buffer.
ANGLE_INLINE VkResult CommandBuffer::begin(const VkCommandBufferBeginInfo &info)
{
    ASSERT(valid());
    return vkBeginCommandBuffer(mHandle, &info);
}
767
// Finishes recording; the buffer becomes executable on success.
ANGLE_INLINE VkResult CommandBuffer::end()
{
    ASSERT(valid());
    return vkEndCommandBuffer(mHandle);
}
773
// Resets the command buffer without releasing its resources (flags = 0).
ANGLE_INLINE VkResult CommandBuffer::reset()
{
    ASSERT(valid());
    return vkResetCommandBuffer(mHandle, 0);
}
779
// Advances to the next subpass of the current render pass.
ANGLE_INLINE void CommandBuffer::nextSubpass(VkSubpassContents subpassContents)
{
    ASSERT(valid());
    vkCmdNextSubpass(mHandle, subpassContents);
}
785
// Convenience wrapper: records a pipeline barrier carrying exactly one global memory
// barrier (no buffer/image barriers, no dependency flags).
ANGLE_INLINE void CommandBuffer::memoryBarrier(VkPipelineStageFlags srcStageMask,
                                               VkPipelineStageFlags dstStageMask,
                                               const VkMemoryBarrier *memoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 1, memoryBarrier, 0, nullptr, 0,
                         nullptr);
}
794
// Fully general vkCmdPipelineBarrier wrapper; all three barrier arrays pass through as-is.
ANGLE_INLINE void CommandBuffer::pipelineBarrier(VkPipelineStageFlags srcStageMask,
                                                 VkPipelineStageFlags dstStageMask,
                                                 VkDependencyFlags dependencyFlags,
                                                 uint32_t memoryBarrierCount,
                                                 const VkMemoryBarrier *memoryBarriers,
                                                 uint32_t bufferMemoryBarrierCount,
                                                 const VkBufferMemoryBarrier *bufferMemoryBarriers,
                                                 uint32_t imageMemoryBarrierCount,
                                                 const VkImageMemoryBarrier *imageMemoryBarriers)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount,
                         memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
                         imageMemoryBarrierCount, imageMemoryBarriers);
}
810
// Convenience wrapper: records a pipeline barrier carrying exactly one image barrier.
ANGLE_INLINE void CommandBuffer::imageBarrier(VkPipelineStageFlags srcStageMask,
                                              VkPipelineStageFlags dstStageMask,
                                              const VkImageMemoryBarrier &imageMemoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1,
                         &imageMemoryBarrier);
}
819
// Drops the handle without calling vkFreeCommandBuffers; `device` is intentionally
// unused. Presumably the buffer is reclaimed when its pool is reset or destroyed —
// see the declaration comment ("normal pool allocated command buffers").
ANGLE_INLINE void CommandBuffer::destroy(VkDevice device)
{
    releaseHandle();
}
824
825 ANGLE_INLINE void CommandBuffer::destroy(VkDevice device, const vk::CommandPool &commandPool)
826 {
827 if (valid())
828 {
829 ASSERT(commandPool.valid());
830 vkFreeCommandBuffers(device, commandPool.getHandle(), 1, &mHandle);
831 mHandle = VK_NULL_HANDLE;
832 }
833 }
834
// Records vkCmdCopyBuffer between two wrapped buffers.
ANGLE_INLINE void CommandBuffer::copyBuffer(const Buffer &srcBuffer,
                                            const Buffer &destBuffer,
                                            uint32_t regionCount,
                                            const VkBufferCopy *regions)
{
    ASSERT(valid() && srcBuffer.valid() && destBuffer.valid());
    vkCmdCopyBuffer(mHandle, srcBuffer.getHandle(), destBuffer.getHandle(), regionCount, regions);
}
843
// Records vkCmdCopyBufferToImage. Only a single region is supported (literal 1 forwarded).
// The source is a raw VkBuffer handle rather than a wrapper.
ANGLE_INLINE void CommandBuffer::copyBufferToImage(VkBuffer srcBuffer,
                                                   const Image &dstImage,
                                                   VkImageLayout dstImageLayout,
                                                   uint32_t regionCount,
                                                   const VkBufferImageCopy *regions)
{
    ASSERT(valid() && dstImage.valid());
    ASSERT(srcBuffer != VK_NULL_HANDLE);
    ASSERT(regionCount == 1);
    vkCmdCopyBufferToImage(mHandle, srcBuffer, dstImage.getHandle(), dstImageLayout, 1, regions);
}
855
// Records vkCmdCopyImageToBuffer. Only a single region is supported (literal 1 forwarded).
ANGLE_INLINE void CommandBuffer::copyImageToBuffer(const Image &srcImage,
                                                   VkImageLayout srcImageLayout,
                                                   VkBuffer dstBuffer,
                                                   uint32_t regionCount,
                                                   const VkBufferImageCopy *regions)
{
    ASSERT(valid() && srcImage.valid());
    ASSERT(dstBuffer != VK_NULL_HANDLE);
    ASSERT(regionCount == 1);
    vkCmdCopyImageToBuffer(mHandle, srcImage.getHandle(), srcImageLayout, dstBuffer, 1, regions);
}
867
868 ANGLE_INLINE void CommandBuffer::clearColorImage(const Image &image,
869 VkImageLayout imageLayout,
870 const VkClearColorValue &color,
871 uint32_t rangeCount,
872 const VkImageSubresourceRange *ranges)
873 {
874 ASSERT(valid());
875 ASSERT(rangeCount == 1);
876 vkCmdClearColorImage(mHandle, image.getHandle(), imageLayout, &color, 1, ranges);
877 }
878
879 ANGLE_INLINE void CommandBuffer::clearDepthStencilImage(
880 const Image &image,
881 VkImageLayout imageLayout,
882 const VkClearDepthStencilValue &depthStencil,
883 uint32_t rangeCount,
884 const VkImageSubresourceRange *ranges)
885 {
886 ASSERT(valid());
887 ASSERT(rangeCount == 1);
888 vkCmdClearDepthStencilImage(mHandle, image.getHandle(), imageLayout, &depthStencil, 1, ranges);
889 }
890
// Records vkCmdClearAttachments (clears within the current render pass).
ANGLE_INLINE void CommandBuffer::clearAttachments(uint32_t attachmentCount,
                                                  const VkClearAttachment *attachments,
                                                  uint32_t rectCount,
                                                  const VkClearRect *rects)
{
    ASSERT(valid());
    vkCmdClearAttachments(mHandle, attachmentCount, attachments, rectCount, rects);
}
899
// Records vkCmdCopyImage. Only a single region is supported (literal 1 forwarded).
ANGLE_INLINE void CommandBuffer::copyImage(const Image &srcImage,
                                           VkImageLayout srcImageLayout,
                                           const Image &dstImage,
                                           VkImageLayout dstImageLayout,
                                           uint32_t regionCount,
                                           const VkImageCopy *regions)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    ASSERT(regionCount == 1);
    vkCmdCopyImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                   dstImageLayout, 1, regions);
}
912
// Begins a render pass instance on this command buffer.
ANGLE_INLINE void CommandBuffer::beginRenderPass(const VkRenderPassBeginInfo &beginInfo,
                                                 VkSubpassContents subpassContents)
{
    ASSERT(valid());
    vkCmdBeginRenderPass(mHandle, &beginInfo, subpassContents);
}
919
920 ANGLE_INLINE void CommandBuffer::endRenderPass()
921 {
922 ASSERT(mHandle != VK_NULL_HANDLE);
923 vkCmdEndRenderPass(mHandle);
924 }
925
// Binds the index buffer for subsequent indexed draws.
ANGLE_INLINE void CommandBuffer::bindIndexBuffer(const Buffer &buffer,
                                                 VkDeviceSize offset,
                                                 VkIndexType indexType)
{
    ASSERT(valid());
    vkCmdBindIndexBuffer(mHandle, buffer.getHandle(), offset, indexType);
}
933
// Binds descriptor sets; firstSet is ANGLE's DescriptorSetIndex enum, converted to its
// underlying integer for the Vulkan call.
ANGLE_INLINE void CommandBuffer::bindDescriptorSets(const PipelineLayout &layout,
                                                    VkPipelineBindPoint pipelineBindPoint,
                                                    DescriptorSetIndex firstSet,
                                                    uint32_t descriptorSetCount,
                                                    const VkDescriptorSet *descriptorSets,
                                                    uint32_t dynamicOffsetCount,
                                                    const uint32_t *dynamicOffsets)
{
    ASSERT(valid() && layout.valid());
    vkCmdBindDescriptorSets(this->mHandle, pipelineBindPoint, layout.getHandle(),
                            ToUnderlying(firstSet), descriptorSetCount, descriptorSets,
                            dynamicOffsetCount, dynamicOffsets);
}
947
// Executes secondary command buffers. Passing commandBuffers[0].ptr() as the handle array
// relies on CommandBuffer wrappers containing exactly one VkCommandBuffer member, so an
// array of wrappers is layout-compatible with an array of handles.
ANGLE_INLINE void CommandBuffer::executeCommands(uint32_t commandBufferCount,
                                                 const CommandBuffer *commandBuffers)
{
    ASSERT(valid());
    vkCmdExecuteCommands(mHandle, commandBufferCount, commandBuffers[0].ptr());
}
954
// Vulkan exposes no per-command-buffer memory stats, so report fixed values.
// NOTE(review): allocatedMemoryOut is set to 1 rather than 0 — presumably so callers
// computing used/allocated ratios avoid dividing by zero; confirm against callers.
ANGLE_INLINE void CommandBuffer::getMemoryUsageStats(size_t *usedMemoryOut,
                                                     size_t *allocatedMemoryOut) const
{
    // No data available.
    *usedMemoryOut      = 0;
    *allocatedMemoryOut = 1;
}
962
// Records vkCmdFillBuffer: fills [dstOffset, dstOffset + size) with the 32-bit value.
ANGLE_INLINE void CommandBuffer::fillBuffer(const Buffer &dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize size,
                                            uint32_t data)
{
    ASSERT(valid());
    vkCmdFillBuffer(mHandle, dstBuffer.getHandle(), dstOffset, size, data);
}
971
// Records vkCmdPushConstants. Only offset 0 is supported: the offset parameter is
// asserted to be 0 and a literal 0 is forwarded to Vulkan.
ANGLE_INLINE void CommandBuffer::pushConstants(const PipelineLayout &layout,
                                               VkShaderStageFlags flag,
                                               uint32_t offset,
                                               uint32_t size,
                                               const void *data)
{
    ASSERT(valid() && layout.valid());
    ASSERT(offset == 0);
    vkCmdPushConstants(mHandle, layout.getHandle(), flag, 0, size, data);
}
982
// Sets the blend constants used by VK_BLEND_FACTOR_CONSTANT_* blend factors.
ANGLE_INLINE void CommandBuffer::setBlendConstants(const float blendConstants[4])
{
    ASSERT(valid());
    vkCmdSetBlendConstants(mHandle, blendConstants);
}

// Dynamic cull-mode setter (EXT entry point — requires extended dynamic state
// to be available; presumably guaranteed by the caller/feature checks).
ANGLE_INLINE void CommandBuffer::setCullMode(VkCullModeFlags cullMode)
{
    ASSERT(valid());
    vkCmdSetCullModeEXT(mHandle, cullMode);
}

// Sets the depth bias parameters applied to rasterized fragments.
ANGLE_INLINE void CommandBuffer::setDepthBias(float depthBiasConstantFactor,
                                              float depthBiasClamp,
                                              float depthBiasSlopeFactor)
{
    ASSERT(valid());
    vkCmdSetDepthBias(mHandle, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
}

// Dynamically enables/disables depth bias (EXT entry point).
ANGLE_INLINE void CommandBuffer::setDepthBiasEnable(VkBool32 depthBiasEnable)
{
    ASSERT(valid());
    vkCmdSetDepthBiasEnableEXT(mHandle, depthBiasEnable);
}

// Dynamically sets the depth comparison operator (EXT entry point).
ANGLE_INLINE void CommandBuffer::setDepthCompareOp(VkCompareOp depthCompareOp)
{
    ASSERT(valid());
    vkCmdSetDepthCompareOpEXT(mHandle, depthCompareOp);
}

// Dynamically enables/disables the depth test (EXT entry point).
ANGLE_INLINE void CommandBuffer::setDepthTestEnable(VkBool32 depthTestEnable)
{
    ASSERT(valid());
    vkCmdSetDepthTestEnableEXT(mHandle, depthTestEnable);
}

// Dynamically enables/disables depth writes (EXT entry point).
ANGLE_INLINE void CommandBuffer::setDepthWriteEnable(VkBool32 depthWriteEnable)
{
    ASSERT(valid());
    vkCmdSetDepthWriteEnableEXT(mHandle, depthWriteEnable);
}
// Signals |event| when the stages in |stageMask| complete.
ANGLE_INLINE void CommandBuffer::setEvent(VkEvent event, VkPipelineStageFlags stageMask)
{
    ASSERT(valid() && event != VK_NULL_HANDLE);
    vkCmdSetEvent(mHandle, event, stageMask);
}

// Sets the fragment shading rate and its combiner operations (KHR extension).
ANGLE_INLINE void CommandBuffer::setFragmentShadingRate(const VkExtent2D *fragmentSize,
                                                        VkFragmentShadingRateCombinerOpKHR ops[2])
{
    ASSERT(valid() && fragmentSize != nullptr);
    vkCmdSetFragmentShadingRateKHR(mHandle, fragmentSize, ops);
}

// Dynamically sets the front-face winding order (EXT entry point).
ANGLE_INLINE void CommandBuffer::setFrontFace(VkFrontFace frontFace)
{
    ASSERT(valid());
    vkCmdSetFrontFaceEXT(mHandle, frontFace);
}

// Sets the rasterized line width.
ANGLE_INLINE void CommandBuffer::setLineWidth(float lineWidth)
{
    ASSERT(valid());
    vkCmdSetLineWidth(mHandle, lineWidth);
}

// Dynamically sets the framebuffer logic op (EXT entry point).
ANGLE_INLINE void CommandBuffer::setLogicOp(VkLogicOp logicOp)
{
    ASSERT(valid());
    vkCmdSetLogicOpEXT(mHandle, logicOp);
}

// Dynamically enables/disables primitive restart (EXT entry point).
ANGLE_INLINE void CommandBuffer::setPrimitiveRestartEnable(VkBool32 primitiveRestartEnable)
{
    ASSERT(valid());
    vkCmdSetPrimitiveRestartEnableEXT(mHandle, primitiveRestartEnable);
}

// Dynamically enables/disables rasterizer discard (EXT entry point).
ANGLE_INLINE void CommandBuffer::setRasterizerDiscardEnable(VkBool32 rasterizerDiscardEnable)
{
    ASSERT(valid());
    vkCmdSetRasterizerDiscardEnableEXT(mHandle, rasterizerDiscardEnable);
}

// Sets |scissorCount| scissor rectangles starting at index |firstScissor|.
ANGLE_INLINE void CommandBuffer::setScissor(uint32_t firstScissor,
                                            uint32_t scissorCount,
                                            const VkRect2D *scissors)
{
    ASSERT(valid() && scissors != nullptr);
    vkCmdSetScissor(mHandle, firstScissor, scissorCount, scissors);
}
1077
// Sets independent front/back stencil compare masks with two driver calls.
ANGLE_INLINE void CommandBuffer::setStencilCompareMask(uint32_t compareFrontMask,
                                                       uint32_t compareBackMask)
{
    ASSERT(valid());
    vkCmdSetStencilCompareMask(mHandle, VK_STENCIL_FACE_FRONT_BIT, compareFrontMask);
    vkCmdSetStencilCompareMask(mHandle, VK_STENCIL_FACE_BACK_BIT, compareBackMask);
}

// Dynamically sets the stencil operations for the faces in |faceMask|
// (EXT entry point).
ANGLE_INLINE void CommandBuffer::setStencilOp(VkStencilFaceFlags faceMask,
                                              VkStencilOp failOp,
                                              VkStencilOp passOp,
                                              VkStencilOp depthFailOp,
                                              VkCompareOp compareOp)
{
    ASSERT(valid());
    vkCmdSetStencilOpEXT(mHandle, faceMask, failOp, passOp, depthFailOp, compareOp);
}

// Sets independent front/back stencil reference values with two driver calls.
ANGLE_INLINE void CommandBuffer::setStencilReference(uint32_t frontReference,
                                                     uint32_t backReference)
{
    ASSERT(valid());
    vkCmdSetStencilReference(mHandle, VK_STENCIL_FACE_FRONT_BIT, frontReference);
    vkCmdSetStencilReference(mHandle, VK_STENCIL_FACE_BACK_BIT, backReference);
}

// Dynamically enables/disables the stencil test (EXT entry point).
ANGLE_INLINE void CommandBuffer::setStencilTestEnable(VkBool32 stencilTestEnable)
{
    ASSERT(valid());
    vkCmdSetStencilTestEnableEXT(mHandle, stencilTestEnable);
}

// Sets independent front/back stencil write masks with two driver calls.
ANGLE_INLINE void CommandBuffer::setStencilWriteMask(uint32_t writeFrontMask,
                                                     uint32_t writeBackMask)
{
    ASSERT(valid());
    vkCmdSetStencilWriteMask(mHandle, VK_STENCIL_FACE_FRONT_BIT, writeFrontMask);
    vkCmdSetStencilWriteMask(mHandle, VK_STENCIL_FACE_BACK_BIT, writeBackMask);
}

// Sets |viewportCount| viewports starting at index |firstViewport|.
ANGLE_INLINE void CommandBuffer::setViewport(uint32_t firstViewport,
                                             uint32_t viewportCount,
                                             const VkViewport *viewports)
{
    ASSERT(valid() && viewports != nullptr);
    vkCmdSetViewport(mHandle, firstViewport, viewportCount, viewports);
}
1125
// Unsignals |event| after the stages in |stageMask| complete.
ANGLE_INLINE void CommandBuffer::resetEvent(VkEvent event, VkPipelineStageFlags stageMask)
{
    ASSERT(valid() && event != VK_NULL_HANDLE);
    vkCmdResetEvent(mHandle, event, stageMask);
}

// Waits on |eventCount| events and applies the supplied memory/buffer/image
// barriers once they are signaled.
ANGLE_INLINE void CommandBuffer::waitEvents(uint32_t eventCount,
                                            const VkEvent *events,
                                            VkPipelineStageFlags srcStageMask,
                                            VkPipelineStageFlags dstStageMask,
                                            uint32_t memoryBarrierCount,
                                            const VkMemoryBarrier *memoryBarriers,
                                            uint32_t bufferMemoryBarrierCount,
                                            const VkBufferMemoryBarrier *bufferMemoryBarriers,
                                            uint32_t imageMemoryBarrierCount,
                                            const VkImageMemoryBarrier *imageMemoryBarriers)
{
    ASSERT(valid());
    vkCmdWaitEvents(mHandle, eventCount, events, srcStageMask, dstStageMask, memoryBarrierCount,
                    memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
                    imageMemoryBarrierCount, imageMemoryBarriers);
}
1148
// Resets |queryCount| queries in |queryPool| starting at |firstQuery|.
ANGLE_INLINE void CommandBuffer::resetQueryPool(const QueryPool &queryPool,
                                                uint32_t firstQuery,
                                                uint32_t queryCount)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdResetQueryPool(mHandle, queryPool.getHandle(), firstQuery, queryCount);
}

// Resolves multisampled regions of |srcImage| into |dstImage|.
ANGLE_INLINE void CommandBuffer::resolveImage(const Image &srcImage,
                                              VkImageLayout srcImageLayout,
                                              const Image &dstImage,
                                              VkImageLayout dstImageLayout,
                                              uint32_t regionCount,
                                              const VkImageResolve *regions)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    vkCmdResolveImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                      dstImageLayout, regionCount, regions);
}

// Begins query |query| in |queryPool|.
ANGLE_INLINE void CommandBuffer::beginQuery(const QueryPool &queryPool,
                                            uint32_t query,
                                            VkQueryControlFlags flags)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdBeginQuery(mHandle, queryPool.getHandle(), query, flags);
}

// Ends query |query| in |queryPool|.
ANGLE_INLINE void CommandBuffer::endQuery(const QueryPool &queryPool, uint32_t query)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdEndQuery(mHandle, queryPool.getHandle(), query);
}

// Writes a timestamp into |queryPool| when |pipelineStage| completes.
ANGLE_INLINE void CommandBuffer::writeTimestamp(VkPipelineStageFlagBits pipelineStage,
                                                const QueryPool &queryPool,
                                                uint32_t query)
{
    ASSERT(valid());
    vkCmdWriteTimestamp(mHandle, pipelineStage, queryPool.getHandle(), query);
}
1190
// Records a non-indexed draw.
ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount,
                                      uint32_t instanceCount,
                                      uint32_t firstVertex,
                                      uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
}

// Records an indexed draw.
ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount,
                                             uint32_t instanceCount,
                                             uint32_t firstIndex,
                                             int32_t vertexOffset,
                                             uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}

// Records indexed draws whose parameters are read from |buffer| at |offset|.
ANGLE_INLINE void CommandBuffer::drawIndexedIndirect(const Buffer &buffer,
                                                     VkDeviceSize offset,
                                                     uint32_t drawCount,
                                                     uint32_t stride)
{
    ASSERT(valid());
    vkCmdDrawIndexedIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
}

// Records non-indexed draws whose parameters are read from |buffer| at |offset|.
ANGLE_INLINE void CommandBuffer::drawIndirect(const Buffer &buffer,
                                              VkDeviceSize offset,
                                              uint32_t drawCount,
                                              uint32_t stride)
{
    ASSERT(valid());
    vkCmdDrawIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
}

// Records a compute dispatch of the given workgroup counts.
ANGLE_INLINE void CommandBuffer::dispatch(uint32_t groupCountX,
                                          uint32_t groupCountY,
                                          uint32_t groupCountZ)
{
    ASSERT(valid());
    vkCmdDispatch(mHandle, groupCountX, groupCountY, groupCountZ);
}

// Records a compute dispatch whose workgroup counts are read from |buffer|.
ANGLE_INLINE void CommandBuffer::dispatchIndirect(const Buffer &buffer, VkDeviceSize offset)
{
    ASSERT(valid());
    vkCmdDispatchIndirect(mHandle, buffer.getHandle(), offset);
}
1241
// Binds |pipeline| at an explicit bind point.
ANGLE_INLINE void CommandBuffer::bindPipeline(VkPipelineBindPoint pipelineBindPoint,
                                              const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, pipelineBindPoint, pipeline.getHandle());
}

// Convenience overload: binds |pipeline| at the graphics bind point.
ANGLE_INLINE void CommandBuffer::bindGraphicsPipeline(const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.getHandle());
}

// Convenience overload: binds |pipeline| at the compute bind point.
ANGLE_INLINE void CommandBuffer::bindComputePipeline(const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.getHandle());
}

// Binds |bindingCount| vertex buffers starting at binding |firstBinding|.
ANGLE_INLINE void CommandBuffer::bindVertexBuffers(uint32_t firstBinding,
                                                   uint32_t bindingCount,
                                                   const VkBuffer *buffers,
                                                   const VkDeviceSize *offsets)
{
    ASSERT(valid());
    vkCmdBindVertexBuffers(mHandle, firstBinding, bindingCount, buffers, offsets);
}

// Extended variant that also supplies per-binding sizes and strides
// (EXT entry point).
ANGLE_INLINE void CommandBuffer::bindVertexBuffers2(uint32_t firstBinding,
                                                    uint32_t bindingCount,
                                                    const VkBuffer *buffers,
                                                    const VkDeviceSize *offsets,
                                                    const VkDeviceSize *sizes,
                                                    const VkDeviceSize *strides)
{
    ASSERT(valid());
    vkCmdBindVertexBuffers2EXT(mHandle, firstBinding, bindingCount, buffers, offsets, sizes,
                               strides);
}
1281
// Begins transform feedback.  The extra ASSERT on the function pointer guards
// against the VK_EXT_transform_feedback entry point not having been loaded.
ANGLE_INLINE void CommandBuffer::beginTransformFeedback(uint32_t firstCounterBuffer,
                                                        uint32_t counterBufferCount,
                                                        const VkBuffer *counterBuffers,
                                                        const VkDeviceSize *counterBufferOffsets)
{
    ASSERT(valid());
    ASSERT(vkCmdBeginTransformFeedbackEXT);
    vkCmdBeginTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
                                   counterBufferOffsets);
}

// Ends transform feedback; see beginTransformFeedback for the pointer ASSERT.
ANGLE_INLINE void CommandBuffer::endTransformFeedback(uint32_t firstCounterBuffer,
                                                      uint32_t counterBufferCount,
                                                      const VkBuffer *counterBuffers,
                                                      const VkDeviceSize *counterBufferOffsets)
{
    ASSERT(valid());
    ASSERT(vkCmdEndTransformFeedbackEXT);
    vkCmdEndTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
                                 counterBufferOffsets);
}

// Binds transform feedback output buffers; see beginTransformFeedback for the
// pointer ASSERT.
ANGLE_INLINE void CommandBuffer::bindTransformFeedbackBuffers(uint32_t firstBinding,
                                                              uint32_t bindingCount,
                                                              const VkBuffer *buffers,
                                                              const VkDeviceSize *offsets,
                                                              const VkDeviceSize *sizes)
{
    ASSERT(valid());
    ASSERT(vkCmdBindTransformFeedbackBuffersEXT);
    vkCmdBindTransformFeedbackBuffersEXT(mHandle, firstBinding, bindingCount, buffers, offsets,
                                         sizes);
}
1315
// Opens a debug-utils label region on this command buffer.
ANGLE_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
{
    ASSERT(valid());
    {
#if !defined(ANGLE_SHARED_LIBVULKAN)
        // When the vulkan-loader is statically linked, we need to use the extension
        // functions defined in ANGLE's rx namespace. When it's dynamically linked
        // with volk, this will default to the function definitions with no namespace
        using rx::vkCmdBeginDebugUtilsLabelEXT;
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
        ASSERT(vkCmdBeginDebugUtilsLabelEXT);
        vkCmdBeginDebugUtilsLabelEXT(mHandle, &labelInfo);
    }
}

// Closes the innermost open debug-utils label region.
ANGLE_INLINE void CommandBuffer::endDebugUtilsLabelEXT()
{
    ASSERT(valid());
    ASSERT(vkCmdEndDebugUtilsLabelEXT);
    vkCmdEndDebugUtilsLabelEXT(mHandle);
}

// Inserts a single debug-utils label (no region).
ANGLE_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
{
    ASSERT(valid());
    ASSERT(vkCmdInsertDebugUtilsLabelEXT);
    vkCmdInsertDebugUtilsLabelEXT(mHandle, &labelInfo);
}
1344 } // namespace priv
1345
1346 // Image implementation.
// Image implementation.
// Adopts an externally created handle without taking ownership semantics into
// account; the caller is responsible for eventual destruction.
ANGLE_INLINE void Image::setHandle(VkImage handle)
{
    mHandle = handle;
}

// Drops the handle without destroying it (for non-owned images).
ANGLE_INLINE void Image::reset()
{
    mHandle = VK_NULL_HANDLE;
}

// Destroys the image if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Image::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyImage(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the image; must not already hold a handle.
ANGLE_INLINE VkResult Image::init(VkDevice device, const VkImageCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateImage(device, &createInfo, nullptr, &mHandle);
}

// Queries size/alignment/memory-type requirements for this image.
ANGLE_INLINE void Image::getMemoryRequirements(VkDevice device,
                                               VkMemoryRequirements *requirementsOut) const
{
    ASSERT(valid());
    vkGetImageMemoryRequirements(device, mHandle, requirementsOut);
}

// Binds |deviceMemory| to this image at offset 0.
ANGLE_INLINE VkResult Image::bindMemory(VkDevice device, const vk::DeviceMemory &deviceMemory)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindImageMemory(device, mHandle, deviceMemory.getHandle(), 0);
}

// Binds memory via the extended bind-info structure (KHR entry point).
ANGLE_INLINE VkResult Image::bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo)
{
    ASSERT(valid());
    return vkBindImageMemory2KHR(device, 1, &bindInfo);
}
1390
1391 ANGLE_INLINE void Image::getSubresourceLayout(VkDevice device,
1392 VkImageAspectFlagBits aspectMask,
1393 uint32_t mipLevel,
1394 uint32_t arrayLayer,
1395 VkSubresourceLayout *outSubresourceLayout) const
1396 {
1397 VkImageSubresource subresource = {};
1398 subresource.aspectMask = aspectMask;
1399 subresource.mipLevel = mipLevel;
1400 subresource.arrayLayer = arrayLayer;
1401
1402 vkGetImageSubresourceLayout(device, getHandle(), &subresource, outSubresourceLayout);
1403 }
1404
1405 // ImageView implementation.
// ImageView implementation.
// Destroys the view if valid; safe to call on an empty wrapper.
ANGLE_INLINE void ImageView::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyImageView(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1414
1415 ANGLE_INLINE VkResult ImageView::init(VkDevice device, const VkImageViewCreateInfo &createInfo)
1416 {
1417 return vkCreateImageView(device, &createInfo, nullptr, &mHandle);
1418 }
1419
1420 // Semaphore implementation.
// Semaphore implementation.
// Destroys the semaphore if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Semaphore::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySemaphore(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1429
1430 ANGLE_INLINE VkResult Semaphore::init(VkDevice device)
1431 {
1432 ASSERT(!valid());
1433
1434 VkSemaphoreCreateInfo semaphoreInfo = {};
1435 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
1436 semaphoreInfo.flags = 0;
1437
1438 return vkCreateSemaphore(device, &semaphoreInfo, nullptr, &mHandle);
1439 }
1440
// Imports an external semaphore payload from a file descriptor (KHR entry
// point).
ANGLE_INLINE VkResult Semaphore::importFd(VkDevice device,
                                          const VkImportSemaphoreFdInfoKHR &importFdInfo) const
{
    ASSERT(valid());
    return vkImportSemaphoreFdKHR(device, &importFdInfo);
}
1447
1448 // Framebuffer implementation.
// Framebuffer implementation.
// Destroys the framebuffer if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Framebuffer::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyFramebuffer(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the framebuffer; must not already hold a handle.
ANGLE_INLINE VkResult Framebuffer::init(VkDevice device, const VkFramebufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFramebuffer(device, &createInfo, nullptr, &mHandle);
}

// Adopts an externally created handle; the caller retains ownership
// responsibilities.
ANGLE_INLINE void Framebuffer::setHandle(VkFramebuffer handle)
{
    mHandle = handle;
}
1468
1469 // DeviceMemory implementation.
// DeviceMemory implementation.
// Frees the allocation if valid; safe to call on an empty wrapper.
ANGLE_INLINE void DeviceMemory::destroy(VkDevice device)
{
    if (valid())
    {
        vkFreeMemory(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Allocates device memory; must not already hold a handle.
ANGLE_INLINE VkResult DeviceMemory::allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo)
{
    ASSERT(!valid());
    return vkAllocateMemory(device, &allocInfo, nullptr, &mHandle);
}

// Maps [offset, offset+size) of the allocation and returns the CPU pointer in
// |mapPointer|.
ANGLE_INLINE VkResult DeviceMemory::map(VkDevice device,
                                        VkDeviceSize offset,
                                        VkDeviceSize size,
                                        VkMemoryMapFlags flags,
                                        uint8_t **mapPointer) const
{
    ASSERT(valid());
    return vkMapMemory(device, mHandle, offset, size, flags, reinterpret_cast<void **>(mapPointer));
}

// Unmaps a previously mapped allocation.
ANGLE_INLINE void DeviceMemory::unmap(VkDevice device) const
{
    ASSERT(valid());
    vkUnmapMemory(device, mHandle);
}

// Flushes a mapped range so host writes become visible to the device.
ANGLE_INLINE void DeviceMemory::flush(VkDevice device, VkMappedMemoryRange &memRange)
{
    vkFlushMappedMemoryRanges(device, 1, &memRange);
}

// Invalidates a mapped range so device writes become visible to the host.
ANGLE_INLINE void DeviceMemory::invalidate(VkDevice device, VkMappedMemoryRange &memRange)
{
    vkInvalidateMappedMemoryRanges(device, 1, &memRange);
}
1510
1511 // Allocator implementation.
// Allocator implementation (VMA-backed).
// Destroys the VMA allocator if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Allocator::destroy()
{
    if (valid())
    {
        vma::DestroyAllocator(mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the VMA allocator for |device|; must not already hold a handle.
ANGLE_INLINE VkResult Allocator::init(VkPhysicalDevice physicalDevice,
                                      VkDevice device,
                                      VkInstance instance,
                                      uint32_t apiVersion,
                                      VkDeviceSize preferredLargeHeapBlockSize)
{
    ASSERT(!valid());
    return vma::InitAllocator(physicalDevice, device, instance, apiVersion,
                              preferredLargeHeapBlockSize, &mHandle);
}

// Creates a buffer together with a VMA allocation backing it.  Writes the
// chosen memory type index and fills in the caller's (empty) Buffer and
// Allocation wrappers directly through their private handles.
ANGLE_INLINE VkResult Allocator::createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut,
                                              Buffer *bufferOut,
                                              Allocation *allocationOut) const
{
    ASSERT(valid());
    ASSERT(bufferOut && !bufferOut->valid());
    ASSERT(allocationOut && !allocationOut->valid());
    return vma::CreateBuffer(mHandle, &bufferCreateInfo, requiredFlags, preferredFlags,
                             persistentlyMappedBuffers, memoryTypeIndexOut, &bufferOut->mHandle,
                             &allocationOut->mHandle);
}

// Returns the property flags of the given memory type index.
ANGLE_INLINE void Allocator::getMemoryTypeProperties(uint32_t memoryTypeIndex,
                                                     VkMemoryPropertyFlags *flagsOut) const
{
    ASSERT(valid());
    vma::GetMemoryTypeProperties(mHandle, memoryTypeIndex, flagsOut);
}

// Finds a memory type index compatible with |bufferCreateInfo| and the given
// property flags, without creating a buffer.
ANGLE_INLINE VkResult
Allocator::findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                            VkMemoryPropertyFlags requiredFlags,
                                            VkMemoryPropertyFlags preferredFlags,
                                            bool persistentlyMappedBuffers,
                                            uint32_t *memoryTypeIndexOut) const
{
    ASSERT(valid());
    return vma::FindMemoryTypeIndexForBufferInfo(mHandle, &bufferCreateInfo, requiredFlags,
                                                 preferredFlags, persistentlyMappedBuffers,
                                                 memoryTypeIndexOut);
}

// Builds a VMA statistics string; release it with freeStatsString().
ANGLE_INLINE void Allocator::buildStatsString(char **statsString, VkBool32 detailedMap)
{
    ASSERT(valid());
    vma::BuildStatsString(mHandle, statsString, detailedMap);
}

// Frees a string returned by buildStatsString().
ANGLE_INLINE void Allocator::freeStatsString(char *statsString)
{
    ASSERT(valid());
    vma::FreeStatsString(mHandle, statsString);
}
1579
1580 // Allocation implementation.
// Allocation implementation.
// Frees the VMA allocation if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Allocation::destroy(const Allocator &allocator)
{
    if (valid())
    {
        vma::FreeMemory(allocator.getHandle(), mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}
1589
1590 ANGLE_INLINE VkResult Allocation::map(const Allocator &allocator, uint8_t **mapPointer) const
1591 {
1592 ASSERT(valid());
1593 return vma::MapMemory(allocator.getHandle(), mHandle, (void **)mapPointer);
1594 }
1595
// Unmaps a previously mapped allocation.
ANGLE_INLINE void Allocation::unmap(const Allocator &allocator) const
{
    ASSERT(valid());
    vma::UnmapMemory(allocator.getHandle(), mHandle);
}

// Flushes [offset, offset+size) so host writes become visible to the device.
ANGLE_INLINE void Allocation::flush(const Allocator &allocator,
                                    VkDeviceSize offset,
                                    VkDeviceSize size) const
{
    ASSERT(valid());
    vma::FlushAllocation(allocator.getHandle(), mHandle, offset, size);
}

// Invalidates [offset, offset+size) so device writes become visible to the
// host.
ANGLE_INLINE void Allocation::invalidate(const Allocator &allocator,
                                         VkDeviceSize offset,
                                         VkDeviceSize size) const
{
    ASSERT(valid());
    vma::InvalidateAllocation(allocator.getHandle(), mHandle, offset, size);
}
1617
1618 // RenderPass implementation.
// RenderPass implementation.
// Destroys the render pass if valid; safe to call on an empty wrapper.
ANGLE_INLINE void RenderPass::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyRenderPass(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the render pass (core create-info version); must not already hold a
// handle.
ANGLE_INLINE VkResult RenderPass::init(VkDevice device, const VkRenderPassCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass(device, &createInfo, nullptr, &mHandle);
}

// Creates the render pass via VK_KHR_create_renderpass2.
ANGLE_INLINE VkResult RenderPass::init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass2KHR(device, &createInfo, nullptr, &mHandle);
}
1639
1640 // Buffer implementation.
// Buffer implementation.
// Destroys the buffer if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Buffer::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBuffer(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the buffer; must not already hold a handle.
ANGLE_INLINE VkResult Buffer::init(VkDevice device, const VkBufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBuffer(device, &createInfo, nullptr, &mHandle);
}

// Binds |deviceMemory| to this buffer at |offset|.
ANGLE_INLINE VkResult Buffer::bindMemory(VkDevice device,
                                         const DeviceMemory &deviceMemory,
                                         VkDeviceSize offset)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindBufferMemory(device, mHandle, deviceMemory.getHandle(), offset);
}

// Queries size/alignment/memory-type requirements for this buffer.
ANGLE_INLINE void Buffer::getMemoryRequirements(VkDevice device,
                                                VkMemoryRequirements *memoryRequirementsOut)
{
    ASSERT(valid());
    vkGetBufferMemoryRequirements(device, mHandle, memoryRequirementsOut);
}
1670
1671 // BufferView implementation.
// BufferView implementation.
// Destroys the buffer view if valid; safe to call on an empty wrapper.
ANGLE_INLINE void BufferView::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBufferView(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the buffer view; must not already hold a handle.
ANGLE_INLINE VkResult BufferView::init(VkDevice device, const VkBufferViewCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBufferView(device, &createInfo, nullptr, &mHandle);
}
1686
1687 // ShaderModule implementation.
1688 ANGLE_INLINE void ShaderModule::destroy(VkDevice device)
1689 {
1690 if (mHandle != VK_NULL_HANDLE)
1691 {
1692 vkDestroyShaderModule(device, mHandle, nullptr);
1693 mHandle = VK_NULL_HANDLE;
1694 }
1695 }
1696
// Creates the shader module; must not already hold a handle.
ANGLE_INLINE VkResult ShaderModule::init(VkDevice device,
                                         const VkShaderModuleCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateShaderModule(device, &createInfo, nullptr, &mHandle);
}
1703
1704 // PipelineLayout implementation.
// PipelineLayout implementation.
// Destroys the pipeline layout if valid; safe to call on an empty wrapper.
ANGLE_INLINE void PipelineLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the pipeline layout; must not already hold a handle.
ANGLE_INLINE VkResult PipelineLayout::init(VkDevice device,
                                           const VkPipelineLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreatePipelineLayout(device, &createInfo, nullptr, &mHandle);
}
1720
1721 // PipelineCache implementation.
// PipelineCache implementation.
// Destroys the pipeline cache if valid; safe to call on an empty wrapper.
ANGLE_INLINE void PipelineCache::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineCache(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the pipeline cache; must not already hold a handle.
ANGLE_INLINE VkResult PipelineCache::init(VkDevice device,
                                          const VkPipelineCacheCreateInfo &createInfo)
{
    ASSERT(!valid());
    // Note: if we are concerned with memory usage of this cache, we should give it custom
    // allocators. Also, failure of this function is of little importance.
    return vkCreatePipelineCache(device, &createInfo, nullptr, &mHandle);
}

// Merges |srcCacheCount| source caches into this cache.
ANGLE_INLINE VkResult PipelineCache::merge(VkDevice device,
                                           uint32_t srcCacheCount,
                                           const VkPipelineCache *srcCaches) const
{
    ASSERT(valid());
    return vkMergePipelineCaches(device, mHandle, srcCacheCount, srcCaches);
}

// Retrieves serialized cache data.  Call with *cacheSize == 0 to query the
// required size, then again with a suitably sized buffer.
ANGLE_INLINE VkResult PipelineCache::getCacheData(VkDevice device,
                                                  size_t *cacheSize,
                                                  void *cacheData) const
{
    ASSERT(valid());

    // Note: vkGetPipelineCacheData can return VK_INCOMPLETE if cacheSize is smaller than actual
    // size. There are two usages of this function. One is with *cacheSize == 0 to query the size
    // of the cache, and one is with an appropriate buffer to retrieve the cache contents.
    // VK_INCOMPLETE in the first case is an expected output. In the second case, VK_INCOMPLETE is
    // also acceptable and the resulting buffer will contain valid value by spec. Angle currently
    // ensures *cacheSize to be either 0 or of enough size, therefore VK_INCOMPLETE is not expected.
    return vkGetPipelineCacheData(device, mHandle, cacheSize, cacheData);
}
1762
1763 // Pipeline implementation.
// Pipeline implementation.
// Destroys the pipeline if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Pipeline::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipeline(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates a single graphics pipeline through |pipelineCacheVk|; must not
// already hold a handle.
ANGLE_INLINE VkResult Pipeline::initGraphics(VkDevice device,
                                             const VkGraphicsPipelineCreateInfo &createInfo,
                                             const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateGraphicsPipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                     &mHandle);
}

// Creates a single compute pipeline through |pipelineCacheVk|; must not
// already hold a handle.
ANGLE_INLINE VkResult Pipeline::initCompute(VkDevice device,
                                            const VkComputePipelineCreateInfo &createInfo,
                                            const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateComputePipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                    &mHandle);
}
1790
1791 // DescriptorSetLayout implementation.
// DescriptorSetLayout implementation.
// Destroys the layout if valid; safe to call on an empty wrapper.
ANGLE_INLINE void DescriptorSetLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorSetLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the descriptor set layout; must not already hold a handle.
ANGLE_INLINE VkResult DescriptorSetLayout::init(VkDevice device,
                                                const VkDescriptorSetLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorSetLayout(device, &createInfo, nullptr, &mHandle);
}
1807
1808 // DescriptorPool implementation.
// DescriptorPool implementation.
// Destroys the pool if valid; safe to call on an empty wrapper.
ANGLE_INLINE void DescriptorPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the descriptor pool; must not already hold a handle.
ANGLE_INLINE VkResult DescriptorPool::init(VkDevice device,
                                           const VkDescriptorPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorPool(device, &createInfo, nullptr, &mHandle);
}

// Allocates descriptor sets.  Note |allocInfo| carries the pool handle itself,
// so mHandle is only consulted via the valid() check here.
ANGLE_INLINE VkResult
DescriptorPool::allocateDescriptorSets(VkDevice device,
                                       const VkDescriptorSetAllocateInfo &allocInfo,
                                       VkDescriptorSet *descriptorSetsOut)
{
    ASSERT(valid());
    return vkAllocateDescriptorSets(device, &allocInfo, descriptorSetsOut);
}

// Returns descriptor sets to this pool.
ANGLE_INLINE VkResult DescriptorPool::freeDescriptorSets(VkDevice device,
                                                         uint32_t descriptorSetCount,
                                                         const VkDescriptorSet *descriptorSets)
{
    ASSERT(valid());
    ASSERT(descriptorSetCount > 0);
    return vkFreeDescriptorSets(device, mHandle, descriptorSetCount, descriptorSets);
}
1842
1843 // Sampler implementation.
// Sampler implementation.
// Destroys the sampler if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Sampler::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySampler(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the sampler; must not already hold a handle.
ANGLE_INLINE VkResult Sampler::init(VkDevice device, const VkSamplerCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSampler(device, &createInfo, nullptr, &mHandle);
}
1858
// SamplerYcbcrConversion implementation.
// Destroys the Y'CbCr conversion object if valid (KHR entry point); safe to
// call on an empty wrapper.
ANGLE_INLINE void SamplerYcbcrConversion::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySamplerYcbcrConversionKHR(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the Y'CbCr conversion object (KHR entry point); must not already
// hold a handle.
ANGLE_INLINE VkResult
SamplerYcbcrConversion::init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSamplerYcbcrConversionKHR(device, &createInfo, nullptr, &mHandle);
}
1875
1876 // Event implementation.
// Event implementation.
// Destroys the event if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Event::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyEvent(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the event; must not already hold a handle.
ANGLE_INLINE VkResult Event::init(VkDevice device, const VkEventCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateEvent(device, &createInfo, nullptr, &mHandle);
}

// Returns VK_EVENT_SET / VK_EVENT_RESET (or an error) for this event.
ANGLE_INLINE VkResult Event::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetEventStatus(device, mHandle);
}

// Signals the event from the host.
ANGLE_INLINE VkResult Event::set(VkDevice device) const
{
    ASSERT(valid());
    return vkSetEvent(device, mHandle);
}

// Unsignals the event from the host.
ANGLE_INLINE VkResult Event::reset(VkDevice device) const
{
    ASSERT(valid());
    return vkResetEvent(device, mHandle);
}
1909
1910 // Fence implementation.
// Fence implementation.
// Destroys the fence if valid; safe to call on an empty wrapper.
ANGLE_INLINE void Fence::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyFence(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the fence; must not already hold a handle.
ANGLE_INLINE VkResult Fence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFence(device, &createInfo, nullptr, &mHandle);
}

// Returns this fence to the unsignaled state.
ANGLE_INLINE VkResult Fence::reset(VkDevice device)
{
    ASSERT(valid());
    return vkResetFences(device, 1, &mHandle);
}

// Returns VK_SUCCESS / VK_NOT_READY (or an error) for this fence.
ANGLE_INLINE VkResult Fence::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetFenceStatus(device, mHandle);
}
1937
1938 ANGLE_INLINE VkResult Fence::wait(VkDevice device, uint64_t timeout) const
1939 {
1940 ASSERT(valid());
1941 return vkWaitForFences(device, 1, &mHandle, true, timeout);
1942 }
1943
// Imports an external fence payload from a file descriptor (KHR entry point).
ANGLE_INLINE VkResult Fence::importFd(VkDevice device,
                                      const VkImportFenceFdInfoKHR &importFenceFdInfo) const
{
    ASSERT(valid());
    return vkImportFenceFdKHR(device, &importFenceFdInfo);
}

// Exports this fence's payload as a file descriptor in |fdOut| (KHR entry
// point).
ANGLE_INLINE VkResult Fence::exportFd(VkDevice device,
                                      const VkFenceGetFdInfoKHR &fenceGetFdInfo,
                                      int *fdOut) const
{
    ASSERT(valid());
    return vkGetFenceFdKHR(device, &fenceGetFdInfo, fdOut);
}
1958
1959 // QueryPool implementation.
// QueryPool implementation.
// Destroys the query pool if valid; safe to call on an empty wrapper.
ANGLE_INLINE void QueryPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyQueryPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

// Creates the query pool; must not already hold a handle.
ANGLE_INLINE VkResult QueryPool::init(VkDevice device, const VkQueryPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateQueryPool(device, &createInfo, nullptr, &mHandle);
}

// Copies results of |queryCount| queries starting at |firstQuery| into |data|.
ANGLE_INLINE VkResult QueryPool::getResults(VkDevice device,
                                            uint32_t firstQuery,
                                            uint32_t queryCount,
                                            size_t dataSize,
                                            void *data,
                                            VkDeviceSize stride,
                                            VkQueryResultFlags flags) const
{
    ASSERT(valid());
    return vkGetQueryPoolResults(device, mHandle, firstQuery, queryCount, dataSize, data, stride,
                                 flags);
}
1987
1988 // VirtualBlock implementation.
// VirtualBlock implementation (VMA virtual allocator; CPU-side bookkeeping).
// Destroys the virtual block if valid.  Note: |device| is unused — the block
// does not own GPU resources — but kept for interface symmetry with the other
// wrappers.
ANGLE_INLINE void VirtualBlock::destroy(VkDevice device)
{
    if (valid())
    {
        vma::DestroyVirtualBlock(mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}
1997
1998 ANGLE_INLINE VkResult VirtualBlock::init(VkDevice device,
1999 vma::VirtualBlockCreateFlags flags,
2000 VkDeviceSize size)
2001 {
2002 return vma::CreateVirtualBlock(size, flags, &mHandle);
2003 }
2004
// Sub-allocates |size| bytes with the given alignment from the virtual block,
// returning the opaque allocation and its offset.
ANGLE_INLINE VkResult VirtualBlock::allocate(VkDeviceSize size,
                                             VkDeviceSize alignment,
                                             VmaVirtualAllocation *allocationOut,
                                             VkDeviceSize *offsetOut)
{
    return vma::VirtualAllocate(mHandle, size, alignment, allocationOut, offsetOut);
}

// Returns a sub-allocation to the virtual block.
ANGLE_INLINE void VirtualBlock::free(VmaVirtualAllocation allocation, VkDeviceSize offset)
{
    vma::VirtualFree(mHandle, allocation, offset);
}

// Fills |pStatInfo| with usage statistics for the virtual block.
ANGLE_INLINE void VirtualBlock::calculateStats(vma::StatInfo *pStatInfo) const
{
    vma::CalculateVirtualBlockStats(mHandle, pStatInfo);
}
2022 } // namespace vk
2023 } // namespace rx
2024
2025 #endif // LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
2026