1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_wrapper:
//    Wrapper classes around Vulkan objects. In an ideal world we could generate this from
//    vk.xml, or reuse the generator in the vkhpp tool. For now this is manually generated, and
//    we must add missing functions and objects as we need them.
10
11 #ifndef LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
12 #define LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
13
14 #include "libANGLE/renderer/renderer_utils.h"
15 #include "libANGLE/renderer/vulkan/vk_headers.h"
16 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
17
18 namespace rx
19 {
20 namespace vk
21 {
22 // Helper macros that apply to all the wrapped object types.
23 // Unimplemented handle types:
24 // Instance
25 // PhysicalDevice
26 // Device
27 // Queue
28 // DescriptorSet
29
30 #define ANGLE_HANDLE_TYPES_X(FUNC) \
31 FUNC(Allocation) \
32 FUNC(Allocator) \
33 FUNC(Buffer) \
34 FUNC(BufferView) \
35 FUNC(CommandPool) \
36 FUNC(DescriptorPool) \
37 FUNC(DescriptorSetLayout) \
38 FUNC(DeviceMemory) \
39 FUNC(Event) \
40 FUNC(Fence) \
41 FUNC(Framebuffer) \
42 FUNC(Image) \
43 FUNC(ImageView) \
44 FUNC(Pipeline) \
45 FUNC(PipelineCache) \
46 FUNC(PipelineLayout) \
47 FUNC(QueryPool) \
48 FUNC(RenderPass) \
49 FUNC(Sampler) \
50 FUNC(Semaphore) \
51 FUNC(ShaderModule)
52
53 #define ANGLE_COMMA_SEP_FUNC(TYPE) TYPE,
54
55 enum class HandleType
56 {
57 Invalid,
58 CommandBuffer,
59 ANGLE_HANDLE_TYPES_X(ANGLE_COMMA_SEP_FUNC) EnumCount
60 };
61
62 #undef ANGLE_COMMA_SEP_FUNC
63
64 #define ANGLE_PRE_DECLARE_CLASS_FUNC(TYPE) class TYPE;
ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)
66 namespace priv
67 {
68 class CommandBuffer;
69 } // namespace priv
70 #undef ANGLE_PRE_DECLARE_CLASS_FUNC
71
72 // Returns the HandleType of a Vk Handle.
73 template <typename T>
74 struct HandleTypeHelper;
75
76 #define ANGLE_HANDLE_TYPE_HELPER_FUNC(TYPE) \
77 template <> \
78 struct HandleTypeHelper<TYPE> \
79 { \
80 constexpr static HandleType kHandleType = HandleType::TYPE; \
81 };
82
83 ANGLE_HANDLE_TYPES_X(ANGLE_HANDLE_TYPE_HELPER_FUNC)
84 template <>
85 struct HandleTypeHelper<priv::CommandBuffer>
86 {
87 constexpr static HandleType kHandleType = HandleType::CommandBuffer;
88 };
89
90 #undef ANGLE_HANDLE_TYPE_HELPER_FUNC
91
92 // Base class for all wrapped vulkan objects. Implements several common helper routines.
93 template <typename DerivedT, typename HandleT>
94 class WrappedObject : angle::NonCopyable
95 {
96 public:
97 HandleT getHandle() const { return mHandle; }
98 void setHandle(HandleT handle) { mHandle = handle; }
99 bool valid() const { return (mHandle != VK_NULL_HANDLE); }
100
101 const HandleT *ptr() const { return &mHandle; }
102
103 HandleT release()
104 {
105 HandleT handle = mHandle;
106 mHandle = VK_NULL_HANDLE;
107 return handle;
108 }
109
110 protected:
111 WrappedObject() : mHandle(VK_NULL_HANDLE) {}
112 ~WrappedObject() { ASSERT(!valid()); }
113
114 WrappedObject(WrappedObject &&other) : mHandle(other.mHandle)
115 {
116 other.mHandle = VK_NULL_HANDLE;
117 }
118
119 // Only works to initialize empty objects, since we don't have the device handle.
120 WrappedObject &operator=(WrappedObject &&other)
121 {
122 ASSERT(!valid());
123 std::swap(mHandle, other.mHandle);
124 return *this;
125 }
126
127 HandleT mHandle;
128 };
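
// A minimal lifecycle sketch (illustrative comment only, not part of the API). Wrappers start
// empty, are initialized against a caller-provided VkDevice, and must be destroy()ed or
// release()d before their destructor runs, since ~WrappedObject asserts !valid().
//
//     vk::Sampler sampler;
//     VkSamplerCreateInfo samplerInfo = {};
//     samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
//     VkResult result = sampler.init(device, samplerInfo);  // sampler.valid() is now true
//     // ... use sampler.getHandle() ...
//     sampler.destroy(device);  // must happen before the wrapper goes out of scope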
129
130 class CommandPool final : public WrappedObject<CommandPool, VkCommandPool>
131 {
132 public:
133 CommandPool() = default;
134
135 void destroy(VkDevice device);
136 VkResult reset(VkDevice device, VkCommandPoolResetFlags flags);
137 void freeCommandBuffers(VkDevice device,
138 uint32_t commandBufferCount,
139 const VkCommandBuffer *commandBuffers);
140
141 VkResult init(VkDevice device, const VkCommandPoolCreateInfo &createInfo);
142 };
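
// A hedged usage sketch (comment only): creating a pool for a known queue family. The |device|
// and |queueFamilyIndex| values are assumed to be provided by the renderer.
//
//     vk::CommandPool pool;
//     VkCommandPoolCreateInfo poolInfo = {};
//     poolInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
//     poolInfo.flags            = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
//     poolInfo.queueFamilyIndex = queueFamilyIndex;
//     VkResult result = pool.init(device, poolInfo);
//     // ... allocate and free command buffers ...
//     pool.destroy(device);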
143
144 class Pipeline final : public WrappedObject<Pipeline, VkPipeline>
145 {
146 public:
147 Pipeline() = default;
148 void destroy(VkDevice device);
149
150 VkResult initGraphics(VkDevice device,
151 const VkGraphicsPipelineCreateInfo &createInfo,
152 const PipelineCache &pipelineCacheVk);
153 VkResult initCompute(VkDevice device,
154 const VkComputePipelineCreateInfo &createInfo,
155 const PipelineCache &pipelineCacheVk);
156 };
157
158 namespace priv
159 {
160
161 // Helper class that wraps a Vulkan command buffer.
162 class CommandBuffer : public WrappedObject<CommandBuffer, VkCommandBuffer>
163 {
164 public:
165 CommandBuffer() = default;
166
167 VkCommandBuffer releaseHandle();
168
    // This is used for normal pool allocated command buffers. It resets the handle.
170 void destroy(VkDevice device);
171
172 // This is used in conjunction with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT.
173 void destroy(VkDevice device, const CommandPool &commandPool);
174
175 VkResult init(VkDevice device, const VkCommandBufferAllocateInfo &createInfo);
176
177 // There is no way to know if the command buffer contains any commands.
178 static bool CanKnowIfEmpty() { return false; }
179 bool empty() const { return false; }
180
181 using WrappedObject::operator=;
182
183 static bool SupportsQueries(const VkPhysicalDeviceFeatures &features)
184 {
185 return (features.inheritedQueries == VK_TRUE);
186 }
187
188 // Vulkan command buffers are executed as secondary command buffers within a primary command
189 // buffer.
190 static constexpr bool ExecutesInline() { return false; }
191
192 VkResult begin(const VkCommandBufferBeginInfo &info);
193
194 void beginQuery(const QueryPool &queryPool, uint32_t query, VkQueryControlFlags flags);
195
196 void beginRenderPass(const VkRenderPassBeginInfo &beginInfo, VkSubpassContents subpassContents);
197
198 void bindDescriptorSets(const PipelineLayout &layout,
199 VkPipelineBindPoint pipelineBindPoint,
200 uint32_t firstSet,
201 uint32_t descriptorSetCount,
202 const VkDescriptorSet *descriptorSets,
203 uint32_t dynamicOffsetCount,
204 const uint32_t *dynamicOffsets);
205 void bindGraphicsPipeline(const Pipeline &pipeline);
206 void bindComputePipeline(const Pipeline &pipeline);
207 void bindPipeline(VkPipelineBindPoint pipelineBindPoint, const Pipeline &pipeline);
208
209 void bindIndexBuffer(const Buffer &buffer, VkDeviceSize offset, VkIndexType indexType);
210 void bindVertexBuffers(uint32_t firstBinding,
211 uint32_t bindingCount,
212 const VkBuffer *buffers,
213 const VkDeviceSize *offsets);
214
215 void blitImage(const Image &srcImage,
216 VkImageLayout srcImageLayout,
217 const Image &dstImage,
218 VkImageLayout dstImageLayout,
219 uint32_t regionCount,
220 const VkImageBlit *regions,
221 VkFilter filter);
222
223 void clearColorImage(const Image &image,
224 VkImageLayout imageLayout,
225 const VkClearColorValue &color,
226 uint32_t rangeCount,
227 const VkImageSubresourceRange *ranges);
228 void clearDepthStencilImage(const Image &image,
229 VkImageLayout imageLayout,
230 const VkClearDepthStencilValue &depthStencil,
231 uint32_t rangeCount,
232 const VkImageSubresourceRange *ranges);
233
234 void clearAttachments(uint32_t attachmentCount,
235 const VkClearAttachment *attachments,
236 uint32_t rectCount,
237 const VkClearRect *rects);
238
239 void copyBuffer(const Buffer &srcBuffer,
240 const Buffer &destBuffer,
241 uint32_t regionCount,
242 const VkBufferCopy *regions);
243
244 void copyBufferToImage(VkBuffer srcBuffer,
245 const Image &dstImage,
246 VkImageLayout dstImageLayout,
247 uint32_t regionCount,
248 const VkBufferImageCopy *regions);
249 void copyImageToBuffer(const Image &srcImage,
250 VkImageLayout srcImageLayout,
251 VkBuffer dstBuffer,
252 uint32_t regionCount,
253 const VkBufferImageCopy *regions);
254 void copyImage(const Image &srcImage,
255 VkImageLayout srcImageLayout,
256 const Image &dstImage,
257 VkImageLayout dstImageLayout,
258 uint32_t regionCount,
259 const VkImageCopy *regions);
260
261 void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
262 void dispatchIndirect(const Buffer &buffer, VkDeviceSize offset);
263
264 void draw(uint32_t vertexCount,
265 uint32_t instanceCount,
266 uint32_t firstVertex,
267 uint32_t firstInstance);
268 void draw(uint32_t vertexCount, uint32_t firstVertex);
269 void drawInstanced(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex);
270 void drawInstancedBaseInstance(uint32_t vertexCount,
271 uint32_t instanceCount,
272 uint32_t firstVertex,
273 uint32_t firstInstance);
274 void drawIndexed(uint32_t indexCount,
275 uint32_t instanceCount,
276 uint32_t firstIndex,
277 int32_t vertexOffset,
278 uint32_t firstInstance);
279 void drawIndexed(uint32_t indexCount);
280 void drawIndexedBaseVertex(uint32_t indexCount, uint32_t vertexOffset);
281 void drawIndexedInstanced(uint32_t indexCount, uint32_t instanceCount);
282 void drawIndexedInstancedBaseVertex(uint32_t indexCount,
283 uint32_t instanceCount,
284 uint32_t vertexOffset);
285 void drawIndexedInstancedBaseVertexBaseInstance(uint32_t indexCount,
286 uint32_t instanceCount,
287 uint32_t firstIndex,
288 int32_t vertexOffset,
289 uint32_t firstInstance);
290 void drawIndexedIndirect(const Buffer &buffer,
291 VkDeviceSize offset,
292 uint32_t drawCount,
293 uint32_t stride);
294 void drawIndirect(const Buffer &buffer,
295 VkDeviceSize offset,
296 uint32_t drawCount,
297 uint32_t stride);
298
299 VkResult end();
300 void endQuery(const QueryPool &queryPool, uint32_t query);
301 void endRenderPass();
302 void executeCommands(uint32_t commandBufferCount, const CommandBuffer *commandBuffers);
303
304 void getMemoryUsageStats(size_t *usedMemoryOut, size_t *allocatedMemoryOut) const;
305
306 void executionBarrier(VkPipelineStageFlags stageMask);
307
308 void fillBuffer(const Buffer &dstBuffer,
309 VkDeviceSize dstOffset,
310 VkDeviceSize size,
311 uint32_t data);
312
313 void bufferBarrier(VkPipelineStageFlags srcStageMask,
314 VkPipelineStageFlags dstStageMask,
315 const VkBufferMemoryBarrier *bufferMemoryBarrier);
316
317 void imageBarrier(VkPipelineStageFlags srcStageMask,
318 VkPipelineStageFlags dstStageMask,
319 const VkImageMemoryBarrier &imageMemoryBarrier);
320
321 void memoryBarrier(VkPipelineStageFlags srcStageMask,
322 VkPipelineStageFlags dstStageMask,
323 const VkMemoryBarrier *memoryBarrier);
324
325 void pipelineBarrier(VkPipelineStageFlags srcStageMask,
326 VkPipelineStageFlags dstStageMask,
327 VkDependencyFlags dependencyFlags,
328 uint32_t memoryBarrierCount,
329 const VkMemoryBarrier *memoryBarriers,
330 uint32_t bufferMemoryBarrierCount,
331 const VkBufferMemoryBarrier *bufferMemoryBarriers,
332 uint32_t imageMemoryBarrierCount,
333 const VkImageMemoryBarrier *imageMemoryBarriers);
334
335 void pushConstants(const PipelineLayout &layout,
336 VkShaderStageFlags flag,
337 uint32_t offset,
338 uint32_t size,
339 const void *data);
340
341 void setEvent(VkEvent event, VkPipelineStageFlags stageMask);
342 VkResult reset();
343 void resetEvent(VkEvent event, VkPipelineStageFlags stageMask);
344 void resetQueryPool(const QueryPool &queryPool, uint32_t firstQuery, uint32_t queryCount);
345 void resolveImage(const Image &srcImage,
346 VkImageLayout srcImageLayout,
347 const Image &dstImage,
348 VkImageLayout dstImageLayout,
349 uint32_t regionCount,
350 const VkImageResolve *regions);
351 void waitEvents(uint32_t eventCount,
352 const VkEvent *events,
353 VkPipelineStageFlags srcStageMask,
354 VkPipelineStageFlags dstStageMask,
355 uint32_t memoryBarrierCount,
356 const VkMemoryBarrier *memoryBarriers,
357 uint32_t bufferMemoryBarrierCount,
358 const VkBufferMemoryBarrier *bufferMemoryBarriers,
359 uint32_t imageMemoryBarrierCount,
360 const VkImageMemoryBarrier *imageMemoryBarriers);
361
362 void writeTimestamp(VkPipelineStageFlagBits pipelineStage,
363 const QueryPool &queryPool,
364 uint32_t query);
365
366 // VK_EXT_transform_feedback
367 void beginTransformFeedbackEXT(uint32_t firstCounterBuffer,
368 uint32_t counterBufferCount,
369 const VkBuffer *counterBuffers,
370 const VkDeviceSize *counterBufferOffsets);
371 void endTransformFeedbackEXT(uint32_t firstCounterBuffer,
372 uint32_t counterBufferCount,
373 const VkBuffer *counterBuffers,
374 const VkDeviceSize *counterBufferOffsets);
375 void bindTransformFeedbackBuffersEXT(uint32_t firstBinding,
376 uint32_t bindingCount,
377 const VkBuffer *buffers,
378 const VkDeviceSize *offsets,
379 const VkDeviceSize *sizes);
380
381 // VK_EXT_debug_utils
382 void beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
383 void endDebugUtilsLabelEXT();
384 void insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
385 };
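
// A hedged usage sketch (comment only): allocating a primary command buffer from an existing
// vk::CommandPool and recording a trivial command. Error handling and queue submission are
// omitted; |device| and |pool| are assumed to come from the caller.
//
//     CommandBuffer commandBuffer;
//     VkCommandBufferAllocateInfo allocInfo = {};
//     allocInfo.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
//     allocInfo.commandPool        = pool.getHandle();
//     allocInfo.level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
//     allocInfo.commandBufferCount = 1;
//     (void)commandBuffer.init(device, allocInfo);
//
//     VkCommandBufferBeginInfo beginInfo = {};
//     beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
//     (void)commandBuffer.begin(beginInfo);
//     commandBuffer.dispatch(1, 1, 1);
//     (void)commandBuffer.end();
//     // ... submit, wait, then free:
//     commandBuffer.destroy(device, pool);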
386 } // namespace priv
387
388 class Image final : public WrappedObject<Image, VkImage>
389 {
390 public:
391 Image() = default;
392
393 // Use this method if the lifetime of the image is not controlled by ANGLE. (SwapChain)
394 void setHandle(VkImage handle);
395
396 // Called on shutdown when the helper class *doesn't* own the handle to the image resource.
397 void reset();
398
399 // Called on shutdown when the helper class *does* own the handle to the image resource.
400 void destroy(VkDevice device);
401
402 VkResult init(VkDevice device, const VkImageCreateInfo &createInfo);
403
404 void getMemoryRequirements(VkDevice device, VkMemoryRequirements *requirementsOut) const;
405 VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);
406
407 void getSubresourceLayout(VkDevice device,
408 VkImageAspectFlagBits aspectMask,
409 uint32_t mipLevel,
410 uint32_t arrayLayer,
411 VkSubresourceLayout *outSubresourceLayout) const;
412 };
413
414 class ImageView final : public WrappedObject<ImageView, VkImageView>
415 {
416 public:
417 ImageView() = default;
418 void destroy(VkDevice device);
419
420 VkResult init(VkDevice device, const VkImageViewCreateInfo &createInfo);
421 };
422
423 class Semaphore final : public WrappedObject<Semaphore, VkSemaphore>
424 {
425 public:
426 Semaphore() = default;
427 void destroy(VkDevice device);
428
429 VkResult init(VkDevice device);
430 VkResult init(VkDevice device, const VkSemaphoreCreateInfo &createInfo);
431 VkResult importFd(VkDevice device, const VkImportSemaphoreFdInfoKHR &importFdInfo) const;
432 };
433
434 class Framebuffer final : public WrappedObject<Framebuffer, VkFramebuffer>
435 {
436 public:
437 Framebuffer() = default;
438 void destroy(VkDevice device);
439
440 // Use this method only in necessary cases. (RenderPass)
441 void setHandle(VkFramebuffer handle);
442
443 VkResult init(VkDevice device, const VkFramebufferCreateInfo &createInfo);
444 };
445
446 class DeviceMemory final : public WrappedObject<DeviceMemory, VkDeviceMemory>
447 {
448 public:
449 DeviceMemory() = default;
450 void destroy(VkDevice device);
451
452 VkResult allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo);
453 VkResult map(VkDevice device,
454 VkDeviceSize offset,
455 VkDeviceSize size,
456 VkMemoryMapFlags flags,
457 uint8_t **mapPointer) const;
458 void unmap(VkDevice device) const;
459 };
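
// A hedged usage sketch (comment only): backing a vk::Image with vk::DeviceMemory. Selecting
// |memoryTypeIndex| from VkPhysicalDeviceMemoryProperties against requirements.memoryTypeBits is
// left out for brevity; |device| and |imageCreateInfo| are assumed to be provided by the caller.
//
//     vk::Image image;
//     (void)image.init(device, imageCreateInfo);
//
//     VkMemoryRequirements requirements = {};
//     image.getMemoryRequirements(device, &requirements);
//
//     VkMemoryAllocateInfo allocInfo = {};
//     allocInfo.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
//     allocInfo.allocationSize  = requirements.size;
//     allocInfo.memoryTypeIndex = memoryTypeIndex;
//
//     vk::DeviceMemory memory;
//     (void)memory.allocate(device, allocInfo);
//     (void)image.bindMemory(device, memory);
//     // ... on shutdown:
//     image.destroy(device);
//     memory.destroy(device);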
460
461 class Allocator : public WrappedObject<Allocator, VmaAllocator>
462 {
463 public:
464 Allocator() = default;
465 void destroy();
466
467 VkResult init(VkPhysicalDevice physicalDevice,
468 VkDevice device,
469 VkInstance instance,
470 uint32_t apiVersion);
471
472 // Initializes the buffer handle and memory allocation.
473 VkResult createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
474 VkMemoryPropertyFlags requiredFlags,
475 VkMemoryPropertyFlags preferredFlags,
476 bool persistentlyMappedBuffers,
477 uint32_t *memoryTypeIndexOut,
478 Buffer *bufferOut,
479 Allocation *allocationOut) const;
480
481 void getMemoryTypeProperties(uint32_t memoryTypeIndex, VkMemoryPropertyFlags *flagsOut) const;
482 VkResult findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
483 VkMemoryPropertyFlags requiredFlags,
484 VkMemoryPropertyFlags preferredFlags,
485 bool persistentlyMappedBuffers,
486 uint32_t *memoryTypeIndexOut) const;
487 };
488
489 class Allocation final : public WrappedObject<Allocation, VmaAllocation>
490 {
491 public:
492 Allocation() = default;
493 void destroy(const Allocator &allocator);
494
495 VkResult map(const Allocator &allocator, uint8_t **mapPointer) const;
496 void unmap(const Allocator &allocator) const;
497 void flush(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size);
498 void invalidate(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size);
499
500 private:
501 friend class Allocator;
502 };
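
// A hedged usage sketch (comment only): creating a host-visible buffer through the VMA-backed
// Allocator, then mapping and flushing its allocation. |allocator|, |device| and
// |bufferCreateInfo| are assumed to be set up by the caller.
//
//     vk::Buffer buffer;
//     vk::Allocation allocation;
//     uint32_t memoryTypeIndex = 0;
//     (void)allocator.createBuffer(bufferCreateInfo, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, 0,
//                                  false, &memoryTypeIndex, &buffer, &allocation);
//
//     uint8_t *mapped = nullptr;
//     (void)allocation.map(allocator, &mapped);
//     // ... write data through |mapped| ...
//     allocation.flush(allocator, 0, VK_WHOLE_SIZE);
//     allocation.unmap(allocator);
//     // ... on shutdown:
//     buffer.destroy(device);
//     allocation.destroy(allocator);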
503
504 class RenderPass final : public WrappedObject<RenderPass, VkRenderPass>
505 {
506 public:
507 RenderPass() = default;
508 void destroy(VkDevice device);
509
510 VkResult init(VkDevice device, const VkRenderPassCreateInfo &createInfo);
511 };
512
513 enum class StagingUsage
514 {
515 Read,
516 Write,
517 Both,
518 };
519
520 class Buffer final : public WrappedObject<Buffer, VkBuffer>
521 {
522 public:
523 Buffer() = default;
524 void destroy(VkDevice device);
525
526 VkResult init(VkDevice device, const VkBufferCreateInfo &createInfo);
527 VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);
528 void getMemoryRequirements(VkDevice device, VkMemoryRequirements *memoryRequirementsOut);
529
530 private:
531 friend class Allocator;
532 };
533
534 class BufferView final : public WrappedObject<BufferView, VkBufferView>
535 {
536 public:
537 BufferView() = default;
538 void destroy(VkDevice device);
539
540 VkResult init(VkDevice device, const VkBufferViewCreateInfo &createInfo);
541 };
542
543 class ShaderModule final : public WrappedObject<ShaderModule, VkShaderModule>
544 {
545 public:
546 ShaderModule() = default;
547 void destroy(VkDevice device);
548
549 VkResult init(VkDevice device, const VkShaderModuleCreateInfo &createInfo);
550 };
551
552 class PipelineLayout final : public WrappedObject<PipelineLayout, VkPipelineLayout>
553 {
554 public:
555 PipelineLayout() = default;
556 void destroy(VkDevice device);
557
558 VkResult init(VkDevice device, const VkPipelineLayoutCreateInfo &createInfo);
559 };
560
561 class PipelineCache final : public WrappedObject<PipelineCache, VkPipelineCache>
562 {
563 public:
564 PipelineCache() = default;
565 void destroy(VkDevice device);
566
567 VkResult init(VkDevice device, const VkPipelineCacheCreateInfo &createInfo);
568 VkResult getCacheData(VkDevice device, size_t *cacheSize, void *cacheData);
569 VkResult merge(VkDevice device,
570 VkPipelineCache dstCache,
571 uint32_t srcCacheCount,
572 const VkPipelineCache *srcCaches);
573 };
574
575 class DescriptorSetLayout final : public WrappedObject<DescriptorSetLayout, VkDescriptorSetLayout>
576 {
577 public:
578 DescriptorSetLayout() = default;
579 void destroy(VkDevice device);
580
581 VkResult init(VkDevice device, const VkDescriptorSetLayoutCreateInfo &createInfo);
582 };
583
584 class DescriptorPool final : public WrappedObject<DescriptorPool, VkDescriptorPool>
585 {
586 public:
587 DescriptorPool() = default;
588 void destroy(VkDevice device);
589
590 VkResult init(VkDevice device, const VkDescriptorPoolCreateInfo &createInfo);
591
592 VkResult allocateDescriptorSets(VkDevice device,
593 const VkDescriptorSetAllocateInfo &allocInfo,
594 VkDescriptorSet *descriptorSetsOut);
595 VkResult freeDescriptorSets(VkDevice device,
596 uint32_t descriptorSetCount,
597 const VkDescriptorSet *descriptorSets);
598 };
599
600 class Sampler final : public WrappedObject<Sampler, VkSampler>
601 {
602 public:
603 Sampler() = default;
604 void destroy(VkDevice device);
605 VkResult init(VkDevice device, const VkSamplerCreateInfo &createInfo);
606 };
607
608 class Event final : public WrappedObject<Event, VkEvent>
609 {
610 public:
611 Event() = default;
612 void destroy(VkDevice device);
613 using WrappedObject::operator=;
614
615 VkResult init(VkDevice device, const VkEventCreateInfo &createInfo);
616 VkResult getStatus(VkDevice device) const;
617 VkResult set(VkDevice device) const;
618 VkResult reset(VkDevice device) const;
619 };
620
621 class Fence final : public WrappedObject<Fence, VkFence>
622 {
623 public:
624 Fence() = default;
625 void destroy(VkDevice device);
626 using WrappedObject::operator=;
627
628 VkResult init(VkDevice device, const VkFenceCreateInfo &createInfo);
629 VkResult reset(VkDevice device);
630 VkResult getStatus(VkDevice device) const;
631 VkResult wait(VkDevice device, uint64_t timeout) const;
632 VkResult importFd(VkDevice device, const VkImportFenceFdInfoKHR &importFenceFdInfo) const;
    VkResult exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo, int *fdOut) const;
634 };
635
636 class QueryPool final : public WrappedObject<QueryPool, VkQueryPool>
637 {
638 public:
639 QueryPool() = default;
640 void destroy(VkDevice device);
641
642 VkResult init(VkDevice device, const VkQueryPoolCreateInfo &createInfo);
643 VkResult getResults(VkDevice device,
644 uint32_t firstQuery,
645 uint32_t queryCount,
646 size_t dataSize,
647 void *data,
648 VkDeviceSize stride,
649 VkQueryResultFlags flags) const;
650 };
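
// A hedged usage sketch (comment only): recording a timestamp query and reading it back once the
// command buffer has finished executing on the GPU. |queryPool| is assumed to have been created
// with VK_QUERY_TYPE_TIMESTAMP and at least one query; |commandBuffer| is a recording
// priv::CommandBuffer.
//
//     commandBuffer.resetQueryPool(queryPool, 0, 1);
//     commandBuffer.writeTimestamp(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, queryPool, 0);
//     // ... submit and wait ...
//     uint64_t timestamp = 0;
//     (void)queryPool.getResults(device, 0, 1, sizeof(timestamp), &timestamp, sizeof(timestamp),
//                                VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);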
651
652 // CommandPool implementation.
653 ANGLE_INLINE void CommandPool::destroy(VkDevice device)
654 {
655 if (valid())
656 {
657 vkDestroyCommandPool(device, mHandle, nullptr);
658 mHandle = VK_NULL_HANDLE;
659 }
660 }
661
662 ANGLE_INLINE VkResult CommandPool::reset(VkDevice device, VkCommandPoolResetFlags flags)
663 {
664 ASSERT(valid());
665 return vkResetCommandPool(device, mHandle, flags);
666 }
667
668 ANGLE_INLINE void CommandPool::freeCommandBuffers(VkDevice device,
669 uint32_t commandBufferCount,
670 const VkCommandBuffer *commandBuffers)
671 {
672 ASSERT(valid());
673 vkFreeCommandBuffers(device, mHandle, commandBufferCount, commandBuffers);
674 }
675
676 ANGLE_INLINE VkResult CommandPool::init(VkDevice device, const VkCommandPoolCreateInfo &createInfo)
677 {
678 ASSERT(!valid());
679 return vkCreateCommandPool(device, &createInfo, nullptr, &mHandle);
680 }
681
682 namespace priv
683 {
684
685 // CommandBuffer implementation.
686 ANGLE_INLINE VkCommandBuffer CommandBuffer::releaseHandle()
687 {
688 VkCommandBuffer handle = mHandle;
689 mHandle = nullptr;
690 return handle;
691 }
692
693 ANGLE_INLINE VkResult CommandBuffer::init(VkDevice device,
694 const VkCommandBufferAllocateInfo &createInfo)
695 {
696 ASSERT(!valid());
697 return vkAllocateCommandBuffers(device, &createInfo, &mHandle);
698 }
699
700 ANGLE_INLINE void CommandBuffer::blitImage(const Image &srcImage,
701 VkImageLayout srcImageLayout,
702 const Image &dstImage,
703 VkImageLayout dstImageLayout,
704 uint32_t regionCount,
705 const VkImageBlit *regions,
706 VkFilter filter)
707 {
708 ASSERT(valid() && srcImage.valid() && dstImage.valid());
709 ASSERT(regionCount == 1);
710 vkCmdBlitImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
711 dstImageLayout, 1, regions, filter);
712 }
713
714 ANGLE_INLINE VkResult CommandBuffer::begin(const VkCommandBufferBeginInfo &info)
715 {
716 ASSERT(valid());
717 return vkBeginCommandBuffer(mHandle, &info);
718 }
719
720 ANGLE_INLINE VkResult CommandBuffer::end()
721 {
722 ASSERT(valid());
723 return vkEndCommandBuffer(mHandle);
724 }
725
726 ANGLE_INLINE VkResult CommandBuffer::reset()
727 {
728 ASSERT(valid());
729 return vkResetCommandBuffer(mHandle, 0);
730 }
731
732 ANGLE_INLINE void CommandBuffer::memoryBarrier(VkPipelineStageFlags srcStageMask,
733 VkPipelineStageFlags dstStageMask,
734 const VkMemoryBarrier *memoryBarrier)
735 {
736 ASSERT(valid());
737 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 1, memoryBarrier, 0, nullptr, 0,
738 nullptr);
739 }
740
741 ANGLE_INLINE void CommandBuffer::pipelineBarrier(VkPipelineStageFlags srcStageMask,
742 VkPipelineStageFlags dstStageMask,
743 VkDependencyFlags dependencyFlags,
744 uint32_t memoryBarrierCount,
745 const VkMemoryBarrier *memoryBarriers,
746 uint32_t bufferMemoryBarrierCount,
747 const VkBufferMemoryBarrier *bufferMemoryBarriers,
748 uint32_t imageMemoryBarrierCount,
749 const VkImageMemoryBarrier *imageMemoryBarriers)
750 {
751 ASSERT(valid());
752 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount,
753 memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
754 imageMemoryBarrierCount, imageMemoryBarriers);
755 }
756
757 ANGLE_INLINE void CommandBuffer::executionBarrier(VkPipelineStageFlags stageMask)
758 {
759 ASSERT(valid());
760 vkCmdPipelineBarrier(mHandle, stageMask, stageMask, 0, 0, nullptr, 0, nullptr, 0, nullptr);
761 }
762
763 ANGLE_INLINE void CommandBuffer::bufferBarrier(VkPipelineStageFlags srcStageMask,
764 VkPipelineStageFlags dstStageMask,
765 const VkBufferMemoryBarrier *bufferMemoryBarrier)
766 {
767 ASSERT(valid());
768 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 1, bufferMemoryBarrier,
769 0, nullptr);
770 }
771
772 ANGLE_INLINE void CommandBuffer::imageBarrier(VkPipelineStageFlags srcStageMask,
773 VkPipelineStageFlags dstStageMask,
774 const VkImageMemoryBarrier &imageMemoryBarrier)
775 {
776 ASSERT(valid());
777 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1,
778 &imageMemoryBarrier);
779 }
780
781 ANGLE_INLINE void CommandBuffer::destroy(VkDevice device)
782 {
783 releaseHandle();
784 }
785
786 ANGLE_INLINE void CommandBuffer::destroy(VkDevice device, const vk::CommandPool &commandPool)
787 {
788 if (valid())
789 {
790 ASSERT(commandPool.valid());
791 vkFreeCommandBuffers(device, commandPool.getHandle(), 1, &mHandle);
792 mHandle = VK_NULL_HANDLE;
793 }
794 }
795
796 ANGLE_INLINE void CommandBuffer::copyBuffer(const Buffer &srcBuffer,
797 const Buffer &destBuffer,
798 uint32_t regionCount,
799 const VkBufferCopy *regions)
800 {
801 ASSERT(valid() && srcBuffer.valid() && destBuffer.valid());
802 vkCmdCopyBuffer(mHandle, srcBuffer.getHandle(), destBuffer.getHandle(), regionCount, regions);
803 }
804
805 ANGLE_INLINE void CommandBuffer::copyBufferToImage(VkBuffer srcBuffer,
806 const Image &dstImage,
807 VkImageLayout dstImageLayout,
808 uint32_t regionCount,
809 const VkBufferImageCopy *regions)
810 {
811 ASSERT(valid() && dstImage.valid());
812 ASSERT(srcBuffer != VK_NULL_HANDLE);
813 ASSERT(regionCount == 1);
814 vkCmdCopyBufferToImage(mHandle, srcBuffer, dstImage.getHandle(), dstImageLayout, 1, regions);
815 }
816
817 ANGLE_INLINE void CommandBuffer::copyImageToBuffer(const Image &srcImage,
818 VkImageLayout srcImageLayout,
819 VkBuffer dstBuffer,
820 uint32_t regionCount,
821 const VkBufferImageCopy *regions)
822 {
823 ASSERT(valid() && srcImage.valid());
824 ASSERT(dstBuffer != VK_NULL_HANDLE);
825 ASSERT(regionCount == 1);
826 vkCmdCopyImageToBuffer(mHandle, srcImage.getHandle(), srcImageLayout, dstBuffer, 1, regions);
827 }
828
829 ANGLE_INLINE void CommandBuffer::clearColorImage(const Image &image,
830 VkImageLayout imageLayout,
831 const VkClearColorValue &color,
832 uint32_t rangeCount,
833 const VkImageSubresourceRange *ranges)
834 {
835 ASSERT(valid());
836 ASSERT(rangeCount == 1);
837 vkCmdClearColorImage(mHandle, image.getHandle(), imageLayout, &color, 1, ranges);
838 }
839
840 ANGLE_INLINE void CommandBuffer::clearDepthStencilImage(
841 const Image &image,
842 VkImageLayout imageLayout,
843 const VkClearDepthStencilValue &depthStencil,
844 uint32_t rangeCount,
845 const VkImageSubresourceRange *ranges)
846 {
847 ASSERT(valid());
848 ASSERT(rangeCount == 1);
849 vkCmdClearDepthStencilImage(mHandle, image.getHandle(), imageLayout, &depthStencil, 1, ranges);
850 }
851
852 ANGLE_INLINE void CommandBuffer::clearAttachments(uint32_t attachmentCount,
853 const VkClearAttachment *attachments,
854 uint32_t rectCount,
855 const VkClearRect *rects)
856 {
857 ASSERT(valid());
858 vkCmdClearAttachments(mHandle, attachmentCount, attachments, rectCount, rects);
859 }
860
861 ANGLE_INLINE void CommandBuffer::copyImage(const Image &srcImage,
862 VkImageLayout srcImageLayout,
863 const Image &dstImage,
864 VkImageLayout dstImageLayout,
865 uint32_t regionCount,
866 const VkImageCopy *regions)
867 {
868 ASSERT(valid() && srcImage.valid() && dstImage.valid());
869 ASSERT(regionCount == 1);
870 vkCmdCopyImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
871 dstImageLayout, 1, regions);
872 }
873
874 ANGLE_INLINE void CommandBuffer::beginRenderPass(const VkRenderPassBeginInfo &beginInfo,
875 VkSubpassContents subpassContents)
876 {
877 ASSERT(valid());
878 vkCmdBeginRenderPass(mHandle, &beginInfo, subpassContents);
879 }
880
881 ANGLE_INLINE void CommandBuffer::endRenderPass()
882 {
883 ASSERT(mHandle != VK_NULL_HANDLE);
884 vkCmdEndRenderPass(mHandle);
885 }
886
887 ANGLE_INLINE void CommandBuffer::bindIndexBuffer(const Buffer &buffer,
888 VkDeviceSize offset,
889 VkIndexType indexType)
890 {
891 ASSERT(valid());
892 vkCmdBindIndexBuffer(mHandle, buffer.getHandle(), offset, indexType);
893 }
894
895 ANGLE_INLINE void CommandBuffer::bindDescriptorSets(const PipelineLayout &layout,
896 VkPipelineBindPoint pipelineBindPoint,
897 uint32_t firstSet,
898 uint32_t descriptorSetCount,
899 const VkDescriptorSet *descriptorSets,
900 uint32_t dynamicOffsetCount,
901 const uint32_t *dynamicOffsets)
902 {
903 ASSERT(valid() && layout.valid());
904 vkCmdBindDescriptorSets(mHandle, pipelineBindPoint, layout.getHandle(), firstSet,
905 descriptorSetCount, descriptorSets, dynamicOffsetCount, dynamicOffsets);
906 }
907
908 ANGLE_INLINE void CommandBuffer::executeCommands(uint32_t commandBufferCount,
909 const CommandBuffer *commandBuffers)
910 {
911 ASSERT(valid());
912 vkCmdExecuteCommands(mHandle, commandBufferCount, commandBuffers[0].ptr());
913 }
914
915 ANGLE_INLINE void CommandBuffer::getMemoryUsageStats(size_t *usedMemoryOut,
916 size_t *allocatedMemoryOut) const
917 {
918 // No data available.
919 *usedMemoryOut = 0;
920 *allocatedMemoryOut = 1;
921 }
922
923 ANGLE_INLINE void CommandBuffer::fillBuffer(const Buffer &dstBuffer,
924 VkDeviceSize dstOffset,
925 VkDeviceSize size,
926 uint32_t data)
927 {
928 ASSERT(valid());
929 vkCmdFillBuffer(mHandle, dstBuffer.getHandle(), dstOffset, size, data);
930 }
931
932 ANGLE_INLINE void CommandBuffer::pushConstants(const PipelineLayout &layout,
933 VkShaderStageFlags flag,
934 uint32_t offset,
935 uint32_t size,
936 const void *data)
937 {
938 ASSERT(valid() && layout.valid());
939 ASSERT(offset == 0);
940 vkCmdPushConstants(mHandle, layout.getHandle(), flag, 0, size, data);
941 }
942
943 ANGLE_INLINE void CommandBuffer::setEvent(VkEvent event, VkPipelineStageFlags stageMask)
944 {
945 ASSERT(valid() && event != VK_NULL_HANDLE);
946 vkCmdSetEvent(mHandle, event, stageMask);
947 }
948
949 ANGLE_INLINE void CommandBuffer::resetEvent(VkEvent event, VkPipelineStageFlags stageMask)
950 {
951 ASSERT(valid() && event != VK_NULL_HANDLE);
952 vkCmdResetEvent(mHandle, event, stageMask);
953 }
954
955 ANGLE_INLINE void CommandBuffer::waitEvents(uint32_t eventCount,
956 const VkEvent *events,
957 VkPipelineStageFlags srcStageMask,
958 VkPipelineStageFlags dstStageMask,
959 uint32_t memoryBarrierCount,
960 const VkMemoryBarrier *memoryBarriers,
961 uint32_t bufferMemoryBarrierCount,
962 const VkBufferMemoryBarrier *bufferMemoryBarriers,
963 uint32_t imageMemoryBarrierCount,
964 const VkImageMemoryBarrier *imageMemoryBarriers)
965 {
966 ASSERT(valid());
967 vkCmdWaitEvents(mHandle, eventCount, events, srcStageMask, dstStageMask, memoryBarrierCount,
968 memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
969 imageMemoryBarrierCount, imageMemoryBarriers);
970 }
971
972 ANGLE_INLINE void CommandBuffer::resetQueryPool(const QueryPool &queryPool,
973 uint32_t firstQuery,
974 uint32_t queryCount)
975 {
976 ASSERT(valid() && queryPool.valid());
977 vkCmdResetQueryPool(mHandle, queryPool.getHandle(), firstQuery, queryCount);
978 }
979
980 ANGLE_INLINE void CommandBuffer::resolveImage(const Image &srcImage,
981 VkImageLayout srcImageLayout,
982 const Image &dstImage,
983 VkImageLayout dstImageLayout,
984 uint32_t regionCount,
985 const VkImageResolve *regions)
986 {
987 ASSERT(valid() && srcImage.valid() && dstImage.valid());
988 vkCmdResolveImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
989 dstImageLayout, regionCount, regions);
990 }
991
992 ANGLE_INLINE void CommandBuffer::beginQuery(const QueryPool &queryPool,
993 uint32_t query,
994 VkQueryControlFlags flags)
995 {
996 ASSERT(valid() && queryPool.valid());
997 vkCmdBeginQuery(mHandle, queryPool.getHandle(), query, flags);
998 }
999
1000 ANGLE_INLINE void CommandBuffer::endQuery(const QueryPool &queryPool, uint32_t query)
1001 {
1002 ASSERT(valid() && queryPool.valid());
1003 vkCmdEndQuery(mHandle, queryPool.getHandle(), query);
1004 }
1005
1006 ANGLE_INLINE void CommandBuffer::writeTimestamp(VkPipelineStageFlagBits pipelineStage,
1007 const QueryPool &queryPool,
1008 uint32_t query)
1009 {
1010 ASSERT(valid());
1011 vkCmdWriteTimestamp(mHandle, pipelineStage, queryPool.getHandle(), query);
1012 }
1013
1014 ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount,
1015 uint32_t instanceCount,
1016 uint32_t firstVertex,
1017 uint32_t firstInstance)
1018 {
1019 ASSERT(valid());
1020 vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
1021 }
1022
1023 ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount, uint32_t firstVertex)
1024 {
1025 ASSERT(valid());
1026 vkCmdDraw(mHandle, vertexCount, 1, firstVertex, 0);
1027 }
1028
1029 ANGLE_INLINE void CommandBuffer::drawInstanced(uint32_t vertexCount,
1030 uint32_t instanceCount,
1031 uint32_t firstVertex)
1032 {
1033 ASSERT(valid());
1034 vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, 0);
1035 }
1036
1037 ANGLE_INLINE void CommandBuffer::drawInstancedBaseInstance(uint32_t vertexCount,
1038 uint32_t instanceCount,
1039 uint32_t firstVertex,
1040 uint32_t firstInstance)
1041 {
1042 ASSERT(valid());
1043 vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
1044 }
1045
1046 ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount,
1047 uint32_t instanceCount,
1048 uint32_t firstIndex,
1049 int32_t vertexOffset,
1050 uint32_t firstInstance)
1051 {
1052 ASSERT(valid());
1053 vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
1054 }
1055
1056 ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount)
1057 {
1058 ASSERT(valid());
1059 vkCmdDrawIndexed(mHandle, indexCount, 1, 0, 0, 0);
1060 }
1061
1062 ANGLE_INLINE void CommandBuffer::drawIndexedBaseVertex(uint32_t indexCount, uint32_t vertexOffset)
1063 {
1064 ASSERT(valid());
1065 vkCmdDrawIndexed(mHandle, indexCount, 1, 0, vertexOffset, 0);
1066 }
1067
1068 ANGLE_INLINE void CommandBuffer::drawIndexedInstanced(uint32_t indexCount, uint32_t instanceCount)
1069 {
1070 ASSERT(valid());
1071 vkCmdDrawIndexed(mHandle, indexCount, instanceCount, 0, 0, 0);
1072 }
1073
1074 ANGLE_INLINE void CommandBuffer::drawIndexedInstancedBaseVertex(uint32_t indexCount,
1075 uint32_t instanceCount,
1076 uint32_t vertexOffset)
1077 {
1078 ASSERT(valid());
1079 vkCmdDrawIndexed(mHandle, indexCount, instanceCount, 0, vertexOffset, 0);
1080 }
1081
1082 ANGLE_INLINE void CommandBuffer::drawIndexedInstancedBaseVertexBaseInstance(uint32_t indexCount,
1083 uint32_t instanceCount,
1084 uint32_t firstIndex,
1085 int32_t vertexOffset,
1086 uint32_t firstInstance)
1087 {
1088 ASSERT(valid());
1089 vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
1090 }
1091
1092 ANGLE_INLINE void CommandBuffer::drawIndexedIndirect(const Buffer &buffer,
1093 VkDeviceSize offset,
1094 uint32_t drawCount,
1095 uint32_t stride)
1096 {
1097 ASSERT(valid());
1098 vkCmdDrawIndexedIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
1099 }
1100
1101 ANGLE_INLINE void CommandBuffer::drawIndirect(const Buffer &buffer,
1102 VkDeviceSize offset,
1103 uint32_t drawCount,
1104 uint32_t stride)
1105 {
1106 ASSERT(valid());
1107 vkCmdDrawIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
1108 }
1109
1110 ANGLE_INLINE void CommandBuffer::dispatch(uint32_t groupCountX,
1111 uint32_t groupCountY,
1112 uint32_t groupCountZ)
1113 {
1114 ASSERT(valid());
1115 vkCmdDispatch(mHandle, groupCountX, groupCountY, groupCountZ);
1116 }
1117
1118 ANGLE_INLINE void CommandBuffer::dispatchIndirect(const Buffer &buffer, VkDeviceSize offset)
1119 {
1120 ASSERT(valid());
1121 vkCmdDispatchIndirect(mHandle, buffer.getHandle(), offset);
1122 }
1123
1124 ANGLE_INLINE void CommandBuffer::bindPipeline(VkPipelineBindPoint pipelineBindPoint,
1125 const Pipeline &pipeline)
1126 {
1127 ASSERT(valid() && pipeline.valid());
1128 vkCmdBindPipeline(mHandle, pipelineBindPoint, pipeline.getHandle());
1129 }
1130
1131 ANGLE_INLINE void CommandBuffer::bindGraphicsPipeline(const Pipeline &pipeline)
1132 {
1133 ASSERT(valid() && pipeline.valid());
1134 vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.getHandle());
1135 }
1136
1137 ANGLE_INLINE void CommandBuffer::bindComputePipeline(const Pipeline &pipeline)
1138 {
1139 ASSERT(valid() && pipeline.valid());
1140 vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.getHandle());
1141 }
1142
1143 ANGLE_INLINE void CommandBuffer::bindVertexBuffers(uint32_t firstBinding,
1144 uint32_t bindingCount,
1145 const VkBuffer *buffers,
1146 const VkDeviceSize *offsets)
1147 {
1148 ASSERT(valid());
1149 vkCmdBindVertexBuffers(mHandle, firstBinding, bindingCount, buffers, offsets);
1150 }
1151
1152 ANGLE_INLINE void CommandBuffer::beginTransformFeedbackEXT(uint32_t firstCounterBuffer,
1153 uint32_t counterBufferCount,
1154 const VkBuffer *counterBuffers,
1155 const VkDeviceSize *counterBufferOffsets)
1156 {
1157 ASSERT(valid());
1158 ASSERT(vkCmdBeginTransformFeedbackEXT);
1159 vkCmdBeginTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
1160 counterBufferOffsets);
1161 }
1162
1163 ANGLE_INLINE void CommandBuffer::endTransformFeedbackEXT(uint32_t firstCounterBuffer,
1164 uint32_t counterBufferCount,
1165 const VkBuffer *counterBuffers,
1166 const VkDeviceSize *counterBufferOffsets)
1167 {
1168 ASSERT(valid());
1169 ASSERT(vkCmdEndTransformFeedbackEXT);
1170 vkCmdEndTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
1171 counterBufferOffsets);
1172 }
1173
1174 ANGLE_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT(uint32_t firstBinding,
1175 uint32_t bindingCount,
1176 const VkBuffer *buffers,
1177 const VkDeviceSize *offsets,
1178 const VkDeviceSize *sizes)
1179 {
1180 ASSERT(valid());
1181 ASSERT(vkCmdBindTransformFeedbackBuffersEXT);
1182 vkCmdBindTransformFeedbackBuffersEXT(mHandle, firstBinding, bindingCount, buffers, offsets,
1183 sizes);
1184 }
1185
1186 ANGLE_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
1187 {
1188 ASSERT(valid());
1189 {
1190 #if !defined(ANGLE_SHARED_LIBVULKAN)
        // When the Vulkan loader is statically linked, we need to use the extension
        // functions defined in ANGLE's rx namespace. When it's dynamically linked
        // with volk, this defaults to the function definitions with no namespace.
1194 using rx::vkCmdBeginDebugUtilsLabelEXT;
1195 #endif // !defined(ANGLE_SHARED_LIBVULKAN)
1196 ASSERT(vkCmdBeginDebugUtilsLabelEXT);
1197 vkCmdBeginDebugUtilsLabelEXT(mHandle, &labelInfo);
1198 }
1199 }
1200
1201 ANGLE_INLINE void CommandBuffer::endDebugUtilsLabelEXT()
1202 {
1203 ASSERT(valid());
1204 ASSERT(vkCmdEndDebugUtilsLabelEXT);
1205 vkCmdEndDebugUtilsLabelEXT(mHandle);
1206 }
1207
1208 ANGLE_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
1209 {
1210 ASSERT(valid());
1211 ASSERT(vkCmdInsertDebugUtilsLabelEXT);
1212 vkCmdInsertDebugUtilsLabelEXT(mHandle, &labelInfo);
1213 }
1214 } // namespace priv
1215
1216 // Image implementation.
1217 ANGLE_INLINE void Image::setHandle(VkImage handle)
1218 {
1219 mHandle = handle;
1220 }
1221
1222 ANGLE_INLINE void Image::reset()
1223 {
1224 mHandle = VK_NULL_HANDLE;
1225 }
1226
1227 ANGLE_INLINE void Image::destroy(VkDevice device)
1228 {
1229 if (valid())
1230 {
1231 vkDestroyImage(device, mHandle, nullptr);
1232 mHandle = VK_NULL_HANDLE;
1233 }
1234 }
1235
1236 ANGLE_INLINE VkResult Image::init(VkDevice device, const VkImageCreateInfo &createInfo)
1237 {
1238 ASSERT(!valid());
1239 return vkCreateImage(device, &createInfo, nullptr, &mHandle);
1240 }
1241
1242 ANGLE_INLINE void Image::getMemoryRequirements(VkDevice device,
1243 VkMemoryRequirements *requirementsOut) const
1244 {
1245 ASSERT(valid());
1246 vkGetImageMemoryRequirements(device, mHandle, requirementsOut);
1247 }
1248
1249 ANGLE_INLINE VkResult Image::bindMemory(VkDevice device, const vk::DeviceMemory &deviceMemory)
1250 {
1251 ASSERT(valid() && deviceMemory.valid());
1252 return vkBindImageMemory(device, mHandle, deviceMemory.getHandle(), 0);
1253 }
1254
1255 ANGLE_INLINE void Image::getSubresourceLayout(VkDevice device,
1256 VkImageAspectFlagBits aspectMask,
1257 uint32_t mipLevel,
1258 uint32_t arrayLayer,
1259 VkSubresourceLayout *outSubresourceLayout) const
1260 {
1261 VkImageSubresource subresource = {};
1262 subresource.aspectMask = aspectMask;
1263 subresource.mipLevel = mipLevel;
1264 subresource.arrayLayer = arrayLayer;
1265
1266 vkGetImageSubresourceLayout(device, getHandle(), &subresource, outSubresourceLayout);
1267 }
1268
1269 // ImageView implementation.
1270 ANGLE_INLINE void ImageView::destroy(VkDevice device)
1271 {
1272 if (valid())
1273 {
1274 vkDestroyImageView(device, mHandle, nullptr);
1275 mHandle = VK_NULL_HANDLE;
1276 }
1277 }
1278
1279 ANGLE_INLINE VkResult ImageView::init(VkDevice device, const VkImageViewCreateInfo &createInfo)
1280 {
1281 return vkCreateImageView(device, &createInfo, nullptr, &mHandle);
1282 }
1283
1284 // Semaphore implementation.
1285 ANGLE_INLINE void Semaphore::destroy(VkDevice device)
1286 {
1287 if (valid())
1288 {
1289 vkDestroySemaphore(device, mHandle, nullptr);
1290 mHandle = VK_NULL_HANDLE;
1291 }
1292 }
1293
1294 ANGLE_INLINE VkResult Semaphore::init(VkDevice device)
1295 {
1296 ASSERT(!valid());
1297
1298 VkSemaphoreCreateInfo semaphoreInfo = {};
1299 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
1300 semaphoreInfo.flags = 0;
1301
1302 return vkCreateSemaphore(device, &semaphoreInfo, nullptr, &mHandle);
1303 }
1304
1305 ANGLE_INLINE VkResult Semaphore::init(VkDevice device, const VkSemaphoreCreateInfo &createInfo)
1306 {
    ASSERT(!valid());
1308 return vkCreateSemaphore(device, &createInfo, nullptr, &mHandle);
1309 }
1310
1311 ANGLE_INLINE VkResult Semaphore::importFd(VkDevice device,
1312 const VkImportSemaphoreFdInfoKHR &importFdInfo) const
1313 {
1314 ASSERT(valid());
1315 return vkImportSemaphoreFdKHR(device, &importFdInfo);
1316 }
1317
1318 // Framebuffer implementation.
1319 ANGLE_INLINE void Framebuffer::destroy(VkDevice device)
1320 {
1321 if (valid())
1322 {
1323 vkDestroyFramebuffer(device, mHandle, nullptr);
1324 mHandle = VK_NULL_HANDLE;
1325 }
1326 }
1327
1328 ANGLE_INLINE VkResult Framebuffer::init(VkDevice device, const VkFramebufferCreateInfo &createInfo)
1329 {
1330 ASSERT(!valid());
1331 return vkCreateFramebuffer(device, &createInfo, nullptr, &mHandle);
1332 }
1333
1334 ANGLE_INLINE void Framebuffer::setHandle(VkFramebuffer handle)
1335 {
1336 mHandle = handle;
1337 }
1338
1339 // DeviceMemory implementation.
1340 ANGLE_INLINE void DeviceMemory::destroy(VkDevice device)
1341 {
1342 if (valid())
1343 {
1344 vkFreeMemory(device, mHandle, nullptr);
1345 mHandle = VK_NULL_HANDLE;
1346 }
1347 }
1348
1349 ANGLE_INLINE VkResult DeviceMemory::allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo)
1350 {
1351 ASSERT(!valid());
1352 return vkAllocateMemory(device, &allocInfo, nullptr, &mHandle);
1353 }
1354
1355 ANGLE_INLINE VkResult DeviceMemory::map(VkDevice device,
1356 VkDeviceSize offset,
1357 VkDeviceSize size,
1358 VkMemoryMapFlags flags,
1359 uint8_t **mapPointer) const
1360 {
1361 ASSERT(valid());
1362 return vkMapMemory(device, mHandle, offset, size, flags, reinterpret_cast<void **>(mapPointer));
1363 }
1364
1365 ANGLE_INLINE void DeviceMemory::unmap(VkDevice device) const
1366 {
1367 ASSERT(valid());
1368 vkUnmapMemory(device, mHandle);
1369 }
1370
1371 // Allocator implementation.
1372 ANGLE_INLINE void Allocator::destroy()
1373 {
1374 if (valid())
1375 {
1376 vma::DestroyAllocator(mHandle);
1377 mHandle = VK_NULL_HANDLE;
1378 }
1379 }
1380
1381 ANGLE_INLINE VkResult Allocator::init(VkPhysicalDevice physicalDevice,
1382 VkDevice device,
1383 VkInstance instance,
1384 uint32_t apiVersion)
1385 {
1386 ASSERT(!valid());
1387 return vma::InitAllocator(physicalDevice, device, instance, apiVersion, &mHandle);
1388 }
1389
1390 ANGLE_INLINE VkResult Allocator::createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
1391 VkMemoryPropertyFlags requiredFlags,
1392 VkMemoryPropertyFlags preferredFlags,
1393 bool persistentlyMappedBuffers,
1394 uint32_t *memoryTypeIndexOut,
1395 Buffer *bufferOut,
1396 Allocation *allocationOut) const
1397 {
1398 ASSERT(valid());
1399 ASSERT(bufferOut && !bufferOut->valid());
1400 ASSERT(allocationOut && !allocationOut->valid());
1401 return vma::CreateBuffer(mHandle, &bufferCreateInfo, requiredFlags, preferredFlags,
1402 persistentlyMappedBuffers, memoryTypeIndexOut, &bufferOut->mHandle,
1403 &allocationOut->mHandle);
1404 }
1405
1406 ANGLE_INLINE void Allocator::getMemoryTypeProperties(uint32_t memoryTypeIndex,
1407 VkMemoryPropertyFlags *flagsOut) const
1408 {
1409 ASSERT(valid());
1410 vma::GetMemoryTypeProperties(mHandle, memoryTypeIndex, flagsOut);
1411 }
1412
1413 ANGLE_INLINE VkResult
1414 Allocator::findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
1415 VkMemoryPropertyFlags requiredFlags,
1416 VkMemoryPropertyFlags preferredFlags,
1417 bool persistentlyMappedBuffers,
1418 uint32_t *memoryTypeIndexOut) const
1419 {
1420 ASSERT(valid());
1421 return vma::FindMemoryTypeIndexForBufferInfo(mHandle, &bufferCreateInfo, requiredFlags,
1422 preferredFlags, persistentlyMappedBuffers,
1423 memoryTypeIndexOut);
1424 }
1425
1426 // Allocation implementation.
1427 ANGLE_INLINE void Allocation::destroy(const Allocator &allocator)
1428 {
1429 if (valid())
1430 {
1431 vma::FreeMemory(allocator.getHandle(), mHandle);
1432 mHandle = VK_NULL_HANDLE;
1433 }
1434 }
1435
1436 ANGLE_INLINE VkResult Allocation::map(const Allocator &allocator, uint8_t **mapPointer) const
1437 {
1438 ASSERT(valid());
1439 return vma::MapMemory(allocator.getHandle(), mHandle, (void **)mapPointer);
1440 }
1441
1442 ANGLE_INLINE void Allocation::unmap(const Allocator &allocator) const
1443 {
1444 ASSERT(valid());
1445 vma::UnmapMemory(allocator.getHandle(), mHandle);
1446 }
1447
1448 ANGLE_INLINE void Allocation::flush(const Allocator &allocator,
1449 VkDeviceSize offset,
1450 VkDeviceSize size)
1451 {
1452 ASSERT(valid());
1453 vma::FlushAllocation(allocator.getHandle(), mHandle, offset, size);
1454 }
1455
1456 ANGLE_INLINE void Allocation::invalidate(const Allocator &allocator,
1457 VkDeviceSize offset,
1458 VkDeviceSize size)
1459 {
1460 ASSERT(valid());
1461 vma::InvalidateAllocation(allocator.getHandle(), mHandle, offset, size);
1462 }
1463
1464 // RenderPass implementation.
1465 ANGLE_INLINE void RenderPass::destroy(VkDevice device)
1466 {
1467 if (valid())
1468 {
1469 vkDestroyRenderPass(device, mHandle, nullptr);
1470 mHandle = VK_NULL_HANDLE;
1471 }
1472 }
1473
1474 ANGLE_INLINE VkResult RenderPass::init(VkDevice device, const VkRenderPassCreateInfo &createInfo)
1475 {
1476 ASSERT(!valid());
1477 return vkCreateRenderPass(device, &createInfo, nullptr, &mHandle);
1478 }
1479
1480 // Buffer implementation.
1481 ANGLE_INLINE void Buffer::destroy(VkDevice device)
1482 {
1483 if (valid())
1484 {
1485 vkDestroyBuffer(device, mHandle, nullptr);
1486 mHandle = VK_NULL_HANDLE;
1487 }
1488 }
1489
1490 ANGLE_INLINE VkResult Buffer::init(VkDevice device, const VkBufferCreateInfo &createInfo)
1491 {
1492 ASSERT(!valid());
1493 return vkCreateBuffer(device, &createInfo, nullptr, &mHandle);
1494 }
1495
1496 ANGLE_INLINE VkResult Buffer::bindMemory(VkDevice device, const DeviceMemory &deviceMemory)
1497 {
1498 ASSERT(valid() && deviceMemory.valid());
1499 return vkBindBufferMemory(device, mHandle, deviceMemory.getHandle(), 0);
1500 }
1501
1502 ANGLE_INLINE void Buffer::getMemoryRequirements(VkDevice device,
1503 VkMemoryRequirements *memoryRequirementsOut)
1504 {
1505 ASSERT(valid());
1506 vkGetBufferMemoryRequirements(device, mHandle, memoryRequirementsOut);
1507 }
1508
1509 // BufferView implementation.
1510 ANGLE_INLINE void BufferView::destroy(VkDevice device)
1511 {
1512 if (valid())
1513 {
1514 vkDestroyBufferView(device, mHandle, nullptr);
1515 mHandle = VK_NULL_HANDLE;
1516 }
1517 }
1518
1519 ANGLE_INLINE VkResult BufferView::init(VkDevice device, const VkBufferViewCreateInfo &createInfo)
1520 {
1521 ASSERT(!valid());
1522 return vkCreateBufferView(device, &createInfo, nullptr, &mHandle);
1523 }
1524
1525 // ShaderModule implementation.
1526 ANGLE_INLINE void ShaderModule::destroy(VkDevice device)
1527 {
1528 if (mHandle != VK_NULL_HANDLE)
1529 {
1530 vkDestroyShaderModule(device, mHandle, nullptr);
1531 mHandle = VK_NULL_HANDLE;
1532 }
1533 }
1534
1535 ANGLE_INLINE VkResult ShaderModule::init(VkDevice device,
1536 const VkShaderModuleCreateInfo &createInfo)
1537 {
1538 ASSERT(!valid());
1539 return vkCreateShaderModule(device, &createInfo, nullptr, &mHandle);
1540 }
1541
1542 // PipelineLayout implementation.
1543 ANGLE_INLINE void PipelineLayout::destroy(VkDevice device)
1544 {
1545 if (valid())
1546 {
1547 vkDestroyPipelineLayout(device, mHandle, nullptr);
1548 mHandle = VK_NULL_HANDLE;
1549 }
1550 }
1551
1552 ANGLE_INLINE VkResult PipelineLayout::init(VkDevice device,
1553 const VkPipelineLayoutCreateInfo &createInfo)
1554 {
1555 ASSERT(!valid());
1556 return vkCreatePipelineLayout(device, &createInfo, nullptr, &mHandle);
1557 }
1558
1559 // PipelineCache implementation.
1560 ANGLE_INLINE void PipelineCache::destroy(VkDevice device)
1561 {
1562 if (valid())
1563 {
1564 vkDestroyPipelineCache(device, mHandle, nullptr);
1565 mHandle = VK_NULL_HANDLE;
1566 }
1567 }
1568
1569 ANGLE_INLINE VkResult PipelineCache::init(VkDevice device,
1570 const VkPipelineCacheCreateInfo &createInfo)
1571 {
1572 ASSERT(!valid());
1573 // Note: if we are concerned with memory usage of this cache, we should give it custom
1574 // allocators. Also, failure of this function is of little importance.
1575 return vkCreatePipelineCache(device, &createInfo, nullptr, &mHandle);
1576 }
1577
1578 ANGLE_INLINE VkResult PipelineCache::merge(VkDevice device,
1579 VkPipelineCache dstCache,
1580 uint32_t srcCacheCount,
1581 const VkPipelineCache *srcCaches)
1582 {
1583 ASSERT(valid());
1584 return vkMergePipelineCaches(device, dstCache, srcCacheCount, srcCaches);
1585 }
1586
1587 ANGLE_INLINE VkResult PipelineCache::getCacheData(VkDevice device,
1588 size_t *cacheSize,
1589 void *cacheData)
1590 {
1591 ASSERT(valid());
1592
1593 // Note: vkGetPipelineCacheData can return VK_INCOMPLETE if cacheSize is smaller than actual
1594 // size. There are two usages of this function. One is with *cacheSize == 0 to query the size
1595 // of the cache, and one is with an appropriate buffer to retrieve the cache contents.
    // VK_INCOMPLETE in the first case is an expected output. In the second case, VK_INCOMPLETE is
    // also acceptable and the resulting buffer will contain valid data per the spec. ANGLE
    // currently ensures that *cacheSize is either 0 or large enough, so VK_INCOMPLETE is not
    // expected.
1599 return vkGetPipelineCacheData(device, mHandle, cacheSize, cacheData);
1600 }
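
// A hedged usage sketch (comment only) of the two-call pattern described above: first query the
// required size, then retrieve the data into a correctly sized buffer.
//
//     size_t cacheSize = 0;
//     (void)pipelineCache.getCacheData(device, &cacheSize, nullptr);
//     std::vector<uint8_t> cacheData(cacheSize);
//     (void)pipelineCache.getCacheData(device, &cacheSize, cacheData.data());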
1601
1602 // Pipeline implementation.
1603 ANGLE_INLINE void Pipeline::destroy(VkDevice device)
1604 {
1605 if (valid())
1606 {
1607 vkDestroyPipeline(device, mHandle, nullptr);
1608 mHandle = VK_NULL_HANDLE;
1609 }
1610 }
1611
1612 ANGLE_INLINE VkResult Pipeline::initGraphics(VkDevice device,
1613 const VkGraphicsPipelineCreateInfo &createInfo,
1614 const PipelineCache &pipelineCacheVk)
1615 {
1616 ASSERT(!valid());
1617 return vkCreateGraphicsPipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
1618 &mHandle);
1619 }
1620
1621 ANGLE_INLINE VkResult Pipeline::initCompute(VkDevice device,
1622 const VkComputePipelineCreateInfo &createInfo,
1623 const PipelineCache &pipelineCacheVk)
1624 {
1625 ASSERT(!valid());
1626 return vkCreateComputePipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
1627 &mHandle);
1628 }
1629
1630 // DescriptorSetLayout implementation.
1631 ANGLE_INLINE void DescriptorSetLayout::destroy(VkDevice device)
1632 {
1633 if (valid())
1634 {
1635 vkDestroyDescriptorSetLayout(device, mHandle, nullptr);
1636 mHandle = VK_NULL_HANDLE;
1637 }
1638 }
1639
1640 ANGLE_INLINE VkResult DescriptorSetLayout::init(VkDevice device,
1641 const VkDescriptorSetLayoutCreateInfo &createInfo)
1642 {
1643 ASSERT(!valid());
1644 return vkCreateDescriptorSetLayout(device, &createInfo, nullptr, &mHandle);
1645 }
1646
1647 // DescriptorPool implementation.
1648 ANGLE_INLINE void DescriptorPool::destroy(VkDevice device)
1649 {
1650 if (valid())
1651 {
1652 vkDestroyDescriptorPool(device, mHandle, nullptr);
1653 mHandle = VK_NULL_HANDLE;
1654 }
1655 }
1656
1657 ANGLE_INLINE VkResult DescriptorPool::init(VkDevice device,
1658 const VkDescriptorPoolCreateInfo &createInfo)
1659 {
1660 ASSERT(!valid());
1661 return vkCreateDescriptorPool(device, &createInfo, nullptr, &mHandle);
1662 }
1663
1664 ANGLE_INLINE VkResult
1665 DescriptorPool::allocateDescriptorSets(VkDevice device,
1666 const VkDescriptorSetAllocateInfo &allocInfo,
1667 VkDescriptorSet *descriptorSetsOut)
1668 {
1669 ASSERT(valid());
1670 return vkAllocateDescriptorSets(device, &allocInfo, descriptorSetsOut);
1671 }
1672
1673 ANGLE_INLINE VkResult DescriptorPool::freeDescriptorSets(VkDevice device,
1674 uint32_t descriptorSetCount,
1675 const VkDescriptorSet *descriptorSets)
1676 {
1677 ASSERT(valid());
1678 ASSERT(descriptorSetCount > 0);
1679 return vkFreeDescriptorSets(device, mHandle, descriptorSetCount, descriptorSets);
1680 }
1681
1682 // Sampler implementation.
1683 ANGLE_INLINE void Sampler::destroy(VkDevice device)
1684 {
1685 if (valid())
1686 {
1687 vkDestroySampler(device, mHandle, nullptr);
1688 mHandle = VK_NULL_HANDLE;
1689 }
1690 }
1691
1692 ANGLE_INLINE VkResult Sampler::init(VkDevice device, const VkSamplerCreateInfo &createInfo)
1693 {
1694 ASSERT(!valid());
1695 return vkCreateSampler(device, &createInfo, nullptr, &mHandle);
1696 }
1697
1698 // Event implementation.
1699 ANGLE_INLINE void Event::destroy(VkDevice device)
1700 {
1701 if (valid())
1702 {
1703 vkDestroyEvent(device, mHandle, nullptr);
1704 mHandle = VK_NULL_HANDLE;
1705 }
1706 }
1707
1708 ANGLE_INLINE VkResult Event::init(VkDevice device, const VkEventCreateInfo &createInfo)
1709 {
1710 ASSERT(!valid());
1711 return vkCreateEvent(device, &createInfo, nullptr, &mHandle);
1712 }
1713
1714 ANGLE_INLINE VkResult Event::getStatus(VkDevice device) const
1715 {
1716 ASSERT(valid());
1717 return vkGetEventStatus(device, mHandle);
1718 }
1719
1720 ANGLE_INLINE VkResult Event::set(VkDevice device) const
1721 {
1722 ASSERT(valid());
1723 return vkSetEvent(device, mHandle);
1724 }
1725
1726 ANGLE_INLINE VkResult Event::reset(VkDevice device) const
1727 {
1728 ASSERT(valid());
1729 return vkResetEvent(device, mHandle);
1730 }
1731
1732 // Fence implementation.
1733 ANGLE_INLINE void Fence::destroy(VkDevice device)
1734 {
1735 if (valid())
1736 {
1737 vkDestroyFence(device, mHandle, nullptr);
1738 mHandle = VK_NULL_HANDLE;
1739 }
1740 }
1741
1742 ANGLE_INLINE VkResult Fence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
1743 {
1744 ASSERT(!valid());
1745 return vkCreateFence(device, &createInfo, nullptr, &mHandle);
1746 }
1747
1748 ANGLE_INLINE VkResult Fence::reset(VkDevice device)
1749 {
1750 ASSERT(valid());
1751 return vkResetFences(device, 1, &mHandle);
1752 }
1753
1754 ANGLE_INLINE VkResult Fence::getStatus(VkDevice device) const
1755 {
1756 ASSERT(valid());
1757 return vkGetFenceStatus(device, mHandle);
1758 }
1759
1760 ANGLE_INLINE VkResult Fence::wait(VkDevice device, uint64_t timeout) const
1761 {
1762 ASSERT(valid());
1763 return vkWaitForFences(device, 1, &mHandle, true, timeout);
1764 }
1765
1766 ANGLE_INLINE VkResult Fence::importFd(VkDevice device,
1767 const VkImportFenceFdInfoKHR &importFenceFdInfo) const
1768 {
1769 ASSERT(valid());
1770 return vkImportFenceFdKHR(device, &importFenceFdInfo);
1771 }
1772
1773 ANGLE_INLINE VkResult Fence::exportFd(VkDevice device,
1774 const VkFenceGetFdInfoKHR &fenceGetFdInfo,
1775 int *fdOut) const
1776 {
1777 ASSERT(valid());
1778 return vkGetFenceFdKHR(device, &fenceGetFdInfo, fdOut);
1779 }
1780
1781 // QueryPool implementation.
1782 ANGLE_INLINE void QueryPool::destroy(VkDevice device)
1783 {
1784 if (valid())
1785 {
1786 vkDestroyQueryPool(device, mHandle, nullptr);
1787 mHandle = VK_NULL_HANDLE;
1788 }
1789 }
1790
1791 ANGLE_INLINE VkResult QueryPool::init(VkDevice device, const VkQueryPoolCreateInfo &createInfo)
1792 {
1793 ASSERT(!valid());
1794 return vkCreateQueryPool(device, &createInfo, nullptr, &mHandle);
1795 }
1796
1797 ANGLE_INLINE VkResult QueryPool::getResults(VkDevice device,
1798 uint32_t firstQuery,
1799 uint32_t queryCount,
1800 size_t dataSize,
1801 void *data,
1802 VkDeviceSize stride,
1803 VkQueryResultFlags flags) const
1804 {
1805 ASSERT(valid());
1806 return vkGetQueryPoolResults(device, mHandle, firstQuery, queryCount, dataSize, data, stride,
1807 flags);
1808 }
1809 } // namespace vk
1810 } // namespace rx
1811
1812 #endif // LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
1813