/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_graphite_VulkanCommandBuffer_DEFINED
#define skgpu_graphite_VulkanCommandBuffer_DEFINED

#include "src/gpu/graphite/CommandBuffer.h"

#include "include/gpu/vk/VulkanTypes.h"
#include "src/gpu/graphite/DrawPass.h"
#include "src/gpu/graphite/vk/VulkanGraphicsPipeline.h"

namespace skgpu::graphite {

class VulkanBuffer;
class VulkanDescriptorSet;
class VulkanResourceProvider;
class VulkanSharedContext;
class VulkanTexture;
class Buffer;

class VulkanCommandBuffer final : public CommandBuffer {
public:
    static std::unique_ptr<VulkanCommandBuffer> Make(const VulkanSharedContext*,
                                                     VulkanResourceProvider*);
    ~VulkanCommandBuffer() override;

    bool setNewCommandBufferResources() override;

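    // Submit the recorded work to the given queue; isFinished()/waitUntilFinished() can then be
    // used to poll or block until the GPU has completed that work.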
    bool submit(VkQueue);

    bool isFinished();

    void waitUntilFinished();

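    // Barrier requests are gathered and later recorded as a single batch (see pipelineBarrier()
    // and submitPipelineBarriers() below).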
    void addBufferMemoryBarrier(const Resource* resource,
                                VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addBufferMemoryBarrier(VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addImageMemoryBarrier(const Resource*,
                               VkPipelineStageFlags srcStageMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion,
                               VkImageMemoryBarrier* barrier);

private:
    VulkanCommandBuffer(VkCommandPool pool,
                        VkCommandBuffer primaryCommandBuffer,
                        const VulkanSharedContext* sharedContext,
                        VulkanResourceProvider* resourceProvider);

    void onResetCommandBuffer() override;

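    // Begin/end recording on the primary command buffer; fActive tracks whether recording is open.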
    void begin();
    void end();

    void addWaitSemaphores(size_t numWaitSemaphores,
                           const BackendSemaphore* waitSemaphores) override;
    void addSignalSemaphores(size_t numSignalSemaphores,
                             const BackendSemaphore* signalSemaphores) override;
    void prepareSurfaceForStateUpdate(SkSurface* targetSurface,
                                      const MutableTextureState* newState) override;

    bool onAddRenderPass(const RenderPassDesc&,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture,
                         SkRect viewport,
                         const DrawPassList&) override;

    bool beginRenderPass(const RenderPassDesc&,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture);
    void endRenderPass();

    void addDrawPass(const DrawPass*);

    // Track descriptor changes for binding prior to draw calls
    void recordBufferBindingInfo(const BindUniformBufferInfo& info, UniformSlot);
    void recordTextureAndSamplerDescSet(
            const DrawPass&, const DrawPassCommands::BindTexturesAndSamplers&);

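    // Bind any descriptor sets recorded above that are still pending (see fBindUniformBuffers
    // and fBindTextureSamplers).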
    void bindTextureSamplers();
    void bindUniformBuffers();
    void syncDescriptorSets();

    void bindGraphicsPipeline(const GraphicsPipeline*);
    void setBlendConstants(float* blendConstants);
    void bindDrawBuffers(const BindBufferInfo& vertices,
                         const BindBufferInfo& instances,
                         const BindBufferInfo& indices,
                         const BindBufferInfo& indirect);
    void bindVertexBuffers(const Buffer* vertexBuffer, size_t vertexOffset,
                           const Buffer* instanceBuffer, size_t instanceOffset);
    void bindInputBuffer(const Buffer* buffer, VkDeviceSize offset, uint32_t binding);
    void bindIndexBuffer(const Buffer* indexBuffer, size_t offset);
    void bindIndirectBuffer(const Buffer* indirectBuffer, size_t offset);
    void setScissor(unsigned int left, unsigned int top,
                    unsigned int width, unsigned int height);

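    // Draw commands; these are only valid within an active render pass with a bound
    // graphics pipeline.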
    void draw(PrimitiveType type, unsigned int baseVertex, unsigned int vertexCount);
    void drawIndexed(PrimitiveType type, unsigned int baseIndex, unsigned int indexCount,
                     unsigned int baseVertex);
    void drawInstanced(PrimitiveType type,
                       unsigned int baseVertex, unsigned int vertexCount,
                       unsigned int baseInstance, unsigned int instanceCount);
    void drawIndexedInstanced(PrimitiveType type, unsigned int baseIndex,
                              unsigned int indexCount, unsigned int baseVertex,
                              unsigned int baseInstance, unsigned int instanceCount);
    void drawIndirect(PrimitiveType type);
    void drawIndexedIndirect(PrimitiveType type);

    // TODO: The virtuals in this class have not yet been implemented as we still haven't
    // implemented the objects they use.
    bool onAddComputePass(DispatchGroupSpan) override;

    bool onCopyBufferToBuffer(const Buffer* srcBuffer,
                              size_t srcOffset,
                              const Buffer* dstBuffer,
                              size_t dstOffset,
                              size_t size) override;
    bool onCopyTextureToBuffer(const Texture*,
                               SkIRect srcRect,
                               const Buffer*,
                               size_t bufferOffset,
                               size_t bufferRowBytes) override;
    bool onCopyBufferToTexture(const Buffer*,
                               const Texture*,
                               const BufferTextureCopyData* copyData,
                               int count) override;
    bool onCopyTextureToTexture(const Texture* src,
                                SkIRect srcRect,
                                const Texture* dst,
                                SkIPoint dstPoint,
                                int mipLevel) override;

    bool onSynchronizeBufferToCpu(const Buffer*, bool* outDidResultInWork) override;
    bool onClearBuffer(const Buffer*, size_t offset, size_t size) override;

    enum BarrierType {
        kBufferMemory_BarrierType,
        kImageMemory_BarrierType
    };
    void pipelineBarrier(const Resource* resource,
                         VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         bool byRegion,
                         BarrierType barrierType,
                         void* barrier);
    void submitPipelineBarriers(bool forSelfDependency = false);

    // Update the intrinsic constant uniform with the latest rtAdjust value as determined by a
    // given viewport. The resource provider is responsible for finding a suitable buffer and
    // managing its lifetime.
    void updateRtAdjustUniform(const SkRect& viewport);

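    // Helpers for render passes that load the MSAA attachment's contents from the resolve texture.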
    bool updateLoadMSAAVertexBuffer();
    bool loadMSAAFromResolve(const RenderPassDesc&,
                             VulkanTexture& resolveTexture,
                             SkISize dstDimensions);
    bool updateAndBindLoadMSAAInputAttachment(const VulkanTexture& resolveTexture);
    void updateBuffer(const VulkanBuffer* buffer,
                      const void* data,
                      size_t dataSize,
                      size_t dstOffset = 0);
    void nextSubpass();
    void setViewport(const SkRect& viewport);

    VkCommandPool fPool;
    VkCommandBuffer fPrimaryCommandBuffer;
    const VulkanSharedContext* fSharedContext;
    VulkanResourceProvider* fResourceProvider;

    // begin() has been called, but not end()
    bool fActive = false;
    // Track whether there is currently an active render pass (beginRenderPass has been called, but
    // not endRenderPass)
    bool fActiveRenderPass = false;

    const VulkanGraphicsPipeline* fActiveGraphicsPipeline = nullptr;

    VkFence fSubmitFence = VK_NULL_HANDLE;

    // Current semaphores
    skia_private::STArray<1, VkSemaphore> fWaitSemaphores;
    skia_private::STArray<1, VkSemaphore> fSignalSemaphores;

    // Tracking of memory barriers so that we can submit them all in a batch together.
    skia_private::STArray<1, VkBufferMemoryBarrier> fBufferBarriers;
    skia_private::STArray<2, VkImageMemoryBarrier> fImageBarriers;
    bool fBarriersByRegion = false;
    VkPipelineStageFlags fSrcStageMask = 0;
    VkPipelineStageFlags fDstStageMask = 0;

    // Track whether certain descriptor sets need to be bound
    bool fBindUniformBuffers = false;
    bool fBindTextureSamplers = false;

    std::array<BindUniformBufferInfo, VulkanGraphicsPipeline::kNumUniformBuffers>
            fUniformBuffersToBind;
    VkDescriptorSet fTextureSamplerDescSetToBind = VK_NULL_HANDLE;

    int fNumTextureSamplers = 0;

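    // Most recently bound vertex/instance, index, and indirect buffers and their offsets.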
    VkBuffer fBoundInputBuffers[VulkanGraphicsPipeline::kNumInputBuffers];
    size_t fBoundInputBufferOffsets[VulkanGraphicsPipeline::kNumInputBuffers];

    VkBuffer fBoundIndexBuffer = VK_NULL_HANDLE;
    VkBuffer fBoundIndirectBuffer = VK_NULL_HANDLE;
    size_t fBoundIndexBufferOffset = 0;
    size_t fBoundIndirectBufferOffset = 0;

    float fCachedBlendConstant[4];
};

} // namespace skgpu::graphite

#endif // skgpu_graphite_VulkanCommandBuffer_DEFINED