/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_graphite_VulkanCommandBuffer_DEFINED
#define skgpu_graphite_VulkanCommandBuffer_DEFINED

#include "src/gpu/graphite/CommandBuffer.h"

#include "include/gpu/vk/VulkanTypes.h"
#include "src/gpu/graphite/DrawPass.h"
#include "src/gpu/graphite/vk/VulkanGraphicsPipeline.h"
#include "src/gpu/graphite/vk/VulkanResourceProvider.h"

namespace skgpu::graphite {

class VulkanBuffer;
class VulkanDescriptorSet;
class VulkanSharedContext;
class VulkanTexture;
class Buffer;

class VulkanCommandBuffer final : public CommandBuffer {
public:
    static std::unique_ptr<VulkanCommandBuffer> Make(const VulkanSharedContext*,
                                                     VulkanResourceProvider*,
                                                     Protected);
    ~VulkanCommandBuffer() override;

    bool setNewCommandBufferResources() override;

    bool submit(VkQueue);

    bool isFinished();

    void waitUntilFinished();

    void addBufferMemoryBarrier(const Resource* resource,
                                VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addBufferMemoryBarrier(VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addImageMemoryBarrier(const Resource*,
                               VkPipelineStageFlags srcStageMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion,
                               VkImageMemoryBarrier* barrier);

private:
    VulkanCommandBuffer(VkCommandPool pool,
                        VkCommandBuffer primaryCommandBuffer,
                        const VulkanSharedContext* sharedContext,
                        VulkanResourceProvider* resourceProvider,
                        Protected);

    ResourceProvider* resourceProvider() const override { return fResourceProvider; }

    void onResetCommandBuffer() override;

    void begin();
    void end();

    void addWaitSemaphores(size_t numWaitSemaphores,
                           const BackendSemaphore* waitSemaphores) override;
    void addSignalSemaphores(size_t numSignalSemaphores,
                             const BackendSemaphore* signalSemaphores) override;
    void prepareSurfaceForStateUpdate(SkSurface* targetSurface,
                                      const MutableTextureState* newState) override;

    bool onAddRenderPass(const RenderPassDesc&,
                         SkIRect renderPassBounds,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture,
                         SkIRect viewport,
                         const DrawPassList&) override;

    bool beginRenderPass(const RenderPassDesc&,
                         SkIRect renderPassBounds,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture);
    void endRenderPass();

    void addDrawPass(const DrawPass*);

    // Track descriptor changes for binding prior to draw calls
    void recordBufferBindingInfo(const BindBufferInfo& info, UniformSlot);
    // Either both arguments are non-null, or both must be null (to reset or handle just the
    // dstCopy intrinsic w/o requiring a DrawPass command).
    void recordTextureAndSamplerDescSet(
            const DrawPass*, const DrawPassCommands::BindTexturesAndSamplers*);

    void bindTextureSamplers();
    void bindUniformBuffers();
    void syncDescriptorSets();

    struct PushConstantInfo {
        uint32_t fOffset;
        uint32_t fSize;
        VkShaderStageFlagBits fShaderStageFlagBits;
        const void* fValues;
    };
    void bindGraphicsPipeline(const GraphicsPipeline*);
    void pushConstants(const PushConstantInfo&, VkPipelineLayout compatibleLayout);

    void setBlendConstants(float* blendConstants);
    void bindDrawBuffers(const BindBufferInfo& vertices,
                         const BindBufferInfo& instances,
                         const BindBufferInfo& indices,
                         const BindBufferInfo& indirect);
    void bindVertexBuffers(const Buffer* vertexBuffer, size_t vertexOffset,
                           const Buffer* instanceBuffer, size_t instanceOffset);
    void bindInputBuffer(const Buffer* buffer, VkDeviceSize offset, uint32_t binding);
    void bindIndexBuffer(const Buffer* indexBuffer, size_t offset);
    void bindIndirectBuffer(const Buffer* indirectBuffer, size_t offset);
    void setScissor(const Scissor&);
    void setScissor(const SkIRect&);

    void draw(PrimitiveType type, unsigned int baseVertex, unsigned int vertexCount);
    void drawIndexed(PrimitiveType type, unsigned int baseIndex, unsigned int indexCount,
                     unsigned int baseVertex);
    void drawInstanced(PrimitiveType type,
                       unsigned int baseVertex, unsigned int vertexCount,
                       unsigned int baseInstance, unsigned int instanceCount);
    void drawIndexedInstanced(PrimitiveType type, unsigned int baseIndex,
                              unsigned int indexCount, unsigned int baseVertex,
                              unsigned int baseInstance, unsigned int instanceCount);
    void drawIndirect(PrimitiveType type);
    void drawIndexedIndirect(PrimitiveType type);

    // TODO: The virtuals in this class have not yet been implemented as we still haven't
    // implemented the objects they use.
    bool onAddComputePass(DispatchGroupSpan) override;

    bool onCopyBufferToBuffer(const Buffer* srcBuffer,
                              size_t srcOffset,
                              const Buffer* dstBuffer,
                              size_t dstOffset,
                              size_t size) override;
    bool onCopyTextureToBuffer(const Texture*,
                               SkIRect srcRect,
                               const Buffer*,
                               size_t bufferOffset,
                               size_t bufferRowBytes) override;
    bool onCopyBufferToTexture(const Buffer*,
                               const Texture*,
                               const BufferTextureCopyData* copyData,
                               int count) override;
    bool onCopyTextureToTexture(const Texture* src,
                                SkIRect srcRect,
                                const Texture* dst,
                                SkIPoint dstPoint,
                                int mipLevel) override;

    bool pushConstants(VkShaderStageFlags stageFlags,
                       uint32_t offset,
                       uint32_t size,
                       const void* values);

    bool onSynchronizeBufferToCpu(const Buffer*, bool* outDidResultInWork) override;
    bool onClearBuffer(const Buffer*, size_t offset, size_t size) override;

    enum BarrierType {
        kBufferMemory_BarrierType,
        kImageMemory_BarrierType
    };
    void pipelineBarrier(const Resource* resource,
                         VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         bool byRegion,
                         BarrierType barrierType,
                         void* barrier);
    void submitPipelineBarriers(bool forSelfDependency = false);

    bool loadMSAAFromResolve(const RenderPassDesc&,
                             VulkanTexture& resolveTexture,
                             SkISize dstDimensions,
                             SkIRect nativeBounds);
    bool updateAndBindLoadMSAAInputAttachment(const VulkanTexture& resolveTexture);
    void updateBuffer(const VulkanBuffer* buffer,
                      const void* data,
                      size_t dataSize,
                      size_t dstOffset = 0);
    void nextSubpass();
    void setViewport(SkIRect viewport);

    VkCommandPool fPool;
    VkCommandBuffer fPrimaryCommandBuffer;
    const VulkanSharedContext* fSharedContext;
    VulkanResourceProvider* fResourceProvider;

    // begin() has been called, but not end()
    bool fActive = false;
    // Track whether there is currently an active render pass (beginRenderPass has been called,
    // but not endRenderPass)
    bool fActiveRenderPass = false;

    const VulkanGraphicsPipeline* fActiveGraphicsPipeline = nullptr;

    VkFence fSubmitFence = VK_NULL_HANDLE;

    // Current semaphores
    skia_private::STArray<1, VkSemaphore> fWaitSemaphores;
    skia_private::STArray<1, VkSemaphore> fSignalSemaphores;

    // Tracking of memory barriers so that we can submit them all in a batch together.
    skia_private::STArray<1, VkBufferMemoryBarrier> fBufferBarriers;
    skia_private::STArray<2, VkImageMemoryBarrier> fImageBarriers;
    bool fBarriersByRegion = false;
    VkPipelineStageFlags fSrcStageMask = 0;
    VkPipelineStageFlags fDstStageMask = 0;

    // Track whether certain descriptor sets need to be bound
    bool fBindUniformBuffers = false;
    bool fBindTextureSamplers = false;

    std::array<BindBufferInfo, VulkanGraphicsPipeline::kNumUniformBuffers> fUniformBuffersToBind;
    VkDescriptorSet fTextureSamplerDescSetToBind = VK_NULL_HANDLE;

    int fNumTextureSamplers = 0;

    VkBuffer fBoundInputBuffers[VulkanGraphicsPipeline::kNumInputBuffers];
    size_t fBoundInputBufferOffsets[VulkanGraphicsPipeline::kNumInputBuffers];

    VkBuffer fBoundIndexBuffer = VK_NULL_HANDLE;
    VkBuffer fBoundIndirectBuffer = VK_NULL_HANDLE;
    size_t fBoundIndexBufferOffset = 0;
    size_t fBoundIndirectBufferOffset = 0;

    float fCachedBlendConstant[4];
};

} // namespace skgpu::graphite

#endif // skgpu_graphite_VulkanCommandBuffer_DEFINED