/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_graphite_VulkanCommandBuffer_DEFINED
#define skgpu_graphite_VulkanCommandBuffer_DEFINED

#include "src/gpu/graphite/CommandBuffer.h"

#include "include/gpu/vk/VulkanTypes.h"
#include "src/gpu/graphite/DrawPass.h"
#include "src/gpu/graphite/vk/VulkanGraphicsPipeline.h"
#include "src/gpu/graphite/vk/VulkanResourceProvider.h"

namespace skgpu::graphite {

class Buffer;
class VulkanBuffer;
class VulkanDescriptorSet;
class VulkanSharedContext;
class VulkanTexture;

class VulkanCommandBuffer final : public CommandBuffer {
public:
    static std::unique_ptr<VulkanCommandBuffer> Make(const VulkanSharedContext*,
                                                     VulkanResourceProvider*,
                                                     Protected);
    ~VulkanCommandBuffer() override;

    bool setNewCommandBufferResources() override;

    bool submit(VkQueue);

    bool isFinished();

    void waitUntilFinished();

    void addBufferMemoryBarrier(const Resource* resource,
                                VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addBufferMemoryBarrier(VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addImageMemoryBarrier(const Resource*,
                               VkPipelineStageFlags srcStageMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion,
                               VkImageMemoryBarrier* barrier);

private:
    VulkanCommandBuffer(VkCommandPool pool,
                        VkCommandBuffer primaryCommandBuffer,
                        const VulkanSharedContext* sharedContext,
                        VulkanResourceProvider* resourceProvider,
                        Protected);

    ResourceProvider* resourceProvider() const override { return fResourceProvider; }

    void onResetCommandBuffer() override;

    void begin();
    void end();

    void addWaitSemaphores(size_t numWaitSemaphores,
                           const BackendSemaphore* waitSemaphores) override;
    void addSignalSemaphores(size_t numSignalSemaphores,
                             const BackendSemaphore* signalSemaphores) override;
    void prepareSurfaceForStateUpdate(SkSurface* targetSurface,
                                      const MutableTextureState* newState) override;

    bool onAddRenderPass(const RenderPassDesc&,
                         SkIRect renderPassBounds,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture,
                         SkIRect viewport,
                         const DrawPassList&) override;

    bool beginRenderPass(const RenderPassDesc&,
                         SkIRect renderPassBounds,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture);
    void endRenderPass();

    void addDrawPass(const DrawPass*);

    // Track descriptor changes for binding prior to draw calls
    void recordBufferBindingInfo(const BindBufferInfo& info, UniformSlot);
    // Either both arguments are non-null, or both must be null (to reset or handle just the
    // dstCopy intrinsic w/o requiring a DrawPass command).
    void recordTextureAndSamplerDescSet(
            const DrawPass*, const DrawPassCommands::BindTexturesAndSamplers*);

    bool updateAndBindInputAttachment(const VulkanTexture&, const int setIdx);
    void bindTextureSamplers();
    void bindUniformBuffers();
    void syncDescriptorSets();

    struct PushConstantInfo {
        uint32_t fOffset;
        uint32_t fSize;
        VkShaderStageFlagBits fShaderStageFlagBits;
        const void* fValues;
    };
    void bindGraphicsPipeline(const GraphicsPipeline*);
    void pushConstants(const PushConstantInfo&, VkPipelineLayout compatibleLayout);

    void setBlendConstants(float* blendConstants);
    void bindDrawBuffers(const BindBufferInfo& vertices,
                         const BindBufferInfo& instances,
                         const BindBufferInfo& indices,
                         const BindBufferInfo& indirect);
    void bindVertexBuffers(const Buffer* vertexBuffer, size_t vertexOffset,
                           const Buffer* instanceBuffer, size_t instanceOffset);
    void bindInputBuffer(const Buffer* buffer, VkDeviceSize offset, uint32_t binding);
    void bindIndexBuffer(const Buffer* indexBuffer, size_t offset);
    void bindIndirectBuffer(const Buffer* indirectBuffer, size_t offset);
    void setScissor(const Scissor&);
    void setScissor(const SkIRect&);

    void draw(PrimitiveType type, unsigned int baseVertex, unsigned int vertexCount);
    void drawIndexed(PrimitiveType type, unsigned int baseIndex, unsigned int indexCount,
                     unsigned int baseVertex);
    void drawInstanced(PrimitiveType type,
                       unsigned int baseVertex, unsigned int vertexCount,
                       unsigned int baseInstance, unsigned int instanceCount);
    void drawIndexedInstanced(PrimitiveType type, unsigned int baseIndex,
                              unsigned int indexCount, unsigned int baseVertex,
                              unsigned int baseInstance, unsigned int instanceCount);
    void drawIndirect(PrimitiveType type);
    void drawIndexedIndirect(PrimitiveType type);
    void addBarrier(BarrierType type);

    // TODO: The virtuals in this class have not yet been implemented as we still haven't
    // implemented the objects they use.
    bool onAddComputePass(DispatchGroupSpan) override;

    bool onCopyBufferToBuffer(const Buffer* srcBuffer,
                              size_t srcOffset,
                              const Buffer* dstBuffer,
                              size_t dstOffset,
                              size_t size) override;
    bool onCopyTextureToBuffer(const Texture*,
                               SkIRect srcRect,
                               const Buffer*,
                               size_t bufferOffset,
                               size_t bufferRowBytes) override;
    bool onCopyBufferToTexture(const Buffer*,
                               const Texture*,
                               const BufferTextureCopyData* copyData,
                               int count) override;
    bool onCopyTextureToTexture(const Texture* src,
                                SkIRect srcRect,
                                const Texture* dst,
                                SkIPoint dstPoint,
                                int mipLevel) override;

    bool pushConstants(VkShaderStageFlags stageFlags,
                       uint32_t offset,
                       uint32_t size,
                       const void* values);

    bool onSynchronizeBufferToCpu(const Buffer*, bool* outDidResultInWork) override;
    bool onClearBuffer(const Buffer*, size_t offset, size_t size) override;

    enum PipelineBarrierType {
        kBufferMemory_BarrierType,
        kImageMemory_BarrierType
    };
    void pipelineBarrier(const Resource* resource,
                         VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         bool byRegion,
                         PipelineBarrierType barrierType,
                         void* barrier);
    void submitPipelineBarriers(bool forSelfDependency = false);

    bool loadMSAAFromResolve(const RenderPassDesc&,
                             VulkanTexture& resolveTexture,
                             SkISize dstDimensions,
                             SkIRect nativeBounds);
    void nextSubpass();
    void setViewport(SkIRect viewport);

    VkCommandPool fPool;
    VkCommandBuffer fPrimaryCommandBuffer;
    const VulkanSharedContext* fSharedContext;
    VulkanResourceProvider* fResourceProvider;

    // begin() has been called, but not end()
    bool fActive = false;
    // Track whether there is currently an active render pass (beginRenderPass has been called,
    // but not endRenderPass)
    bool fActiveRenderPass = false;

    const VulkanGraphicsPipeline* fActiveGraphicsPipeline = nullptr;

    VkFence fSubmitFence = VK_NULL_HANDLE;

    // Current semaphores
    skia_private::STArray<1, VkSemaphore> fWaitSemaphores;
    skia_private::STArray<1, VkSemaphore> fSignalSemaphores;

    // Tracking of memory barriers so that we can submit them all in a batch together.
    skia_private::STArray<1, VkBufferMemoryBarrier> fBufferBarriers;
    skia_private::STArray<2, VkImageMemoryBarrier> fImageBarriers;
    bool fBarriersByRegion = false;
    VkPipelineStageFlags fSrcStageMask = 0;
    VkPipelineStageFlags fDstStageMask = 0;

    // Track whether certain descriptor sets need to be bound
    bool fBindUniformBuffers = false;
    bool fBindTextureSamplers = false;

    std::array<BindBufferInfo, VulkanGraphicsPipeline::kNumUniformBuffers> fUniformBuffersToBind;
    VkDescriptorSet fTextureSamplerDescSetToBind = VK_NULL_HANDLE;

    int fNumTextureSamplers = 0;

    VkBuffer fBoundInputBuffers[VulkanGraphicsPipeline::kNumInputBuffers];
    size_t fBoundInputBufferOffsets[VulkanGraphicsPipeline::kNumInputBuffers];

    VkBuffer fBoundIndexBuffer = VK_NULL_HANDLE;
    VkBuffer fBoundIndirectBuffer = VK_NULL_HANDLE;
    size_t fBoundIndexBufferOffset = 0;
    size_t fBoundIndirectBufferOffset = 0;

    float fCachedBlendConstant[4];
};

} // namespace skgpu::graphite

#endif // skgpu_graphite_VulkanCommandBuffer_DEFINED
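
/*
 * Usage sketch for the public API declared above: one possible create/record/submit sequence,
 * assuming a valid VulkanSharedContext*, VulkanResourceProvider*, and VkQueue are obtained
 * elsewhere from the backend context (names below are placeholders, not Graphite API).
 *
 *     std::unique_ptr<VulkanCommandBuffer> cmdBuffer =
 *             VulkanCommandBuffer::Make(sharedContext, resourceProvider, Protected::kNo);
 *     if (cmdBuffer && cmdBuffer->setNewCommandBufferResources()) {
 *         // ... record render passes and copies through the CommandBuffer interface ...
 *         if (cmdBuffer->submit(queue)) {
 *             cmdBuffer->waitUntilFinished();  // or poll isFinished()
 *         }
 *     }
 */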