/*
 * Copyright 2019 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrRenderTask_DEFINED
#define GrRenderTask_DEFINED

#include "include/core/SkRefCnt.h"
#include "include/private/base/SkTArray.h"
#include "src/base/SkTInternalLList.h"
#include "src/gpu/ganesh/GrSurfaceProxyView.h"
#include "src/gpu/ganesh/GrTextureProxy.h"
#include "src/gpu/ganesh/GrTextureResolveManager.h"

class GrMockRenderTask;
class GrOpFlushState;
class GrResourceAllocator;
class GrTextureResolveRenderTask;
namespace skgpu { namespace v1 { class OpsTask; }}

// This class abstracts a task that targets a single GrSurfaceProxy, participates in the
// GrDrawingManager's DAG, and implements the onExecute method to modify its target proxy's
// contents. (e.g., an opsTask that executes a command buffer, a task to regenerate mipmaps, etc.)
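//
// A minimal sketch of a hypothetical subclass, illustrating the hooks a concrete task is
// expected to implement (the class name and bodies below are illustrative only, not an actual
// Skia task; the debug- and test-only pure virtuals are elided):
//
//   class HypotheticalClearTask final : public GrRenderTask {
//   public:
//       void gatherProxyIntervals(GrResourceAllocator*) const override { /* declare proxy use */ }
//
//   private:
//       ExpectedOutcome onMakeClosed(GrRecordingContext*, SkIRect* targetUpdateBounds) override {
//           *targetUpdateBounds = SkIRect::MakeSize(this->target(0)->dimensions());
//           return ExpectedOutcome::kTargetDirty;
//       }
//       bool onIsUsed(GrSurfaceProxy*) const override { return false; }
//       bool onExecute(GrOpFlushState*) override { /* record/submit GPU work */ return true; }
//   };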
class GrRenderTask : public SkRefCnt {
public:
    GrRenderTask();
    SkDEBUGCODE(~GrRenderTask() override);

    void makeClosed(GrRecordingContext*);

    void prePrepare(GrRecordingContext* context) { this->onPrePrepare(context); }

    // These two methods are only invoked at flush time
    void prepare(GrOpFlushState* flushState);
    bool execute(GrOpFlushState* flushState) { return this->onExecute(flushState); }

    virtual bool requiresExplicitCleanup() const { return false; }

    // Called when this class will survive a flush and needs to truncate its ops and start over.
    // TODO: ultimately it should be invalid for an op list to survive a flush.
    // https://bugs.chromium.org/p/skia/issues/detail?id=7111
    virtual void endFlush(GrDrawingManager*) {}

    // This method "disowns" all the GrSurfaceProxies this RenderTask modifies. In
    // practice this just means telling the drawingManager to forget the relevant
    // mappings from surface proxy to last modifying rendertask.
    virtual void disown(GrDrawingManager*);

    bool isClosed() const { return this->isSetFlag(kClosed_Flag); }

    /**
     * Make this task skippable. At this point, this must be used purely as an optimization,
     * since not all tasks will actually skip their work. It would be better if we could detect
     * tasks that can be skipped automatically. We'd need to support minimal flushes (i.e., only
     * flush that which is required for SkSurfaces/SkImages) and the ability to detect "orphaned
     * tasks" and clean them out from the DAG so they don't indefinitely accumulate. Finally,
     * we'd probably have to track whether a proxy's backing store was imported or ever exported
     * to the client in case the client is doing direct reads outside of Skia and thus may
     * require tasks targeting the proxy to execute even if our DAG contains no reads.
     */
    void makeSkippable();

    bool isSkippable() const { return this->isSetFlag(kSkippable_Flag); }

    /** If true, no other task should be reordered relative to this task. */
    bool blocksReordering() const { return this->isSetFlag(kBlocksReordering_Flag); }

    /*
     * Notify this GrRenderTask that it relies on the contents of 'dependedOn'.
     */
    void addDependency(GrDrawingManager*, GrSurfaceProxy* dependedOn, GrMipmapped,
                       GrTextureResolveManager, const GrCaps& caps);

    /*
     * Notify this GrRenderTask that it relies on the contents of all GrRenderTasks which
     * otherTask depends on.
     */
    void addDependenciesFromOtherTask(GrRenderTask* otherTask);
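    // For illustration (hypothetical call, not a prescribed sequence): a task that samples the
    // contents of 'srcProxy' would typically be wired up roughly as
    //
    //   task->addDependency(drawingMgr, srcProxy, GrMipmapped::kNo, resolveManager, caps);
    //
    // which adds the task that last rendered to 'srcProxy' to this task's fDependencies and
    // registers this task in that producer's fDependents (see the private helpers below).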
    SkSpan<GrRenderTask*> dependencies() { return SkSpan(fDependencies); }
    SkSpan<GrRenderTask*> dependents() { return SkSpan(fDependents); }

    void replaceDependency(const GrRenderTask* toReplace, GrRenderTask* replaceWith);
    void replaceDependent(const GrRenderTask* toReplace, GrRenderTask* replaceWith);

    /*
     * Does this renderTask depend on 'dependedOn'?
     */
    bool dependsOn(const GrRenderTask* dependedOn) const;

    uint32_t uniqueID() const { return fUniqueID; }
    int numTargets() const { return fTargets.size(); }
    GrSurfaceProxy* target(int i) const { return fTargets[i].get(); }

    /*
     * Safely cast this GrRenderTask to an OpsTask (if possible).
     */
    virtual skgpu::v1::OpsTask* asOpsTask() { return nullptr; }

#if GR_TEST_UTILS
    /*
     * Dump out the GrRenderTask dependency DAG
     */
    virtual void dump(const SkString& label,
                      SkString indent,
                      bool printDependencies,
                      bool close) const;
    virtual const char* name() const = 0;
#endif

#ifdef SK_DEBUG
    virtual int numClips() const { return 0; }

    virtual void visitProxies_debugOnly(const GrVisitProxyFunc&) const = 0;

    void visitTargetAndSrcProxies_debugOnly(const GrVisitProxyFunc& func) const {
        this->visitProxies_debugOnly(func);
        for (const sk_sp<GrSurfaceProxy>& target : fTargets) {
            func(target.get(), GrMipmapped::kNo);
        }
    }
#endif

    bool isUsed(GrSurfaceProxy* proxy) const {
        for (const sk_sp<GrSurfaceProxy>& target : fTargets) {
            if (target.get() == proxy) {
                return true;
            }
        }

        return this->onIsUsed(proxy);
    }

    // Feed proxy usage intervals to the GrResourceAllocator class
    virtual void gatherProxyIntervals(GrResourceAllocator*) const = 0;

    // In addition to just the GrSurface being allocated, has the stencil buffer been allocated
    // (if it is required)?
    bool isInstantiated() const;

    // Used by GrRenderTaskCluster.
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrRenderTask);

#if GR_TEST_UTILS
    const GrTextureResolveRenderTask* resolveTask() const { return fTextureResolveTask; }
#endif
protected:
    SkDEBUGCODE(bool deferredProxiesAreInstantiated() const;)

    // Add a target surface proxy to the list of targets for this task.
    // This also informs the drawing manager to update the lastRenderTask association.
    void addTarget(GrDrawingManager*, sk_sp<GrSurfaceProxy>);

    // Helper that adds the proxy owned by a view.
    void addTarget(GrDrawingManager* dm, const GrSurfaceProxyView& view) {
        this->addTarget(dm, view.refProxy());
    }

    enum class ExpectedOutcome : bool {
        kTargetUnchanged,
        kTargetDirty,
    };

    // Performs any work to finalize this renderTask prior to execution. If returning
    // ExpectedOutcome::kTargetDirty, the caller is also responsible for filling out the area it
    // will modify in targetUpdateBounds.
    //
    // targetUpdateBounds must not extend beyond the proxy bounds.
    virtual ExpectedOutcome onMakeClosed(GrRecordingContext*, SkIRect* targetUpdateBounds) = 0;

    SkSTArray<1, sk_sp<GrSurfaceProxy>> fTargets;

    // List of texture proxies whose contents are being prepared on a worker thread
    // TODO: this list exists so we can fire off the proper upload when a renderTask begins
    // executing. Can this be replaced?
    SkTArray<GrTextureProxy*, true> fDeferredProxies;

    enum Flags {
        kClosed_Flag           = 0x01,  //!< This task can't accept any more dependencies.
        kDisowned_Flag         = 0x02,  //!< This task is disowned by its GrDrawingManager.
        kSkippable_Flag        = 0x04,  //!< This task is skippable.
        kAtlas_Flag            = 0x08,  //!< This task is an atlas task.
        kBlocksReordering_Flag = 0x10,  //!< No task can be reordered with respect to this task.

        kWasOutput_Flag        = 0x20,  //!< Flag for topological sorting
        kTempMark_Flag         = 0x40,  //!< Flag for topological sorting
    };
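    // Note: the flag bits above occupy the low 7 bits of fFlags (0x01 through 0x40). setIndex()
    // below packs the topological-sort index into the remaining upper bits (hence the shift by 7
    // and the assert that the index fits in 25 bits), and getIndex() unpacks it again.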
    void setFlag(uint32_t flag) {
        fFlags |= flag;
    }

    void resetFlag(uint32_t flag) {
        fFlags &= ~flag;
    }

    bool isSetFlag(uint32_t flag) const {
        return SkToBool(fFlags & flag);
    }

    void setIndex(uint32_t index) {
        SkASSERT(!this->isSetFlag(kWasOutput_Flag));
        SkASSERT(index < (1 << 25));
        fFlags |= index << 7;
    }

    uint32_t getIndex() const {
        SkASSERT(this->isSetFlag(kWasOutput_Flag));
        return fFlags >> 7;
    }

private:
    // for TopoSortTraits, fTextureResolveTask, addDependency
    friend class GrDrawingManager;
    friend class GrMockRenderTask;

    // Derived classes can override to indicate usage of proxies _other than target proxies_.
    // GrRenderTask itself will handle checking the target proxies.
    virtual bool onIsUsed(GrSurfaceProxy*) const = 0;

    void addDependency(GrRenderTask* dependedOn);
    void addDependent(GrRenderTask* dependent);
    SkDEBUGCODE(bool isDependent(const GrRenderTask* dependent) const;)
    SkDEBUGCODE(void validate() const;)

    static uint32_t CreateUniqueID();

    struct TopoSortTraits {
        static uint32_t GetIndex(GrRenderTask* renderTask) {
            return renderTask->getIndex();
        }
        static void Output(GrRenderTask* renderTask, uint32_t index) {
            renderTask->setIndex(index);
            renderTask->setFlag(kWasOutput_Flag);
        }
        static bool WasOutput(const GrRenderTask* renderTask) {
            return renderTask->isSetFlag(kWasOutput_Flag);
        }
        static void SetTempMark(GrRenderTask* renderTask) {
            renderTask->setFlag(kTempMark_Flag);
        }
        static void ResetTempMark(GrRenderTask* renderTask) {
            renderTask->resetFlag(kTempMark_Flag);
        }
        static bool IsTempMarked(const GrRenderTask* renderTask) {
            return renderTask->isSetFlag(kTempMark_Flag);
        }
        static int NumDependencies(const GrRenderTask* renderTask) {
            return renderTask->fDependencies.size();
        }
        static GrRenderTask* Dependency(GrRenderTask* renderTask, int index) {
            return renderTask->fDependencies[index];
        }
    };

    virtual void onMakeSkippable() {}
    virtual void onPrePrepare(GrRecordingContext*) {}  // Only OpsTask currently overrides this
    virtual void onPrepare(GrOpFlushState*) {}         // OpsTask and GrDDLTask override this
    virtual bool onExecute(GrOpFlushState* flushState) = 0;

    const uint32_t fUniqueID;
    uint32_t       fFlags;

    // 'this' GrRenderTask relies on the output of the GrRenderTasks in 'fDependencies'
    SkSTArray<1, GrRenderTask*, true> fDependencies;
    // 'this' GrRenderTask's output is relied on by the GrRenderTasks in 'fDependents'
    SkSTArray<1, GrRenderTask*, true> fDependents;

    // For performance reasons, we should perform texture resolves back-to-back as much as
    // possible. (http://skbug.com/9406). To accomplish this, we make and reuse one single
    // resolve task for each render task, then add it as a dependency during makeClosed().
    GrTextureResolveRenderTask* fTextureResolveTask = nullptr;

    SkDEBUGCODE(GrDrawingManager* fDrawingMgr = nullptr;)
};

#endif