/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "include/private/SkTHash.h"
#include "src/core/SkExchange.h"
#include "src/core/SkTInternalLList.h"
#include "src/gpu/ccpr/GrCCAtlas.h"
#include "src/gpu/ccpr/GrCCPathProcessor.h"
#include "src/gpu/geometry/GrShape.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix, the
 * old entry gets evicted.
 */
class GrCCPathCache {
public:
    GrCCPathCache(uint32_t contextUniqueID);
    ~GrCCPathCache();

    class Key : public SkPathRef::GenIDChangeListener {
    public:
        static sk_sp<Key> Make(uint32_t pathCacheUniqueID, int dataCountU32,
                               const void* data = nullptr);

        uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

        int dataSizeInBytes() const { return fDataSizeInBytes; }
        const uint32_t* data() const;

        void resetDataCountU32(int dataCountU32) {
            SkASSERT(dataCountU32 <= fDataReserveCountU32);
            fDataSizeInBytes = dataCountU32 * sizeof(uint32_t);
        }
        uint32_t* data();

        bool operator==(const Key& that) const {
            return fDataSizeInBytes == that.fDataSizeInBytes &&
                   !memcmp(this->data(), that.data(), fDataSizeInBytes);
        }

        // Called when our corresponding path is modified or deleted. Not threadsafe.
        void onChange() override;

    private:
        Key(uint32_t pathCacheUniqueID, int dataCountU32)
                : fPathCacheUniqueID(pathCacheUniqueID)
                , fDataSizeInBytes(dataCountU32 * sizeof(uint32_t))
                SkDEBUGCODE(, fDataReserveCountU32(dataCountU32)) {
            SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
        }

        const uint32_t fPathCacheUniqueID;
        int fDataSizeInBytes;
        SkDEBUGCODE(const int fDataReserveCountU32);
        // The GrShape's unstyled key is stored as a variable-length footer to this class. GetKey
        // provides access to it.
    };
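
    // Construction sketch (hypothetical; assumes 'shape' is a GrShape for which
    // shape.hasUnstyledKey() is true, and 'pathCacheUniqueID' comes from the owning cache).
    // The shape's unstyled key becomes the variable-length data footer:
    //
    //     int dataCountU32 = shape.unstyledKeySize();
    //     sk_sp<Key> key = Key::Make(pathCacheUniqueID, dataCountU32);
    //     shape.writeUnstyledKey(key->data());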

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate are
    // shaved off and returned to the caller. The caller is responsible for those integer shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };
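
    // A minimal usage sketch (hypothetical caller code; 'viewMatrix' is assumed to be the draw's
    // view matrix). The constructor splits the matrix into the mask-affecting components stored
    // here and an integer device-space shift returned through 'shift':
    //
    //     SkIVector shift;
    //     GrCCPathCache::MaskTransform mt(viewMatrix, &shift);
    //     // Draw the cached mask translated by 'shift' in device space.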

    // Represents a ref on a GrCCPathCacheEntry that should only be used during the current flush.
    class OnFlushEntryRef : SkNoncopyable {
    public:
        static OnFlushEntryRef OnFlushRef(GrCCPathCacheEntry*);
        OnFlushEntryRef() = default;
        OnFlushEntryRef(OnFlushEntryRef&& ref) : fEntry(skstd::exchange(ref.fEntry, nullptr)) {}
        ~OnFlushEntryRef();

        GrCCPathCacheEntry* get() const { return fEntry; }
        GrCCPathCacheEntry* operator->() const { return fEntry; }
        GrCCPathCacheEntry& operator*() const { return *fEntry; }
        explicit operator bool() const { return fEntry; }
        void operator=(OnFlushEntryRef&& ref) { fEntry = skstd::exchange(ref.fEntry, nullptr); }

    private:
        OnFlushEntryRef(GrCCPathCacheEntry* entry) : fEntry(entry) {}
        GrCCPathCacheEntry* fEntry = nullptr;
    };

    // Finds an entry in the cache that matches the given shape and transformation matrix.
    // 'maskShift' is filled with an integer post-translate that the caller must apply when drawing
    // the entry's mask to the device.
    //
    // NOTE: Shapes are only given one entry, so any time they are accessed with a new
    // transformation, the old entry gets evicted.
    OnFlushEntryRef find(GrOnFlushResourceProvider*, const GrShape&,
                         const SkIRect& clippedDrawBounds, const SkMatrix& viewMatrix,
                         SkIVector* maskShift);
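
    // Example call site (a sketch; 'onFlushRP', 'shape', 'clippedDrawBounds', and 'viewMatrix'
    // are hypothetical names from the caller's context). The returned OnFlushEntryRef drops its
    // ref automatically when it goes out of scope, i.e. by the end of the flush:
    //
    //     SkIVector maskShift;
    //     if (OnFlushEntryRef ref = fPathCache.find(onFlushRP, shape, clippedDrawBounds,
    //                                               viewMatrix, &maskShift)) {
    //         // Cache hit: draw ref->cachedAtlas(), offset by 'maskShift' in device space.
    //     }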

    void doPreFlushProcessing();

    void purgeEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point& purgeTime);

    // As we evict entries from our local path cache, we accumulate a list of invalidated atlas
    // textures. This call purges the invalidated atlas textures from the mainline GrResourceCache.
    // It is available with two different "provider" objects, to accommodate whatever might be
    // available at the call site.
    void purgeInvalidatedAtlasTextures(GrOnFlushResourceProvider*);
    void purgeInvalidatedAtlasTextures(GrProxyProvider*);

private:
    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry. Also, when a HashNode goes out of scope, that means it is exiting
    // the hash table. We take that opportunity to remove it from the LRU list and do some cleanup.
    class HashNode : SkNoncopyable {
    public:
        static const Key& GetKey(const HashNode&);
        inline static uint32_t Hash(const Key& key) {
            return GrResourceKeyHash(key.data(), key.dataSizeInBytes());
        }

        HashNode() = default;
        HashNode(GrCCPathCache*, sk_sp<Key>, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node)
                : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        ~HashNode();

        void operator=(HashNode&& node);

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        GrCCPathCache* fPathCache = nullptr;
        sk_sp<GrCCPathCacheEntry> fEntry;
    };

    GrStdSteadyClock::time_point quickPerFlushTimestamp() {
        // time_point::min() means it's time to update fPerFlushTimestamp with a newer clock read.
        if (GrStdSteadyClock::time_point::min() == fPerFlushTimestamp) {
            fPerFlushTimestamp = GrStdSteadyClock::now();
        }
        return fPerFlushTimestamp;
    }

    void evict(const GrCCPathCache::Key&, GrCCPathCacheEntry* = nullptr);

    // Evicts all the cache entries whose keys have been queued up in fInvalidatedKeysInbox via
    // SkPath listeners.
    void evictInvalidatedCacheKeys();

    const uint32_t fContextUniqueID;

    SkTHashTable<HashNode, const Key&> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<Key>>::Inbox fInvalidatedKeysInbox;
    sk_sp<Key> fScratchKey;  // Reused for creating a temporary key in the find() method.

    // We only read the clock once per flush and cache it in this variable. This prevents
    // excessive clock reads for cache timestamps, which could degrade performance.
    GrStdSteadyClock::time_point fPerFlushTimestamp = GrStdSteadyClock::time_point::min();

    // As we evict entries from our local path cache, we accumulate lists of invalidated atlas
    // textures in these two members. We hold these until we purge them from the GrResourceCache
    // (e.g., via purgeInvalidatedAtlasTextures()).
    SkSTArray<4, sk_sp<GrTextureProxy>> fInvalidatedProxies;
    SkSTArray<4, GrUniqueKey> fInvalidatedProxyUniqueKeys;

    friend class GrCCCachedAtlas;  // To append to fInvalidatedProxies, fInvalidatedProxyUniqueKeys.

public:
    const SkTHashTable<HashNode, const Key&>& testingOnly_getHashTable() const;
    const SkTInternalLList<GrCCPathCacheEntry>& testingOnly_getLRU() const;
};

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public GrNonAtomicRef<GrCCPathCacheEntry> {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() {
        SkASSERT(this->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
        SkASSERT(!fCachedAtlas);
        SkASSERT(0 == fOnFlushRefCnt);
    }

    const GrCCPathCache::Key& cacheKey() const { SkASSERT(fCacheKey); return *fCacheKey; }

    // The number of flushes during which this specific entry (path + matrix combination) has been
    // pulled from the path cache. If a path is pulled from the cache more than once in a single
    // flush, the hit count is only incremented once.
    //
    // If the entry did not previously exist, its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // The cumulative region of the path that has been drawn during the lifetime of this cache
    // entry (as defined by the 'clippedDrawBounds' parameter for GrCCPathCache::find).
    const SkIRect& hitRect() const { return fHitRect; }

    const GrCCCachedAtlas* cachedAtlas() const { return fCachedAtlas.get(); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    enum class ReleaseAtlasResult : bool {
        kNone,
        kDidInvalidateFromCache
    };

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void setCoverageCountAtlas(
            GrOnFlushResourceProvider*, GrCCAtlas*, const SkIVector& atlasOffset,
            const GrOctoBounds& octoBounds, const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the GrCCCachedAtlas data.
    ReleaseAtlasResult upgradeToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                                     GrCCAtlas*, const SkIVector& newAtlasOffset);
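
    // Lifecycle sketch (hypothetical flush code; 'onFlushRP', 'fp16Atlas', 'atlas8bit', and the
    // geometry arguments are assumed names, not part of this API):
    //
    //     // First flush: the path is rendered into the fp16 coverage-count atlas.
    //     entry->setCoverageCountAtlas(onFlushRP, fp16Atlas, atlasOffset, octoBounds, devIBounds,
    //                                  maskShift);
    //     // A later flush, if the path got reused: promote it to a permanent 8-bit atlas.
    //     entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, atlas8bit, newAtlasOffset);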

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(sk_sp<GrCCPathCache::Key> cacheKey, const MaskTransform& maskTransform)
            : fCacheKey(std::move(cacheKey)), fMaskTransform(maskTransform) {
    }

    bool hasBeenEvicted() const { return fCacheKey->shouldUnregisterFromPath(); }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from the
    // resource cache if needed.
    ReleaseAtlasResult releaseCachedAtlas(GrCCPathCache*);

    sk_sp<GrCCPathCache::Key> fCacheKey;
    GrStdSteadyClock::time_point fTimestamp;
    int fHitCount = 0;
    SkIRect fHitRect = SkIRect::MakeEmpty();

    sk_sp<GrCCCachedAtlas> fCachedAtlas;
    SkIVector fAtlasOffset;

    MaskTransform fMaskTransform;
    GrOctoBounds fOctoBounds;
    SkIRect fDevIBounds;

    int fOnFlushRefCnt = 0;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 uint64_t color, GrFillRule);  // To access data.

public:
    int testingOnly_peekOnFlushRefCnt() const;
};

/**
 * Encapsulates the data for an atlas whose texture is stored in the mainline GrResourceCache. Many
 * instances of GrCCPathCacheEntry will reference the same GrCCCachedAtlas.
 *
 * We use this object to track the percentage of the original atlas pixels that could still ever
 * potentially be reused (i.e., those which still represent an extant path). When the percentage
 * of useful pixels drops below 50%, we purge the entire texture from the resource cache.
 *
 * This object also holds a ref on the atlas's actual texture proxy during flush. When
 * fOnFlushRefCnt decrements back down to zero, we release fOnFlushProxy and reset it back to null.
 */
class GrCCCachedAtlas : public GrNonAtomicRef<GrCCCachedAtlas> {
public:
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;

    GrCCCachedAtlas(GrCCAtlas::CoverageType type, const GrUniqueKey& textureKey,
                    sk_sp<GrTextureProxy> onFlushProxy)
            : fCoverageType(type)
            , fTextureKey(textureKey)
            , fOnFlushProxy(std::move(onFlushProxy)) {}

    ~GrCCCachedAtlas() {
        SkASSERT(!fOnFlushProxy);
        SkASSERT(!fOnFlushRefCnt);
    }

    GrCCAtlas::CoverageType coverageType() const { return fCoverageType; }
    const GrUniqueKey& textureKey() const { return fTextureKey; }

    GrTextureProxy* getOnFlushProxy() const { return fOnFlushProxy.get(); }

    void setOnFlushProxy(sk_sp<GrTextureProxy> proxy) {
        SkASSERT(!fOnFlushProxy);
        fOnFlushProxy = std::move(proxy);
    }

    void addPathPixels(int numPixels) { fNumPathPixels += numPixels; }
    ReleaseAtlasResult invalidatePathPixels(GrCCPathCache*, int numPixels);

    int peekOnFlushRefCnt() const { return fOnFlushRefCnt; }
    void incrOnFlushRefCnt(int count = 1) const {
        SkASSERT(count > 0);
        SkASSERT(fOnFlushProxy);
        fOnFlushRefCnt += count;
    }
    void decrOnFlushRefCnt(int count = 1) const;

private:
    const GrCCAtlas::CoverageType fCoverageType;
    const GrUniqueKey fTextureKey;

    int fNumPathPixels = 0;
    int fNumInvalidatedPathPixels = 0;
    bool fIsInvalidatedFromResourceCache = false;

    mutable sk_sp<GrTextureProxy> fOnFlushProxy;
    mutable int fOnFlushRefCnt = 0;

public:
    int testingOnly_peekOnFlushRefCnt() const;
};
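
// A sketch of the 50% purge heuristic described in the class comment above (not the actual body
// of invalidatePathPixels; the member names are real, but the control flow is illustrative):
//
//     fNumInvalidatedPathPixels += numPixels;
//     if (fNumInvalidatedPathPixels >= fNumPathPixels / 2) {
//         // Under half the atlas's pixels still represent extant paths; purge the whole
//         // texture from the GrResourceCache and return kDidInvalidateFromCache.
//     }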


inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* pathCache, sk_sp<Key> key,
                                         const MaskTransform& m, const GrShape& shape)
        : fPathCache(pathCache)
        , fEntry(new GrCCPathCacheEntry(key, m)) {
    SkASSERT(shape.hasUnstyledKey());
    shape.addGenIDChangeListener(std::move(key));
}

inline const GrCCPathCache::Key& GrCCPathCache::HashNode::GetKey(
        const GrCCPathCache::HashNode& node) {
    return *node.entry()->fCacheKey;
}

inline GrCCPathCache::HashNode::~HashNode() {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
}

inline void GrCCPathCache::HashNode::operator=(HashNode&& node) {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
    fEntry = skstd::exchange(node.fEntry, nullptr);
}

inline void GrCCPathProcessor::Instance::set(
        const GrCCPathCacheEntry& entry, const SkIVector& shift, uint64_t color,
        GrFillRule fillRule) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fOctoBounds.makeOffset(dx, dy), entry.fAtlasOffset - shift, color, fillRule);
}

#endif