/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "GrShape.h"
#include "SkExchange.h"
#include "SkTHash.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix, the
 * old entry gets evicted.
 */
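//
// A rough usage sketch (hypothetical caller code; 'contextUniqueID', 'proxyProvider', and
// 'purgeTime' stand in for values the owning renderer would supply):
//
//     GrCCPathCache cache(contextUniqueID);
//     cache.doPreFlushProcessing();  // Call once at the start of each flush.
//     // ... look up and draw cached paths via find() ...
//     cache.purgeEntriesOlderThan(proxyProvider, purgeTime);  // Evict stale entries, then...
//     cache.purgeInvalidatedAtlasTextures(proxyProvider);     // ...purge their atlas textures.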
class GrCCPathCache {
public:
    GrCCPathCache(uint32_t contextUniqueID);
    ~GrCCPathCache();

    class Key : public SkPathRef::GenIDChangeListener {
    public:
        static sk_sp<Key> Make(uint32_t pathCacheUniqueID, int dataCountU32,
                               const void* data = nullptr);

        uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

        int dataSizeInBytes() const { return fDataSizeInBytes; }
        const uint32_t* data() const;

        void resetDataCountU32(int dataCountU32) {
            SkASSERT(dataCountU32 <= fDataReserveCountU32);
            fDataSizeInBytes = dataCountU32 * sizeof(uint32_t);
        }
        uint32_t* data();

        bool operator==(const Key& that) const {
            return fDataSizeInBytes == that.fDataSizeInBytes &&
                   !memcmp(this->data(), that.data(), fDataSizeInBytes);
        }

        // Called when our corresponding path is modified or deleted. Not threadsafe.
        void onChange() override;

    private:
        Key(uint32_t pathCacheUniqueID, int dataCountU32)
                : fPathCacheUniqueID(pathCacheUniqueID)
                , fDataSizeInBytes(dataCountU32 * sizeof(uint32_t))
                SkDEBUGCODE(, fDataReserveCountU32(dataCountU32)) {
            SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
        }

        const uint32_t fPathCacheUniqueID;
        int fDataSizeInBytes;
        SkDEBUGCODE(const int fDataReserveCountU32);
        // The GrShape's unstyled key is stored as a variable-length footer to this class. GetKey
        // provides access to it.
    };
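
    // A hedged sketch of how a key might be assembled from a shape's unstyled key (hypothetical;
    // the real packing happens inside find() and also covers the MaskTransform):
    //
    //     int dataCountU32 = shape.unstyledKeySize();
    //     sk_sp<Key> key = Key::Make(fContextUniqueID, dataCountU32);
    //     shape.writeUnstyledKey(key->data());  // Fills the variable-length footer.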

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate are
    // shaved off and returned to the caller. The caller is responsible for those integer shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow after HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };
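
    // For example (illustrative values, assuming the translate is split at integer boundaries):
    // a view matrix that scales by 2x and translates by (10.75, 3.25) yields
    // fMatrix2x2 = {2, 0, 0, 2}, returns an integer shift of (10, 3) through 'shift', and keeps
    // fSubpixelTranslate = {0.75, 0.25} (on non-AOSP builds).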

    // Represents a ref on a GrCCPathCacheEntry that should only be used during the current flush.
    class OnFlushEntryRef : SkNoncopyable {
    public:
        static OnFlushEntryRef OnFlushRef(GrCCPathCacheEntry*);
        OnFlushEntryRef() = default;
        OnFlushEntryRef(OnFlushEntryRef&& ref) : fEntry(skstd::exchange(ref.fEntry, nullptr)) {}
        ~OnFlushEntryRef();

        GrCCPathCacheEntry* get() const { return fEntry; }
        GrCCPathCacheEntry* operator->() const { return fEntry; }
        GrCCPathCacheEntry& operator*() const { return *fEntry; }
        explicit operator bool() const { return fEntry; }
        void operator=(OnFlushEntryRef&& ref) { fEntry = skstd::exchange(ref.fEntry, nullptr); }

    private:
        OnFlushEntryRef(GrCCPathCacheEntry* entry) : fEntry(entry) {}
        GrCCPathCacheEntry* fEntry = nullptr;
    };

    // Finds an entry in the cache that matches the given shape and transformation matrix.
    // 'maskShift' is filled with an integer post-translate that the caller must apply when drawing
    // the entry's mask to the device.
    //
    // NOTE: Shapes are only given one entry, so any time they are accessed with a new
    // transformation, the old entry gets evicted.
    OnFlushEntryRef find(GrOnFlushResourceProvider*, const GrShape&,
                         const SkIRect& clippedDrawBounds, const SkMatrix& viewMatrix,
                         SkIVector* maskShift);
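
    // A hedged sketch of consuming 'maskShift' (hypothetical caller code; real draws go through
    // GrCCPathProcessor::Instance::set(), which applies the same shift):
    //
    //     SkIVector maskShift;
    //     if (OnFlushEntryRef entry = fPathCache->find(onFlushRP, shape, clippedDrawBounds,
    //                                                  viewMatrix, &maskShift)) {
    //         // The cached mask was rasterized with the integer translate removed, so offset
    //         // the entry's device-space bounds by 'maskShift' when drawing.
    //         SkIRect drawBounds = entry->devIBounds().makeOffset(maskShift.fX, maskShift.fY);
    //     }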

    void doPreFlushProcessing();

    void purgeEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point& purgeTime);

    // As we evict entries from our local path cache, we accumulate a list of invalidated atlas
    // textures. This call purges the invalidated atlas textures from the mainline GrResourceCache.
    // This call is available with two different "provider" objects, to accommodate whatever might
    // be available at the callsite.
    void purgeInvalidatedAtlasTextures(GrOnFlushResourceProvider*);
    void purgeInvalidatedAtlasTextures(GrProxyProvider*);

private:
    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry. Also, when a HashNode goes out of scope, that means it is exiting
    // the hash table. We take that opportunity to remove it from the LRU list and do some cleanup.
    class HashNode : SkNoncopyable {
    public:
        static const Key& GetKey(const HashNode&);
        inline static uint32_t Hash(const Key& key) {
            return GrResourceKeyHash(key.data(), key.dataSizeInBytes());
        }

        HashNode() = default;
        HashNode(GrCCPathCache*, sk_sp<Key>, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node)
                : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        ~HashNode();

        void operator=(HashNode&& node);

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        GrCCPathCache* fPathCache = nullptr;
        sk_sp<GrCCPathCacheEntry> fEntry;
    };

    GrStdSteadyClock::time_point quickPerFlushTimestamp() {
        // time_point::min() means it's time to update fPerFlushTimestamp with a newer clock read.
        if (GrStdSteadyClock::time_point::min() == fPerFlushTimestamp) {
            fPerFlushTimestamp = GrStdSteadyClock::now();
        }
        return fPerFlushTimestamp;
    }

    void evict(const GrCCPathCache::Key&, GrCCPathCacheEntry* = nullptr);

    // Evicts all the cache entries whose keys have been queued up in fInvalidatedKeysInbox via
    // SkPath listeners.
    void evictInvalidatedCacheKeys();

    const uint32_t fContextUniqueID;

    SkTHashTable<HashNode, const Key&> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<Key>>::Inbox fInvalidatedKeysInbox;
    sk_sp<Key> fScratchKey;  // Reused for creating a temporary key in the find() method.

    // We only read the clock once per flush and cache the result in this variable. This avoids
    // excessive clock reads for cache timestamps, which could degrade performance.
    GrStdSteadyClock::time_point fPerFlushTimestamp = GrStdSteadyClock::time_point::min();

    // As we evict entries from our local path cache, we accumulate lists of invalidated atlas
    // textures in these two members. We hold these until we purge them from the GrResourceCache
    // (e.g., via purgeInvalidatedAtlasTextures()).
    SkSTArray<4, sk_sp<GrTextureProxy>> fInvalidatedProxies;
    SkSTArray<4, GrUniqueKey> fInvalidatedProxyUniqueKeys;

    friend class GrCCCachedAtlas;  // To append to fInvalidatedProxies, fInvalidatedProxyUniqueKeys.

public:
    const SkTHashTable<HashNode, const Key&>& testingOnly_getHashTable() const;
    const SkTInternalLList<GrCCPathCacheEntry>& testingOnly_getLRU() const;
};

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public GrNonAtomicRef<GrCCPathCacheEntry> {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() {
        SkASSERT(this->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
        SkASSERT(!fCachedAtlas);
        SkASSERT(0 == fOnFlushRefCnt);
    }

    const GrCCPathCache::Key& cacheKey() const { SkASSERT(fCacheKey); return *fCacheKey; }

    // The number of flushes during which this specific entry (path + matrix combination) has been
    // pulled from the path cache. If a path is pulled from the cache more than once in a single
    // flush, the hit count is only incremented once.
    //
    // If the entry did not previously exist, its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // The cumulative region of the path that has been drawn during the lifetime of this cache
    // entry (as defined by the 'clippedDrawBounds' parameter for GrCCPathCache::find).
    const SkIRect& hitRect() const { return fHitRect; }

    const GrCCCachedAtlas* cachedAtlas() const { return fCachedAtlas.get(); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    enum class ReleaseAtlasResult : bool {
        kNone,
        kDidInvalidateFromCache
    };

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void setCoverageCountAtlas(GrOnFlushResourceProvider*, GrCCAtlas*, const SkIVector& atlasOffset,
                               const SkRect& devBounds, const SkRect& devBounds45,
                               const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the GrCCCachedAtlas data.
    ReleaseAtlasResult upgradeToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                                     GrCCAtlas*, const SkIVector& newAtlasOffset);
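
    // A hedged sketch of the two-phase atlas lifecycle described above (hypothetical
    // orchestration; the real sequencing lives in the CCPR flush code):
    //
    //     // Flush N: the path is first rendered into a coverage-count atlas.
    //     entry->setCoverageCountAtlas(onFlushRP, atlas, atlasOffset, devBounds, devBounds45,
    //                                  devIBounds, maskShift);
    //
    //     // Flush N+1: the path got reused, so its mask is copied into a permanent 8-bit atlas.
    //     if (ReleaseAtlasResult::kDidInvalidateFromCache ==
    //             entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, a8Atlas,
    //                                                  newAtlasOffset)) {
    //         // The old coverage-count texture was purged from the resource cache.
    //     }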

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(sk_sp<GrCCPathCache::Key> cacheKey, const MaskTransform& maskTransform)
            : fCacheKey(std::move(cacheKey)), fMaskTransform(maskTransform) {
    }

    bool hasBeenEvicted() const { return fCacheKey->shouldUnregisterFromPath(); }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from the
    // resource cache if needed.
    ReleaseAtlasResult releaseCachedAtlas(GrCCPathCache*);

    sk_sp<GrCCPathCache::Key> fCacheKey;
    GrStdSteadyClock::time_point fTimestamp;
    int fHitCount = 0;
    SkIRect fHitRect = SkIRect::MakeEmpty();

    sk_sp<GrCCCachedAtlas> fCachedAtlas;
    SkIVector fAtlasOffset;

    MaskTransform fMaskTransform;
    SkRect fDevBounds;
    SkRect fDevBounds45;
    SkIRect fDevIBounds;

    int fOnFlushRefCnt = 0;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 uint64_t color, DoEvenOddFill);  // To access data.

public:
    int testingOnly_peekOnFlushRefCnt() const;
};

/**
 * Encapsulates the data for an atlas whose texture is stored in the mainline GrResourceCache. Many
 * instances of GrCCPathCacheEntry will reference the same GrCCCachedAtlas.
 *
 * We use this object to track the percentage of the original atlas pixels that could still ever
 * potentially be reused (i.e., those which still represent an extant path). When the percentage
 * of useful pixels drops below 50%, we purge the entire texture from the resource cache.
 *
 * This object also holds a ref on the atlas's actual texture proxy during flush. When
 * fOnFlushRefCnt decrements back down to zero, we release fOnFlushProxy and reset it back to null.
 */
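//
// For example (illustrative numbers): if paths covering 60,000 of an atlas's 100,000 original
// path pixels have been invalidated, only 40% of the texture remains potentially reusable, so
// the whole texture would be purged from the resource cache.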
class GrCCCachedAtlas : public GrNonAtomicRef<GrCCCachedAtlas> {
public:
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;

    GrCCCachedAtlas(GrCCAtlas::CoverageType type, const GrUniqueKey& textureKey,
                    sk_sp<GrTextureProxy> onFlushProxy)
            : fCoverageType(type)
            , fTextureKey(textureKey)
            , fOnFlushProxy(std::move(onFlushProxy)) {}

    ~GrCCCachedAtlas() {
        SkASSERT(!fOnFlushProxy);
        SkASSERT(!fOnFlushRefCnt);
    }

    GrCCAtlas::CoverageType coverageType() const { return fCoverageType; }
    const GrUniqueKey& textureKey() const { return fTextureKey; }

    GrTextureProxy* getOnFlushProxy() const { return fOnFlushProxy.get(); }

    void setOnFlushProxy(sk_sp<GrTextureProxy> proxy) {
        SkASSERT(!fOnFlushProxy);
        fOnFlushProxy = std::move(proxy);
    }

    void addPathPixels(int numPixels) { fNumPathPixels += numPixels; }
    ReleaseAtlasResult invalidatePathPixels(GrCCPathCache*, int numPixels);

    int peekOnFlushRefCnt() const { return fOnFlushRefCnt; }
    void incrOnFlushRefCnt(int count = 1) const {
        SkASSERT(count > 0);
        SkASSERT(fOnFlushProxy);
        fOnFlushRefCnt += count;
    }
    void decrOnFlushRefCnt(int count = 1) const;

private:
    const GrCCAtlas::CoverageType fCoverageType;
    const GrUniqueKey fTextureKey;

    int fNumPathPixels = 0;
    int fNumInvalidatedPathPixels = 0;
    bool fIsInvalidatedFromResourceCache = false;

    mutable sk_sp<GrTextureProxy> fOnFlushProxy;
    mutable int fOnFlushRefCnt = 0;

public:
    int testingOnly_peekOnFlushRefCnt() const;
};


inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* pathCache, sk_sp<Key> key,
                                         const MaskTransform& m, const GrShape& shape)
        : fPathCache(pathCache)
        , fEntry(new GrCCPathCacheEntry(key, m)) {
    SkASSERT(shape.hasUnstyledKey());
    shape.addGenIDChangeListener(std::move(key));
}

inline const GrCCPathCache::Key& GrCCPathCache::HashNode::GetKey(
        const GrCCPathCache::HashNode& node) {
    return *node.entry()->fCacheKey;
}

inline GrCCPathCache::HashNode::~HashNode() {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
}

inline void GrCCPathCache::HashNode::operator=(HashNode&& node) {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
    fEntry = skstd::exchange(node.fEntry, nullptr);
}

inline void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry& entry,
                                             const SkIVector& shift, uint64_t color,
                                             DoEvenOddFill doEvenOddFill) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fDevBounds.makeOffset(dx, dy), MakeOffset45(entry.fDevBounds45, dx, dy),
              entry.fAtlasOffset - shift, color, doEvenOddFill);
}

#endif