/*
 * Copyright (C) 2012 Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "platform/graphics/ImageDecodingStore.h"

#include "platform/TraceEvent.h"
#include "wtf/Threading.h"

namespace WebCore {

namespace {

// Allow up to 256 MB of discardable entries. The previous limit allowed up to
// 128 entries (regardless of their size), which caused OOM errors on websites
// with many (very) large images.
static const size_t maxTotalSizeOfDiscardableEntries = 256 * 1024 * 1024;
static const size_t defaultMaxTotalSizeOfHeapEntries = 32 * 1024 * 1024;
static bool s_imageCachingEnabled = true;

} // namespace

ImageDecodingStore::ImageDecodingStore()
    : m_heapLimitInBytes(defaultMaxTotalSizeOfHeapEntries)
    , m_heapMemoryUsageInBytes(0)
    , m_discardableMemoryUsageInBytes(0)
{
}

ImageDecodingStore::~ImageDecodingStore()
{
#ifndef NDEBUG
    setCacheLimitInBytes(0);
    ASSERT(!m_imageCacheMap.size());
    ASSERT(!m_decoderCacheMap.size());
    ASSERT(!m_orderedCacheList.size());
    ASSERT(!m_imageCacheKeyMap.size());
    ASSERT(!m_decoderCacheKeyMap.size());
#endif
}

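// Singleton accessor. The store is created on first use and intentionally
// leaked (never destroyed) so that it is available for the lifetime of the
// process.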
ImageDecodingStore* ImageDecodingStore::instance()
{
    AtomicallyInitializedStatic(ImageDecodingStore*, store = ImageDecodingStore::create().leakPtr());
    return store;
}

void ImageDecodingStore::setImageCachingEnabled(bool enabled)
{
    s_imageCachingEnabled = enabled;
}

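// Looks up the cached complete image for (generator, scaledSize, index).
// On success the bitmap pixels are locked, the entry's use count is
// incremented and *cachedImage points at the cached fragment; callers must
// balance this with unlockCache(). Returns false if no entry exists or its
// discardable memory has been purged.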
bool ImageDecodingStore::lockCache(const ImageFrameGenerator* generator, const SkISize& scaledSize, size_t index, const ScaledImageFragment** cachedImage)
{
    ASSERT(cachedImage);

    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);
        // Public access is restricted to complete images only.
        ImageCacheMap::iterator iter = m_imageCacheMap.find(ImageCacheEntry::makeCacheKey(generator, scaledSize, index, ScaledImageFragment::CompleteImage));
        if (iter == m_imageCacheMap.end())
            return false;
        return lockCacheEntryInternal(iter->value.get(), cachedImage, &cacheEntriesToDelete);
    }
}

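// Releases an image previously obtained through lockCache() or
// insertAndLockCache(): unlocks the bitmap pixels, decrements the use count
// and moves the entry to the most recently used end of the LRU list.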
void ImageDecodingStore::unlockCache(const ImageFrameGenerator* generator, const ScaledImageFragment* cachedImage)
{
    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);
        cachedImage->bitmap().unlockPixels();
        ImageCacheMap::iterator iter = m_imageCacheMap.find(ImageCacheEntry::makeCacheKey(generator, cachedImage->scaledSize(), cachedImage->index(), cachedImage->generation()));
        ASSERT_WITH_SECURITY_IMPLICATION(iter != m_imageCacheMap.end());

        CacheEntry* cacheEntry = iter->value.get();
        cacheEntry->decrementUseCount();

        // Move the entry to the end of the list.
        m_orderedCacheList.remove(cacheEntry);
        m_orderedCacheList.append(cacheEntry);

        // FIXME: This code is temporary: with the new Skia discardable memory
        // path we do not cache images. Once the transition is complete, the
        // logic to handle image caching should be removed entirely.
        if (!s_imageCachingEnabled && !cacheEntry->useCount()) {
            removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete);
            removeFromCacheListInternal(cacheEntriesToDelete);
        }
    }
}

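// Inserts a newly decoded fragment and returns it locked and ready for use.
// If an entry with the same cache key already exists (the image generation ID
// wrapped around), the cached copy is returned instead and the new fragment
// is discarded, provided the cached copy can still be locked.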
const ScaledImageFragment* ImageDecodingStore::insertAndLockCache(const ImageFrameGenerator* generator, PassOwnPtr<ScaledImageFragment> image)
{
    // Prune old cache entries to give space for the new one.
    prune();

    ScaledImageFragment* newImage = image.get();
    OwnPtr<ImageCacheEntry> newCacheEntry = ImageCacheEntry::createAndUse(generator, image);
    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);

        ImageCacheMap::iterator iter = m_imageCacheMap.find(newCacheEntry->cacheKey());

        // It is rare but possible that the key of a new cache entry is already
        // present in the map. This happens if the generation ID of the image
        // object wraps around. In that case we try to return the existing
        // cached object and discard the new one.
        if (iter != m_imageCacheMap.end()) {
            const ScaledImageFragment* oldImage;
            if (lockCacheEntryInternal(iter->value.get(), &oldImage, &cacheEntriesToDelete)) {
                newCacheEntry->decrementUseCount();
                return oldImage;
            }
        }

        // The new image is not locked yet, so lock it here.
        newImage->bitmap().lockPixels();
        insertCacheInternal(newCacheEntry.release(), &m_imageCacheMap, &m_imageCacheKeyMap);
    }
    return newImage;
}

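// Hands out the cached decoder for (generator, scaledSize), if there is one.
// A decoder can only have a single user at a time; callers must pair this
// with unlockDecoder().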
bool ImageDecodingStore::lockDecoder(const ImageFrameGenerator* generator, const SkISize& scaledSize, ImageDecoder** decoder)
{
    ASSERT(decoder);

    MutexLocker lock(m_mutex);
    DecoderCacheMap::iterator iter = m_decoderCacheMap.find(DecoderCacheEntry::makeCacheKey(generator, scaledSize));
    if (iter == m_decoderCacheMap.end())
        return false;

    DecoderCacheEntry* cacheEntry = iter->value.get();

    // There can only be one user of a decoder at a time.
    ASSERT(!cacheEntry->useCount());
    cacheEntry->incrementUseCount();
    *decoder = cacheEntry->cachedDecoder();
    return true;
}

void ImageDecodingStore::unlockDecoder(const ImageFrameGenerator* generator, const ImageDecoder* decoder)
{
    MutexLocker lock(m_mutex);
    DecoderCacheMap::iterator iter = m_decoderCacheMap.find(DecoderCacheEntry::makeCacheKey(generator, decoder));
    ASSERT_WITH_SECURITY_IMPLICATION(iter != m_decoderCacheMap.end());

    CacheEntry* cacheEntry = iter->value.get();
    cacheEntry->decrementUseCount();

    // Move the entry to the end of the list.
    m_orderedCacheList.remove(cacheEntry);
    m_orderedCacheList.append(cacheEntry);
}

void ImageDecodingStore::insertDecoder(const ImageFrameGenerator* generator, PassOwnPtr<ImageDecoder> decoder, bool isDiscardable)
{
    // Prune old cache entries to give space for the new one.
    prune();

    OwnPtr<DecoderCacheEntry> newCacheEntry = DecoderCacheEntry::create(generator, decoder, isDiscardable);

    MutexLocker lock(m_mutex);
    ASSERT(!m_decoderCacheMap.contains(newCacheEntry->cacheKey()));
    insertCacheInternal(newCacheEntry.release(), &m_decoderCacheMap, &m_decoderCacheKeyMap);
}

void ImageDecodingStore::removeDecoder(const ImageFrameGenerator* generator, const ImageDecoder* decoder)
{
    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);
        DecoderCacheMap::iterator iter = m_decoderCacheMap.find(DecoderCacheEntry::makeCacheKey(generator, decoder));
        ASSERT_WITH_SECURITY_IMPLICATION(iter != m_decoderCacheMap.end());

        CacheEntry* cacheEntry = iter->value.get();
        ASSERT(cacheEntry->useCount());
        cacheEntry->decrementUseCount();

        // Delete only this decoder cache entry. Ownership of the cache entry
        // is transferred to cacheEntriesToDelete so that the object can be
        // deleted outside of the lock.
        removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete);

        // Remove from LRU list.
        removeFromCacheListInternal(cacheEntriesToDelete);
    }
}


bool ImageDecodingStore::isCached(const ImageFrameGenerator* generator, const SkISize& scaledSize, size_t index)
{
    MutexLocker lock(m_mutex);
    ImageCacheMap::iterator iter = m_imageCacheMap.find(ImageCacheEntry::makeCacheKey(generator, scaledSize, index, ScaledImageFragment::CompleteImage));
    if (iter == m_imageCacheMap.end())
        return false;
    return true;
}

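// Removes every image and decoder cache entry associated with |generator|.
// The removed entries are destroyed outside of the lock via
// cacheEntriesToDelete.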
void ImageDecodingStore::removeCacheIndexedByGenerator(const ImageFrameGenerator* generator)
{
    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);

        // Remove image cache objects and decoder cache objects associated
        // with an ImageFrameGenerator.
        removeCacheIndexedByGeneratorInternal(&m_imageCacheMap, &m_imageCacheKeyMap, generator, &cacheEntriesToDelete);
        removeCacheIndexedByGeneratorInternal(&m_decoderCacheMap, &m_decoderCacheKeyMap, generator, &cacheEntriesToDelete);

        // Remove from the LRU list as well.
        removeFromCacheListInternal(cacheEntriesToDelete);
    }
}

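// Evicts all unused entries by temporarily dropping the heap limit to zero
// and pruning; the previous limit is restored afterwards. Entries that are
// still in use are left in the cache.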
void ImageDecodingStore::clear()
{
    size_t cacheLimitInBytes;
    {
        MutexLocker lock(m_mutex);
        cacheLimitInBytes = m_heapLimitInBytes;
        m_heapLimitInBytes = 0;
    }

    prune();

    {
        MutexLocker lock(m_mutex);
        m_heapLimitInBytes = cacheLimitInBytes;
    }
}

void ImageDecodingStore::setCacheLimitInBytes(size_t cacheLimit)
{
    // Note that the discardable entries limit is constant (i.e. only the heap limit is updated).
    {
        MutexLocker lock(m_mutex);
        m_heapLimitInBytes = cacheLimit;
    }
    prune();
}

size_t ImageDecodingStore::memoryUsageInBytes()
{
    MutexLocker lock(m_mutex);
    return m_heapMemoryUsageInBytes;
}

int ImageDecodingStore::cacheEntries()
{
    MutexLocker lock(m_mutex);
    return m_imageCacheMap.size() + m_decoderCacheMap.size();
}

int ImageDecodingStore::imageCacheEntries()
{
    MutexLocker lock(m_mutex);
    return m_imageCacheMap.size();
}

int ImageDecodingStore::decoderCacheEntries()
{
    MutexLocker lock(m_mutex);
    return m_decoderCacheMap.size();
}

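// Walks the LRU list from the least recently used end and evicts unused
// entries until both the heap budget and the discardable budget are respected
// (a zero heap limit evicts everything that is not in use). Entries that are
// currently in use are skipped.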
void ImageDecodingStore::prune()
{
    TRACE_EVENT0("webkit", "ImageDecodingStore::prune");

    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);

        // The head of the list is the least recently used entry.
        const CacheEntry* cacheEntry = m_orderedCacheList.head();

        // Walk the list of cache entries starting from the least recently
        // used and collect the unused ones for deletion later.
        while (cacheEntry) {
            const bool isPruneNeeded = m_heapMemoryUsageInBytes > m_heapLimitInBytes || !m_heapLimitInBytes
                || m_discardableMemoryUsageInBytes > maxTotalSizeOfDiscardableEntries;
            if (!isPruneNeeded)
                break;

            // The entry is not in use; remove it.
            if (!cacheEntry->useCount())
                removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete);
            cacheEntry = cacheEntry->next();
        }

        // Remove from the cache list as well.
        removeFromCacheListInternal(cacheEntriesToDelete);
    }
}

bool ImageDecodingStore::lockCacheEntryInternal(ImageCacheEntry* cacheEntry, const ScaledImageFragment** cachedImage, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    ScaledImageFragment* image = cacheEntry->cachedImage();

    image->bitmap().lockPixels();

    // The memory for this image entry might already have been discarded.
    // In that case remove the entry.
    if (!image->bitmap().getPixels()) {
        image->bitmap().unlockPixels();
        removeFromCacheInternal(cacheEntry, &m_imageCacheMap, &m_imageCacheKeyMap, deletionList);
        removeFromCacheListInternal(*deletionList);
        return false;
    }
    cacheEntry->incrementUseCount();
    *cachedImage = image;
    return true;
}

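// Adds |cacheEntry| to |cacheMap|, records its key in |identifierMap| (keyed
// by generator), appends it to the LRU list and updates the heap or
// discardable memory counters.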
template<class T, class U, class V>
void ImageDecodingStore::insertCacheInternal(PassOwnPtr<T> cacheEntry, U* cacheMap, V* identifierMap)
{
    const size_t cacheEntryBytes = cacheEntry->memoryUsageInBytes();
    if (cacheEntry->isDiscardable())
        m_discardableMemoryUsageInBytes += cacheEntryBytes;
    else
        m_heapMemoryUsageInBytes += cacheEntryBytes;

    // m_orderedCacheList is used to support LRU operations to reorder cache
    // entries quickly.
    m_orderedCacheList.append(cacheEntry.get());

    typename U::KeyType key = cacheEntry->cacheKey();
    typename V::AddResult result = identifierMap->add(cacheEntry->generator(), typename V::MappedType());
    result.storedValue->value.add(key);
    cacheMap->add(key, cacheEntry);

    TRACE_COUNTER1("webkit", "ImageDecodingStoreDiscardableMemoryUsageBytes", m_discardableMemoryUsageInBytes);
    TRACE_COUNTER1("webkit", "ImageDecodingStoreHeapMemoryUsageBytes", m_heapMemoryUsageInBytes);
    TRACE_COUNTER1("webkit", "ImageDecodingStoreNumOfImages", m_imageCacheMap.size());
    TRACE_COUNTER1("webkit", "ImageDecodingStoreNumOfDecoders", m_decoderCacheMap.size());
}

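// Detaches |cacheEntry| from |cacheMap| and |identifierMap| and adjusts the
// memory counters. Ownership of the entry moves to |deletionList| so it can
// be destroyed outside of the lock; removal from the LRU list is handled
// separately by removeFromCacheListInternal().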
template<class T, class U, class V>
void ImageDecodingStore::removeFromCacheInternal(const T* cacheEntry, U* cacheMap, V* identifierMap, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    const size_t cacheEntryBytes = cacheEntry->memoryUsageInBytes();
    if (cacheEntry->isDiscardable()) {
        ASSERT(m_discardableMemoryUsageInBytes >= cacheEntryBytes);
        m_discardableMemoryUsageInBytes -= cacheEntryBytes;
    } else {
        ASSERT(m_heapMemoryUsageInBytes >= cacheEntryBytes);
        m_heapMemoryUsageInBytes -= cacheEntryBytes;
    }

    // Remove entry from identifier map.
    typename V::iterator iter = identifierMap->find(cacheEntry->generator());
    ASSERT(iter != identifierMap->end());
    iter->value.remove(cacheEntry->cacheKey());
    if (!iter->value.size())
        identifierMap->remove(iter);

    // Remove entry from cache map.
    deletionList->append(cacheMap->take(cacheEntry->cacheKey()));

    TRACE_COUNTER1("webkit", "ImageDecodingStoreDiscardableMemoryUsageBytes", m_discardableMemoryUsageInBytes);
    TRACE_COUNTER1("webkit", "ImageDecodingStoreHeapMemoryUsageBytes", m_heapMemoryUsageInBytes);
    TRACE_COUNTER1("webkit", "ImageDecodingStoreNumOfImages", m_imageCacheMap.size());
    TRACE_COUNTER1("webkit", "ImageDecodingStoreNumOfDecoders", m_decoderCacheMap.size());
}

void ImageDecodingStore::removeFromCacheInternal(const CacheEntry* cacheEntry, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    if (cacheEntry->type() == CacheEntry::TypeImage) {
        removeFromCacheInternal(static_cast<const ImageCacheEntry*>(cacheEntry), &m_imageCacheMap, &m_imageCacheKeyMap, deletionList);
    } else if (cacheEntry->type() == CacheEntry::TypeDecoder) {
        removeFromCacheInternal(static_cast<const DecoderCacheEntry*>(cacheEntry), &m_decoderCacheMap, &m_decoderCacheKeyMap, deletionList);
    } else {
        ASSERT(false);
    }
}

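// Collects every cache key recorded for |generator| in |identifierMap| and
// removes the corresponding entries from |cacheMap|, appending them to
// |deletionList|.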
template<class U, class V>
void ImageDecodingStore::removeCacheIndexedByGeneratorInternal(U* cacheMap, V* identifierMap, const ImageFrameGenerator* generator, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    typename V::iterator iter = identifierMap->find(generator);
    if (iter == identifierMap->end())
        return;

    // Get all cache identifiers associated with generator.
    Vector<typename U::KeyType> cacheIdentifierList;
    copyToVector(iter->value, cacheIdentifierList);

    // For each cache identifier find the corresponding CacheEntry and remove it.
    for (size_t i = 0; i < cacheIdentifierList.size(); ++i) {
        ASSERT(cacheMap->contains(cacheIdentifierList[i]));
        const typename U::MappedType::PtrType cacheEntry = cacheMap->get(cacheIdentifierList[i]);
        ASSERT(!cacheEntry->useCount());
        removeFromCacheInternal(cacheEntry, cacheMap, identifierMap, deletionList);
    }
}

void ImageDecodingStore::removeFromCacheListInternal(const Vector<OwnPtr<CacheEntry> >& deletionList)
{
    for (size_t i = 0; i < deletionList.size(); ++i)
        m_orderedCacheList.remove(deletionList[i].get());
}

} // namespace WebCore