/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkBitmap.h"
#include "SkBitmapCache.h"
#include "SkImage_Base.h"
#include "SkImageCacherator.h"
#include "SkMallocPixelRef.h"
#include "SkNextID.h"
#include "SkPixelRef.h"
#include "SkResourceCache.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrGpuResourcePriv.h"
#include "GrImageIDTextureAdjuster.h"
#include "GrResourceKey.h"
#include "GrTextureParams.h"
#include "GrYUVProvider.h"
#include "SkGr.h"
#include "SkGrPriv.h"
#endif

SkImageCacherator* SkImageCacherator::NewFromGenerator(SkImageGenerator* gen,
                                                       const SkIRect* subset) {
    if (!gen) {
        return nullptr;
    }

    // We are required to take ownership of gen, regardless of whether we return a cacherator.
    SkAutoTDelete<SkImageGenerator> genHolder(gen);

    const SkImageInfo& info = gen->getInfo();
    if (info.isEmpty()) {
        return nullptr;
    }

    uint32_t uniqueID = gen->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            return nullptr;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            uniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    // Now that we know we can hand off the generator (to be owned by the cacherator), we can
    // release our holder; we don't want to delete the generator here.
    genHolder.detach();

    return new SkImageCacherator(gen, gen->getInfo().makeWH(subset->width(), subset->height()),
                                 SkIPoint::Make(subset->x(), subset->y()), uniqueID);
}

SkImageCacherator::SkImageCacherator(SkImageGenerator* gen, const SkImageInfo& info,
                                     const SkIPoint& origin, uint32_t uniqueID)
    : fNotThreadSafeGenerator(gen)
    , fInfo(info)
    , fOrigin(origin)
    , fUniqueID(uniqueID)
{}

SkData* SkImageCacherator::refEncoded(GrContext* ctx) {
    ScopedGenerator generator(this);
    return generator->refEncodedData(ctx);
}

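// Debug-only sanity checks (via SkASSERT) that a bitmap we are about to return is immutable,
// has pixels, and carries the expected unique ID.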
static bool check_output_bitmap(const SkBitmap& bitmap, uint32_t expectedID) {
    SkASSERT(bitmap.getGenerationID() == expectedID);
    SkASSERT(bitmap.isImmutable());
    SkASSERT(bitmap.getPixels());
    return true;
}

// Note: this returns a new, mutable bitmap with a new genID.
// If you want the immutable bitmap with the same ID as our cacherator, call tryLockAsBitmap().
//
bool SkImageCacherator::generateBitmap(SkBitmap* bitmap) {
    SkBitmap::Allocator* allocator = SkResourceCache::GetAllocator();

    ScopedGenerator generator(this);
    const SkImageInfo& genInfo = generator->getInfo();
    if (fInfo.dimensions() == genInfo.dimensions()) {
        SkASSERT(fOrigin.x() == 0 && fOrigin.y() == 0);
        // fast-case, no copy needed
        return generator->tryGenerateBitmap(bitmap, fInfo, allocator);
    } else {
        // need to handle subsetting, so we first generate the full size version, and then
        // "read" from it to get our subset. See https://bug.skia.org/4213

        SkBitmap full;
        if (!generator->tryGenerateBitmap(&full, genInfo, allocator)) {
            return false;
        }
        if (!bitmap->tryAllocPixels(fInfo, nullptr, full.getColorTable())) {
            return false;
        }
        return full.readPixels(bitmap->info(), bitmap->getPixels(), bitmap->rowBytes(),
                               fOrigin.x(), fOrigin.y());
    }
}

bool SkImageCacherator::directGeneratePixels(const SkImageInfo& info, void* pixels, size_t rb,
                                             int srcX, int srcY) {
    ScopedGenerator generator(this);
    const SkImageInfo& genInfo = generator->getInfo();
    // Currently generators do not natively handle subsets, so check that first.
    if (srcX || srcY || genInfo.width() != info.width() || genInfo.height() != info.height()) {
        return false;
    }
    return generator->getPixels(info, pixels, rb);
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImageCacherator::lockAsBitmapOnlyIfAlreadyCached(SkBitmap* bitmap) {
    return SkBitmapCache::Find(fUniqueID, bitmap) && check_output_bitmap(*bitmap, fUniqueID);
}

bool SkImageCacherator::tryLockAsBitmap(SkBitmap* bitmap, const SkImage* client,
                                        SkImage::CachingHint chint) {
    if (this->lockAsBitmapOnlyIfAlreadyCached(bitmap)) {
        return true;
    }
    if (!this->generateBitmap(bitmap)) {
        return false;
    }

    bitmap->pixelRef()->setImmutableWithID(fUniqueID);
    if (SkImage::kAllow_CachingHint == chint) {
        SkBitmapCache::Add(fUniqueID, *bitmap);
        if (client) {
            as_IB(client)->notifyAddedToCache();
        }
    }
    return true;
}

bool SkImageCacherator::lockAsBitmap(SkBitmap* bitmap, const SkImage* client,
                                     SkImage::CachingHint chint) {
    if (this->tryLockAsBitmap(bitmap, client, chint)) {
        return check_output_bitmap(*bitmap, fUniqueID);
    }

#if SK_SUPPORT_GPU
    // Try to get a texture and read it back to raster (and then cache that with our ID)
    SkAutoTUnref<GrTexture> tex;

    {
        ScopedGenerator generator(this);
        SkIRect subset = SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
        tex.reset(generator->generateTexture(nullptr, &subset));
    }
    if (!tex) {
        bitmap->reset();
        return false;
    }

    if (!bitmap->tryAllocPixels(fInfo)) {
        bitmap->reset();
        return false;
    }

    const uint32_t pixelOpsFlags = 0;
    if (!tex->readPixels(0, 0, bitmap->width(), bitmap->height(), SkImageInfo2GrPixelConfig(fInfo),
                         bitmap->getPixels(), bitmap->rowBytes(), pixelOpsFlags)) {
        bitmap->reset();
        return false;
    }

    bitmap->pixelRef()->setImmutableWithID(fUniqueID);
    if (SkImage::kAllow_CachingHint == chint) {
        SkBitmapCache::Add(fUniqueID, *bitmap);
        if (client) {
            as_IB(client)->notifyAddedToCache();
        }
    }
    return check_output_bitmap(*bitmap, fUniqueID);
#else
    return false;
#endif
}

//////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU

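// If the generator's encoded data is in a compressed format the GPU supports,
// upload it directly as a texture.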
static GrTexture* load_compressed_into_texture(GrContext* ctx, SkData* data, GrSurfaceDesc desc) {
    const void* rawStart;
    GrPixelConfig config = GrIsCompressedTextureDataSupported(ctx, data, desc.fWidth, desc.fHeight,
                                                              &rawStart);
    if (kUnknown_GrPixelConfig == config) {
        return nullptr;
    }

    desc.fConfig = config;
    return ctx->textureProvider()->createTexture(desc, SkBudgeted::kYes, rawStart, 0);
}

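// Adapts an SkImageGenerator to the GrYUVProvider interface, so the GPU can build a texture
// directly from the generator's YUV planes.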
class Generator_GrYUVProvider : public GrYUVProvider {
    SkImageGenerator* fGen;

public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

    uint32_t onGetID() override { return fGen->uniqueID(); }
    bool onGetYUVSizes(SkISize sizes[3]) override {
        return fGen->getYUV8Planes(sizes, nullptr, nullptr, nullptr);
    }
    bool onGetYUVPlanes(SkISize sizes[3], void* planes[3], size_t rowBytes[3],
                        SkYUVColorSpace* space) override {
        return fGen->getYUV8Planes(sizes, planes, rowBytes, space);
    }
};

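// Attach the unique key (if valid) to the texture, so the cache lookup in step 1 of
// lockTexture() can find it on subsequent calls.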
static GrTexture* set_key_and_return(GrTexture* tex, const GrUniqueKey& key) {
    if (key.isValid()) {
        tex->resourcePriv().setUniqueKey(key);
    }
    return tex;
}

/*
 *  We have 5 ways to try to return a texture (in order of preference):
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return a compressed form that the GPU might support
 *  4. Ask the generator to return YUV planes, which the GPU can convert
 *  5. Ask the generator to return RGB(A) data, which the GPU can convert
 */
GrTexture* SkImageCacherator::lockTexture(GrContext* ctx, const GrUniqueKey& key,
                                          const SkImage* client, SkImage::CachingHint chint) {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath,
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        if (GrTexture* tex = ctx->textureProvider()->findAndRefTextureByUniqueKey(key)) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            return tex;
        }
    }

    // 2. Ask the generator to natively create one
    {
        ScopedGenerator generator(this);
        SkIRect subset = SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
        if (GrTexture* tex = generator->generateTexture(ctx, &subset)) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            return set_key_and_return(tex, key);
        }
    }

    const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(fInfo);

    // 3. Ask the generator to return a compressed form that the GPU might support
    SkAutoTUnref<SkData> data(this->refEncoded(ctx));
    if (data) {
        GrTexture* tex = load_compressed_into_texture(ctx, data, desc);
        if (tex) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kCompressed_LockTexturePath,
                                     kLockTexturePathCount);
            return set_key_and_return(tex, key);
        }
    }

    // 4. Ask the generator to return YUV planes, which the GPU can convert
    {
        ScopedGenerator generator(this);
        Generator_GrYUVProvider provider(generator);
        GrTexture* tex = provider.refAsTexture(ctx, desc, true);
        if (tex) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                     kLockTexturePathCount);
            return set_key_and_return(tex, key);
        }
    }

    // 5. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (this->tryLockAsBitmap(&bitmap, client, chint)) {
        GrTexture* tex = GrUploadBitmapToTexture(ctx, bitmap);
        if (tex) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            return set_key_and_return(tex, key);
        }
    }
    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

GrTexture* SkImageCacherator::lockAsTexture(GrContext* ctx, const GrTextureParams& params,
                                            const SkImage* client, SkImage::CachingHint chint) {
    if (!ctx) {
        return nullptr;
    }

    return GrImageTextureMaker(ctx, this, client, chint).refTextureForParams(params);
}

#else

GrTexture* SkImageCacherator::lockAsTexture(GrContext* ctx, const GrTextureParams&,
                                            const SkImage* client, SkImage::CachingHint) {
    return nullptr;
}

#endif