/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "tools/DDLPromiseImageHelper.h"

#include "include/core/SkDeferredDisplayListRecorder.h"
#include "include/core/SkPicture.h"
#include "include/core/SkSerialProcs.h"
#include "include/gpu/GrDirectContext.h"
#include "include/gpu/GrYUVABackendTextures.h"
#include "src/codec/SkCodecImageGenerator.h"
#include "src/core/SkCachedData.h"
#include "src/core/SkMipmap.h"
#include "src/core/SkTaskGroup.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/image/SkImage_Base.h"
#include "src/image/SkImage_GpuYUVA.h"

DDLPromiseImageHelper::PromiseImageInfo::PromiseImageInfo(int index,
                                                          uint32_t originalUniqueID,
                                                          const SkImageInfo& ii)
        : fIndex(index)
        , fOriginalUniqueID(originalUniqueID)
        , fImageInfo(ii) {
}

DDLPromiseImageHelper::PromiseImageInfo::PromiseImageInfo(PromiseImageInfo&& other)
        : fIndex(other.fIndex)
        , fOriginalUniqueID(other.fOriginalUniqueID)
        , fImageInfo(other.fImageInfo)
        , fBaseLevel(other.fBaseLevel)
        , fMipLevels(std::move(other.fMipLevels))
        , fYUVAPixmaps(std::move(other.fYUVAPixmaps)) {
    for (int i = 0; i < SkYUVAInfo::kMaxPlanes; ++i) {
        fCallbackContexts[i] = std::move(other.fCallbackContexts[i]);
    }
}

DDLPromiseImageHelper::PromiseImageInfo::~PromiseImageInfo() {}

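// Return the base level and any built mipmap levels as a flat array of SkPixmaps (base level
// at index 0) in the form expected by GrDirectContext::createBackendTexture.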
std::unique_ptr<SkPixmap[]> DDLPromiseImageHelper::PromiseImageInfo::normalMipLevels() const {
    SkASSERT(!this->isYUV());
    std::unique_ptr<SkPixmap[]> pixmaps(new SkPixmap[this->numMipLevels()]);
    pixmaps[0] = fBaseLevel.pixmap();
    if (fMipLevels) {
        for (int i = 0; i < fMipLevels->countLevels(); ++i) {
            SkMipmap::Level mipLevel;
            fMipLevels->getLevel(i, &mipLevel);
            pixmaps[i+1] = mipLevel.fPixmap;
        }
    }
    return pixmaps;
}

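// Total number of levels, including the base level (i.e., 1 if no mipmaps were built).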
int DDLPromiseImageHelper::PromiseImageInfo::numMipLevels() const {
    SkASSERT(!this->isYUV());
    return fMipLevels ? fMipLevels->countLevels()+1 : 1;
}

void DDLPromiseImageHelper::PromiseImageInfo::setMipLevels(const SkBitmap& baseLevel,
                                                           std::unique_ptr<SkMipmap> mipLevels) {
    fBaseLevel = baseLevel;
    fMipLevels = std::move(mipLevels);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
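// By the time a callback context is destroyed, every promise image that used it must have
// completed (fDoneCnt == fNumImages). The backend texture, if one was created, is deleted here.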
PromiseImageCallbackContext::~PromiseImageCallbackContext() {
    SkASSERT(fDoneCnt == fNumImages);
    SkASSERT(!fTotalFulfills || fDoneCnt);

    if (fPromiseImageTexture) {
        fContext->deleteBackendTexture(fPromiseImageTexture->backendTexture());
    }
}

void PromiseImageCallbackContext::setBackendTexture(const GrBackendTexture& backendTexture) {
    SkASSERT(!fPromiseImageTexture);
    SkASSERT(fBackendFormat == backendTexture.getBackendFormat());
    fPromiseImageTexture = SkPromiseImageTexture::Make(backendTexture);
}

void PromiseImageCallbackContext::destroyBackendTexture() {
    SkASSERT(!fPromiseImageTexture || fPromiseImageTexture->unique());

    if (fPromiseImageTexture) {
        fContext->deleteBackendTexture(fPromiseImageTexture->backendTexture());
    }
    fPromiseImageTexture = nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

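// Serialize the input picture, replacing each SkImage with the index assigned to it by
// findOrDefineImage, create the callback contexts for the collected images, and then
// deserialize the result into a new SKP in which those indices become promise images.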
sk_sp<SkPicture> DDLPromiseImageHelper::recreateSKP(GrDirectContext* dContext,
                                                    SkPicture* inputPicture) {
    SkSerialProcs procs;

    procs.fImageCtx = this;
    procs.fImageProc = [](SkImage* image, void* ctx) -> sk_sp<SkData> {
        auto helper = static_cast<DDLPromiseImageHelper*>(ctx);

        int id = helper->findOrDefineImage(image);

        // Even if 'id' is invalid (i.e., -1) write it to the SKP
        return SkData::MakeWithCopy(&id, sizeof(id));
    };

    sk_sp<SkData> compressedPictureData = inputPicture->serialize(&procs);
    if (!compressedPictureData) {
        return nullptr;
    }

    this->createCallbackContexts(dContext);

    return this->reinflateSKP(dContext->threadSafeProxy(), compressedPictureData.get());
}

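// Create and upload the backend texture for a single YUVA plane, blocking until the
// asynchronous texture creation has completed.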
static GrBackendTexture create_yuva_texture(GrDirectContext* direct,
                                            const SkPixmap& pm,
                                            int texIndex) {
    SkASSERT(texIndex >= 0 && texIndex <= 3);

    bool finishedBECreate = false;
    auto markFinished = [](void* context) {
        *(bool*)context = true;
    };
    auto beTex = direct->createBackendTexture(pm,
                                              kTopLeft_GrSurfaceOrigin,
                                              GrRenderable::kNo,
                                              GrProtected::kNo,
                                              markFinished,
                                              &finishedBECreate);
    if (beTex.isValid()) {
        direct->submit();
        while (!finishedBECreate) {
            direct->checkAsyncWorkCompletion();
        }
    }
    return beTex;
}

/*
 * Create backend textures and upload data to them for all the textures required to satisfy
 * a single promise image.
 * For YUV textures this will result in up to 4 actual textures.
 */
void DDLPromiseImageHelper::CreateBETexturesForPromiseImage(GrDirectContext* direct,
                                                            PromiseImageInfo* info) {
    if (info->isYUV()) {
        int numPixmaps = info->yuvaInfo().numPlanes();
        for (int j = 0; j < numPixmaps; ++j) {
            const SkPixmap& yuvPixmap = info->yuvPixmap(j);

            PromiseImageCallbackContext* callbackContext = info->callbackContext(j);
            SkASSERT(callbackContext);

            // DDL TODO: what should we do with mipmapped YUV images
            callbackContext->setBackendTexture(create_yuva_texture(direct, yuvPixmap, j));
            SkASSERT(callbackContext->promiseImageTexture());
        }
    } else {
        PromiseImageCallbackContext* callbackContext = info->callbackContext(0);
        if (!callbackContext) {
            // This texture would've been too large to fit on the GPU
            return;
        }

        std::unique_ptr<SkPixmap[]> mipLevels = info->normalMipLevels();

        bool finishedBECreate = false;
        auto markFinished = [](void* context) {
            *(bool*)context = true;
        };
        auto backendTex = direct->createBackendTexture(mipLevels.get(),
                                                       info->numMipLevels(),
                                                       kTopLeft_GrSurfaceOrigin,
                                                       GrRenderable::kNo,
                                                       GrProtected::kNo,
                                                       markFinished,
                                                       &finishedBECreate);
        SkASSERT(backendTex.isValid());
        direct->submit();
        while (!finishedBECreate) {
            direct->checkAsyncWorkCompletion();
        }

        callbackContext->setBackendTexture(backendTex);
    }
}

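// Delete the backend texture(s) that were created for a single promise image.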
void DDLPromiseImageHelper::DeleteBETexturesForPromiseImage(PromiseImageInfo* info) {
    if (info->isYUV()) {
        int numPixmaps = info->yuvaInfo().numPlanes();
        for (int j = 0; j < numPixmaps; ++j) {
            PromiseImageCallbackContext* callbackContext = info->callbackContext(j);
            SkASSERT(callbackContext);

            callbackContext->destroyBackendTexture();
            SkASSERT(!callbackContext->promiseImageTexture());
        }
    } else {
        PromiseImageCallbackContext* callbackContext = info->callbackContext(0);
        if (!callbackContext) {
            // This texture would've been too large to fit on the GPU
            return;
        }

        callbackContext->destroyBackendTexture();
        SkASSERT(!callbackContext->promiseImageTexture());
    }
}

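// Create a callback context for each backend texture a promise image will need (one per plane
// for YUVA images). Images that are too large for the GPU, or whose format isn't texturable,
// get no context and will fall back to bitmap-backed images in CreatePromiseImages.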
void DDLPromiseImageHelper::createCallbackContexts(GrDirectContext* direct) {
    const GrCaps* caps = direct->priv().caps();
    const int maxDimension = caps->maxTextureSize();

    for (int i = 0; i < fImageInfo.count(); ++i) {
        PromiseImageInfo& info = fImageInfo[i];

        if (info.isYUV()) {
            int numPixmaps = info.yuvaInfo().numPlanes();

            for (int j = 0; j < numPixmaps; ++j) {
                const SkPixmap& yuvPixmap = info.yuvPixmap(j);

                GrBackendFormat backendFormat = direct->defaultBackendFormat(yuvPixmap.colorType(),
                                                                             GrRenderable::kNo);

                sk_sp<PromiseImageCallbackContext> callbackContext(
                        new PromiseImageCallbackContext(direct, backendFormat));

                info.setCallbackContext(j, std::move(callbackContext));
            }
        } else {
            const SkBitmap& baseLevel = info.baseLevel();

            // TODO: explicitly mark the PromiseImageInfo as too big and check in uploadAllToGPU
            if (maxDimension < std::max(baseLevel.width(), baseLevel.height())) {
                // This won't fit on the GPU. Fallback to a raster-backed image per tile.
                continue;
            }

            GrBackendFormat backendFormat = direct->defaultBackendFormat(baseLevel.colorType(),
                                                                         GrRenderable::kNo);
            if (!caps->isFormatTexturable(backendFormat, GrTextureType::k2D)) {
                continue;
            }

            sk_sp<PromiseImageCallbackContext> callbackContext(
                    new PromiseImageCallbackContext(direct, backendFormat));

            info.setCallbackContext(0, std::move(callbackContext));
        }
    }
}

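// Create and upload the backend textures for every promise image, in parallel if a task group
// is supplied, otherwise serially.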
void DDLPromiseImageHelper::uploadAllToGPU(SkTaskGroup* taskGroup, GrDirectContext* direct) {
    if (taskGroup) {
        for (int i = 0; i < fImageInfo.count(); ++i) {
            PromiseImageInfo* info = &fImageInfo[i];

            taskGroup->add([direct, info]() { CreateBETexturesForPromiseImage(direct, info); });
        }
    } else {
        for (int i = 0; i < fImageInfo.count(); ++i) {
            CreateBETexturesForPromiseImage(direct, &fImageInfo[i]);
        }
    }
}

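// Delete the backend textures for every promise image, in parallel if a task group is
// supplied, otherwise serially.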
void DDLPromiseImageHelper::deleteAllFromGPU(SkTaskGroup* taskGroup, GrDirectContext* direct) {
    if (taskGroup) {
        for (int i = 0; i < fImageInfo.count(); ++i) {
            PromiseImageInfo* info = &fImageInfo[i];

            taskGroup->add([info]() { DeleteBETexturesForPromiseImage(info); });
        }
    } else {
        for (int i = 0; i < fImageInfo.count(); ++i) {
            DeleteBETexturesForPromiseImage(&fImageInfo[i]);
        }
    }
}

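// Deserialize the compressed picture, converting each stored image index back into an SkImage
// via CreatePromiseImages.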
sk_sp<SkPicture> DDLPromiseImageHelper::reinflateSKP(
        sk_sp<GrContextThreadSafeProxy> threadSafeProxy,
        SkData* compressedPictureData) {
    DeserialImageProcContext procContext { std::move(threadSafeProxy), this };

    SkDeserialProcs procs;
    procs.fImageCtx = (void*) &procContext;
    procs.fImageProc = CreatePromiseImages;

    return SkPicture::MakeFromData(compressedPictureData, &procs);
}

// This generates promise images to replace the indices in the compressed picture.
sk_sp<SkImage> DDLPromiseImageHelper::CreatePromiseImages(const void* rawData,
                                                          size_t length,
                                                          void* ctxIn) {
    DeserialImageProcContext* procContext = static_cast<DeserialImageProcContext*>(ctxIn);
    DDLPromiseImageHelper* helper = procContext->fHelper;

    SkASSERT(length == sizeof(int));

    const int* indexPtr = static_cast<const int*>(rawData);
    if (!helper->isValidID(*indexPtr)) {
        return nullptr;
    }

    const DDLPromiseImageHelper::PromiseImageInfo& curImage = helper->getInfo(*indexPtr);

    // If there is no callback context that means 'createCallbackContexts' determined the
    // texture wouldn't fit on the GPU. Create a bitmap-backed image.
    if (!curImage.isYUV() && !curImage.callbackContext(0)) {
        SkASSERT(curImage.baseLevel().isImmutable());
        return curImage.baseLevel().asImage();
    }

    SkASSERT(curImage.index() == *indexPtr);

    sk_sp<SkImage> image;
    if (curImage.isYUV()) {
        GrBackendFormat backendFormats[SkYUVAInfo::kMaxPlanes];
        const SkYUVAInfo& yuvaInfo = curImage.yuvaInfo();
        void* contexts[SkYUVAInfo::kMaxPlanes] = {nullptr, nullptr, nullptr, nullptr};
        int textureCount = yuvaInfo.numPlanes();
        for (int i = 0; i < textureCount; ++i) {
            backendFormats[i] = curImage.backendFormat(i);
            contexts[i] = curImage.refCallbackContext(i).release();
        }
        GrYUVABackendTextureInfo yuvaBackendTextures(yuvaInfo,
                                                     backendFormats,
                                                     GrMipmapped::kNo,
                                                     kTopLeft_GrSurfaceOrigin);
        image = SkImage::MakePromiseYUVATexture(
                procContext->fThreadSafeProxy,
                yuvaBackendTextures,
                curImage.refOverallColorSpace(),
                PromiseImageCallbackContext::PromiseImageFulfillProc,
                PromiseImageCallbackContext::PromiseImageReleaseProc,
                contexts);
        if (!image) {
            return nullptr;
        }
        for (int i = 0; i < textureCount; ++i) {
            curImage.callbackContext(i)->wasAddedToImage();
        }

    } else {
        const GrBackendFormat& backendFormat = curImage.backendFormat(0);
        SkASSERT(backendFormat.isValid());

        image = SkImage::MakePromiseTexture(procContext->fThreadSafeProxy,
                                            backendFormat,
                                            curImage.overallDimensions(),
                                            curImage.mipMapped(0),
                                            GrSurfaceOrigin::kTopLeft_GrSurfaceOrigin,
                                            curImage.overallColorType(),
                                            curImage.overallAlphaType(),
                                            curImage.refOverallColorSpace(),
                                            PromiseImageCallbackContext::PromiseImageFulfillProc,
                                            PromiseImageCallbackContext::PromiseImageReleaseProc,
                                            (void*)curImage.refCallbackContext(0).release());
        curImage.callbackContext(0)->wasAddedToImage();
    }
    helper->fPromiseImages.push_back(image);
    SkASSERT(image);
    return image;
}

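// Return the index of the PromiseImageInfo already recorded for an image with the same
// uniqueID as 'image', or -1 if the image hasn't been seen before.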
int DDLPromiseImageHelper::findImage(SkImage* image) const {
    for (int i = 0; i < fImageInfo.count(); ++i) {
        if (fImageInfo[i].originalUniqueID() == image->uniqueID()) { // trying to dedup here
            SkASSERT(fImageInfo[i].index() == i);
            SkASSERT(this->isValidID(i) && this->isValidID(fImageInfo[i].index()));
            return i;
        }
    }
    return -1;
}

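// Record the data needed to later create a promise image for 'image': decode it to YUVA planes
// if the codec supports that, otherwise decode it to a raster bitmap (plus mipmaps). Returns
// the new image's index, or -1 on failure.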
int DDLPromiseImageHelper::addImage(SkImage* image) {
    SkImage_Base* ib = as_IB(image);

    SkImageInfo overallII = SkImageInfo::Make(image->width(), image->height(),
                                              image->colorType() == kBGRA_8888_SkColorType
                                                        ? kRGBA_8888_SkColorType
                                                        : image->colorType(),
                                              image->alphaType(),
                                              image->refColorSpace());

    PromiseImageInfo& newImageInfo = fImageInfo.emplace_back(fImageInfo.count(),
                                                             image->uniqueID(),
                                                             overallII);

    auto codec = SkCodecImageGenerator::MakeFromEncodedCodec(ib->refEncodedData());
    SkYUVAPixmapInfo yuvaInfo;
    if (codec && codec->queryYUVAInfo(fSupportedYUVADataTypes, &yuvaInfo)) {
        auto yuvaPixmaps = SkYUVAPixmaps::Allocate(yuvaInfo);
        if (!codec->getYUVAPlanes(yuvaPixmaps)) {
            return -1;
        }
        SkASSERT(yuvaPixmaps.isValid());
        newImageInfo.setYUVPlanes(std::move(yuvaPixmaps));
    } else {
        sk_sp<SkImage> rasterImage = image->makeRasterImage(); // force decoding of lazy images
        if (!rasterImage) {
            return -1;
        }

        SkBitmap tmp;
        tmp.allocPixels(overallII);

        if (!rasterImage->readPixels(nullptr, tmp.pixmap(), 0, 0)) {
            return -1;
        }

        tmp.setImmutable();

        // Given how the DDL testing harness works (i.e., only modifying the SkImages w/in an
        // SKP) we don't know if a given SkImage will require mipmapping. To work around this
        // we just try to create all the backend textures as mipmapped but, failing that, fall
        // back to un-mipped.
        std::unique_ptr<SkMipmap> mipmaps(SkMipmap::Build(tmp.pixmap(), nullptr));

        newImageInfo.setMipLevels(tmp, std::move(mipmaps));
    }
    // In either case newImageInfo's PromiseImageCallbackContext is filled in by uploadAllToGPU

    return fImageInfo.count()-1;
}

int DDLPromiseImageHelper::findOrDefineImage(SkImage* image) {
    int preExistingID = this->findImage(image);
    if (preExistingID >= 0) {
        SkASSERT(this->isValidID(preExistingID));
        return preExistingID;
    }

    int newID = this->addImage(image);
    return newID;
}