/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

8 #include "tools/DDLPromiseImageHelper.h"
9
10 #include "include/core/SkDeferredDisplayListRecorder.h"
11 #include "include/core/SkPicture.h"
12 #include "include/core/SkSerialProcs.h"
13 #include "include/core/SkYUVAIndex.h"
14 #include "include/core/SkYUVASizeInfo.h"
15 #include "include/gpu/GrContext.h"
16 #include "src/core/SkCachedData.h"
17 #include "src/core/SkTaskGroup.h"
18 #include "src/gpu/GrContextPriv.h"
19 #include "src/image/SkImage_Base.h"
20 #include "src/image/SkImage_GpuYUVA.h"
21
~PromiseImageCallbackContext()22 DDLPromiseImageHelper::PromiseImageCallbackContext::~PromiseImageCallbackContext() {
23 SkASSERT(fDoneCnt == fNumImages);
24 SkASSERT(!fUnreleasedFulfills);
25 SkASSERT(fTotalReleases == fTotalFulfills);
26 SkASSERT(!fTotalFulfills || fDoneCnt);
27
28 if (fPromiseImageTexture) {
29 fContext->deleteBackendTexture(fPromiseImageTexture->backendTexture());
30 }
31 }
32
setBackendTexture(const GrBackendTexture & backendTexture)33 void DDLPromiseImageHelper::PromiseImageCallbackContext::setBackendTexture(
34 const GrBackendTexture& backendTexture) {
35 SkASSERT(!fPromiseImageTexture);
36 SkASSERT(fBackendFormat == backendTexture.getBackendFormat());
37 fPromiseImageTexture = SkPromiseImageTexture::Make(backendTexture);
38 }
39
///////////////////////////////////////////////////////////////////////////////////////////////////

deflateSKP(const SkPicture * inputPicture)42 sk_sp<SkData> DDLPromiseImageHelper::deflateSKP(const SkPicture* inputPicture) {
43 SkSerialProcs procs;
44
45 procs.fImageCtx = this;
46 procs.fImageProc = [](SkImage* image, void* ctx) -> sk_sp<SkData> {
47 auto helper = static_cast<DDLPromiseImageHelper*>(ctx);
48
49 int id = helper->findOrDefineImage(image);
50
51 // Even if 'id' is invalid (i.e., -1) write it to the SKP
52 return SkData::MakeWithCopy(&id, sizeof(id));
53 };
54
55 return inputPicture->serialize(&procs);
56 }
57
create_yuva_texture(GrContext * context,const SkPixmap & pm,const SkYUVAIndex yuvaIndices[4],int texIndex)58 static GrBackendTexture create_yuva_texture(GrContext* context, const SkPixmap& pm,
59 const SkYUVAIndex yuvaIndices[4], int texIndex) {
60 SkASSERT(texIndex >= 0 && texIndex <= 3);
61
62 #ifdef SK_DEBUG
63 int channelCount = 0;
64 for (int i = 0; i < SkYUVAIndex::kIndexCount; ++i) {
65 if (yuvaIndices[i].fIndex == texIndex) {
66 ++channelCount;
67 }
68 }
69 if (2 == channelCount) {
70 SkASSERT(kR8G8_unorm_SkColorType == pm.colorType());
71 }
72 #endif
73
74 return context->createBackendTexture(&pm, 1, GrRenderable::kNo, GrProtected::kNo);
75 }
76
77 /*
78 * Create backend textures and upload data to them for all the textures required to satisfy
79 * a single promise image.
80 * For YUV textures this will result in up to 4 actual textures.
81 */
CreateBETexturesForPromiseImage(GrContext * context,PromiseImageInfo * info)82 void DDLPromiseImageHelper::CreateBETexturesForPromiseImage(GrContext* context,
83 PromiseImageInfo* info) {
84 SkASSERT(context->priv().asDirectContext());
85
86 // DDL TODO: how can we tell if we need mipmapping!
87 if (info->isYUV()) {
88 int numPixmaps;
89 SkAssertResult(SkYUVAIndex::AreValidIndices(info->yuvaIndices(), &numPixmaps));
90 for (int j = 0; j < numPixmaps; ++j) {
91 const SkPixmap& yuvPixmap = info->yuvPixmap(j);
92
93 PromiseImageCallbackContext* callbackContext = info->callbackContext(j);
94 SkASSERT(callbackContext);
95
96 callbackContext->setBackendTexture(create_yuva_texture(context, yuvPixmap,
97 info->yuvaIndices(), j));
98 SkASSERT(callbackContext->promiseImageTexture());
99 }
100 } else {
101 PromiseImageCallbackContext* callbackContext = info->callbackContext(0);
102 if (!callbackContext) {
103 // This texture would've been too large to fit on the GPU
104 return;
105 }
106
107 const SkBitmap& bm = info->normalBitmap();
108
109 GrBackendTexture backendTex = context->createBackendTexture(
110 &bm.pixmap(), 1, GrRenderable::kNo,
111 GrProtected::kNo);
112 SkASSERT(backendTex.isValid());
113
114 callbackContext->setBackendTexture(backendTex);
115 }
116 }
117
createCallbackContexts(GrContext * context)118 void DDLPromiseImageHelper::createCallbackContexts(GrContext* context) {
119 const GrCaps* caps = context->priv().caps();
120 const int maxDimension = caps->maxTextureSize();
121
122 for (int i = 0; i < fImageInfo.count(); ++i) {
123 PromiseImageInfo& info = fImageInfo[i];
124
125 if (info.isYUV()) {
126 int numPixmaps;
127 SkAssertResult(SkYUVAIndex::AreValidIndices(info.yuvaIndices(), &numPixmaps));
128
129 for (int j = 0; j < numPixmaps; ++j) {
130 const SkPixmap& yuvPixmap = info.yuvPixmap(j);
131
132 GrBackendFormat backendFormat = context->defaultBackendFormat(yuvPixmap.colorType(),
133 GrRenderable::kNo);
134
135 sk_sp<PromiseImageCallbackContext> callbackContext(
136 new PromiseImageCallbackContext(context, backendFormat));
137
138 info.setCallbackContext(j, std::move(callbackContext));
139 }
140 } else {
141 const SkBitmap& bm = info.normalBitmap();
142
143 // TODO: explicitly mark the PromiseImageInfo as too big and check in uploadAllToGPU
144 if (maxDimension < std::max(bm.width(), bm.height())) {
145 // This won't fit on the GPU. Fallback to a raster-backed image per tile.
146 continue;
147 }
148
149 GrBackendFormat backendFormat = context->defaultBackendFormat(bm.pixmap().colorType(),
150 GrRenderable::kNo);
151 if (!caps->isFormatTexturable(backendFormat)) {
152 continue;
153 }
154
155
156 sk_sp<PromiseImageCallbackContext> callbackContext(
157 new PromiseImageCallbackContext(context, backendFormat));
158
159 info.setCallbackContext(0, std::move(callbackContext));
160 }
161 }
162 }
163
uploadAllToGPU(SkTaskGroup * taskGroup,GrContext * context)164 void DDLPromiseImageHelper::uploadAllToGPU(SkTaskGroup* taskGroup, GrContext* context) {
165 SkASSERT(context->priv().asDirectContext());
166
167 if (taskGroup) {
168 for (int i = 0; i < fImageInfo.count(); ++i) {
169 PromiseImageInfo* info = &fImageInfo[i];
170
171 taskGroup->add([context, info]() {
172 CreateBETexturesForPromiseImage(context, info);
173 });
174 }
175 } else {
176 for (int i = 0; i < fImageInfo.count(); ++i) {
177 CreateBETexturesForPromiseImage(context, &fImageInfo[i]);
178 }
179 }
180 }
181
reinflateSKP(SkDeferredDisplayListRecorder * recorder,SkData * compressedPictureData,SkTArray<sk_sp<SkImage>> * promiseImages) const182 sk_sp<SkPicture> DDLPromiseImageHelper::reinflateSKP(
183 SkDeferredDisplayListRecorder* recorder,
184 SkData* compressedPictureData,
185 SkTArray<sk_sp<SkImage>>* promiseImages) const {
186 PerRecorderContext perRecorderContext { recorder, this, promiseImages };
187
188 SkDeserialProcs procs;
189 procs.fImageCtx = (void*) &perRecorderContext;
190 procs.fImageProc = CreatePromiseImages;
191
192 return SkPicture::MakeFromData(compressedPictureData, &procs);
193 }
194
195 // This generates promise images to replace the indices in the compressed picture. This
196 // reconstitution is performed separately in each thread so we end up with multiple
197 // promise images referring to the same GrBackendTexture.
CreatePromiseImages(const void * rawData,size_t length,void * ctxIn)198 sk_sp<SkImage> DDLPromiseImageHelper::CreatePromiseImages(const void* rawData,
199 size_t length, void* ctxIn) {
200 PerRecorderContext* perRecorderContext = static_cast<PerRecorderContext*>(ctxIn);
201 const DDLPromiseImageHelper* helper = perRecorderContext->fHelper;
202 SkDeferredDisplayListRecorder* recorder = perRecorderContext->fRecorder;
203
204 SkASSERT(length == sizeof(int));
205
206 const int* indexPtr = static_cast<const int*>(rawData);
207 if (!helper->isValidID(*indexPtr)) {
208 return nullptr;
209 }
210
211 const DDLPromiseImageHelper::PromiseImageInfo& curImage = helper->getInfo(*indexPtr);
212
213 // If there is no callback context that means 'createCallbackContexts' determined the
214 // texture wouldn't fit on the GPU. Create a separate bitmap-backed image for each thread.
215 if (!curImage.isYUV() && !curImage.callbackContext(0)) {
216 SkASSERT(curImage.normalBitmap().isImmutable());
217 return SkImage::MakeFromBitmap(curImage.normalBitmap());
218 }
219
220 SkASSERT(curImage.index() == *indexPtr);
221
222 sk_sp<SkImage> image;
223 if (curImage.isYUV()) {
224 GrBackendFormat backendFormats[SkYUVASizeInfo::kMaxCount];
225 void* contexts[SkYUVASizeInfo::kMaxCount] = { nullptr, nullptr, nullptr, nullptr };
226 SkISize sizes[SkYUVASizeInfo::kMaxCount];
227 // TODO: store this value somewhere?
228 int textureCount;
229 SkAssertResult(SkYUVAIndex::AreValidIndices(curImage.yuvaIndices(), &textureCount));
230 for (int i = 0; i < textureCount; ++i) {
231 backendFormats[i] = curImage.backendFormat(i);
232 SkASSERT(backendFormats[i].isValid());
233 contexts[i] = curImage.refCallbackContext(i).release();
234 sizes[i].set(curImage.yuvPixmap(i).width(), curImage.yuvPixmap(i).height());
235 }
236 for (int i = textureCount; i < SkYUVASizeInfo::kMaxCount; ++i) {
237 sizes[i] = SkISize::MakeEmpty();
238 }
239
240 image = recorder->makeYUVAPromiseTexture(
241 curImage.yuvColorSpace(),
242 backendFormats,
243 sizes,
244 curImage.yuvaIndices(),
245 curImage.overallWidth(),
246 curImage.overallHeight(),
247 GrSurfaceOrigin::kTopLeft_GrSurfaceOrigin,
248 curImage.refOverallColorSpace(),
249 DDLPromiseImageHelper::PromiseImageFulfillProc,
250 DDLPromiseImageHelper::PromiseImageReleaseProc,
251 DDLPromiseImageHelper::PromiseImageDoneProc,
252 contexts,
253 SkDeferredDisplayListRecorder::PromiseImageApiVersion::kNew);
254 for (int i = 0; i < textureCount; ++i) {
255 curImage.callbackContext(i)->wasAddedToImage();
256 }
257
258 #ifdef SK_DEBUG
259 {
260 // By the peekProxy contract this image should not have a single backing proxy so
261 // should return null. The call should also not trigger the conversion to RGBA.
262 SkImage_GpuYUVA* yuva = reinterpret_cast<SkImage_GpuYUVA*>(image.get());
263 SkASSERT(!yuva->peekProxy());
264 SkASSERT(!yuva->peekProxy()); // the first call didn't force a conversion to RGBA
265 }
266 #endif
267 } else {
268 GrBackendFormat backendFormat = curImage.backendFormat(0);
269 SkASSERT(backendFormat.isValid());
270
271 // Each DDL recorder gets its own ref on the promise callback context for the
272 // promise images it creates.
273 // DDL TODO: sort out mipmapping
274 image = recorder->makePromiseTexture(
275 backendFormat,
276 curImage.overallWidth(),
277 curImage.overallHeight(),
278 GrMipMapped::kNo,
279 GrSurfaceOrigin::kTopLeft_GrSurfaceOrigin,
280 curImage.overallColorType(),
281 curImage.overallAlphaType(),
282 curImage.refOverallColorSpace(),
283 DDLPromiseImageHelper::PromiseImageFulfillProc,
284 DDLPromiseImageHelper::PromiseImageReleaseProc,
285 DDLPromiseImageHelper::PromiseImageDoneProc,
286 (void*)curImage.refCallbackContext(0).release(),
287 SkDeferredDisplayListRecorder::PromiseImageApiVersion::kNew);
288 curImage.callbackContext(0)->wasAddedToImage();
289 }
290 perRecorderContext->fPromiseImages->push_back(image);
291 SkASSERT(image);
292 return image;
293 }
294
findImage(SkImage * image) const295 int DDLPromiseImageHelper::findImage(SkImage* image) const {
296 for (int i = 0; i < fImageInfo.count(); ++i) {
297 if (fImageInfo[i].originalUniqueID() == image->uniqueID()) { // trying to dedup here
298 SkASSERT(fImageInfo[i].index() == i);
299 SkASSERT(this->isValidID(i) && this->isValidID(fImageInfo[i].index()));
300 return i;
301 }
302 }
303 return -1;
304 }
305
addImage(SkImage * image)306 int DDLPromiseImageHelper::addImage(SkImage* image) {
307 SkImage_Base* ib = as_IB(image);
308
309 SkImageInfo overallII = SkImageInfo::Make(image->width(), image->height(),
310 image->colorType() == kBGRA_8888_SkColorType
311 ? kRGBA_8888_SkColorType
312 : image->colorType(),
313 image->alphaType(),
314 image->refColorSpace());
315
316 PromiseImageInfo& newImageInfo = fImageInfo.emplace_back(fImageInfo.count(),
317 image->uniqueID(),
318 overallII);
319
320 SkYUVASizeInfo yuvaSizeInfo;
321 SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount];
322 SkYUVColorSpace yuvColorSpace;
323 const void* planes[SkYUVASizeInfo::kMaxCount];
324 sk_sp<SkCachedData> yuvData = ib->getPlanes(&yuvaSizeInfo, yuvaIndices, &yuvColorSpace, planes);
325 if (yuvData) {
326 newImageInfo.setYUVData(std::move(yuvData), yuvaIndices, yuvColorSpace);
327
328 // determine colortypes from index data
329 // for testing we only ever use A8, RG_88
330 SkColorType colorTypes[SkYUVASizeInfo::kMaxCount] = {
331 kUnknown_SkColorType, kUnknown_SkColorType,
332 kUnknown_SkColorType, kUnknown_SkColorType
333 };
334 for (int yuvIndex = 0; yuvIndex < SkYUVAIndex::kIndexCount; ++yuvIndex) {
335 int texIdx = yuvaIndices[yuvIndex].fIndex;
336 if (texIdx < 0) {
337 SkASSERT(SkYUVAIndex::kA_Index == yuvIndex);
338 continue;
339 }
340 if (kUnknown_SkColorType == colorTypes[texIdx]) {
341 colorTypes[texIdx] = kAlpha_8_SkColorType;
342 } else {
343 colorTypes[texIdx] = kR8G8_unorm_SkColorType;
344 }
345 }
346
347 for (int i = 0; i < SkYUVASizeInfo::kMaxCount; ++i) {
348 if (yuvaSizeInfo.fSizes[i].isEmpty()) {
349 SkASSERT(!yuvaSizeInfo.fWidthBytes[i] && kUnknown_SkColorType == colorTypes[i]);
350 continue;
351 }
352
353 SkImageInfo planeII = SkImageInfo::Make(yuvaSizeInfo.fSizes[i].fWidth,
354 yuvaSizeInfo.fSizes[i].fHeight,
355 colorTypes[i],
356 kUnpremul_SkAlphaType);
357 newImageInfo.addYUVPlane(i, planeII, planes[i], yuvaSizeInfo.fWidthBytes[i]);
358 }
359 } else {
360 sk_sp<SkImage> rasterImage = image->makeRasterImage(); // force decoding of lazy images
361 if (!rasterImage) {
362 return -1;
363 }
364
365 SkBitmap tmp;
366 tmp.allocPixels(overallII);
367
368 if (!rasterImage->readPixels(tmp.pixmap(), 0, 0)) {
369 return -1;
370 }
371
372 tmp.setImmutable();
373 newImageInfo.setNormalBitmap(tmp);
374 }
375 // In either case newImageInfo's PromiseImageCallbackContext is filled in by uploadAllToGPU
376
377 return fImageInfo.count()-1;
378 }
379
findOrDefineImage(SkImage * image)380 int DDLPromiseImageHelper::findOrDefineImage(SkImage* image) {
381 int preExistingID = this->findImage(image);
382 if (preExistingID >= 0) {
383 SkASSERT(this->isValidID(preExistingID));
384 return preExistingID;
385 }
386
387 int newID = this->addImage(image);
388 return newID;
389 }
390