/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/image/SkImage_GpuBase.h"

#include "include/core/SkBitmap.h"
#include "include/core/SkPromiseImageTexture.h"
#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrDirectContext.h"
#include "include/gpu/GrRecordingContext.h"
#include "include/gpu/GrYUVABackendTextures.h"
#include "src/core/SkBitmapCache.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrImageContextPriv.h"
#include "src/gpu/GrImageInfo.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrResourceProvider.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/GrYUVATextureProxies.h"
#include "src/gpu/SurfaceContext.h"
#include "src/gpu/effects/GrYUVtoRGBEffect.h"
#include "src/image/SkImage_Gpu.h"
#include "src/image/SkReadPixelsRec.h"

SkImage_GpuBase::SkImage_GpuBase(sk_sp<GrImageContext> context, SkImageInfo info, uint32_t uniqueID)
        : INHERITED(std::move(info), uniqueID)
        , fContext(std::move(context)) {}

//////////////////////////////////////////////////////////////////////////////////////////////////

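// Validates a client-supplied backend texture for use as an SkImage: the texture and its backend
// format must be valid, the color/alpha type combination must form a valid SkColorInfo, and the
// GrColorType must be compatible with the texture's backend format.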
bool SkImage_GpuBase::ValidateBackendTexture(const GrCaps* caps, const GrBackendTexture& tex,
                                             GrColorType grCT, SkColorType ct, SkAlphaType at,
                                             sk_sp<SkColorSpace> cs) {
    if (!tex.isValid()) {
        return false;
    }
    SkColorInfo info(ct, at, cs);
    if (!SkColorInfoIsValid(info)) {
        return false;
    }
    GrBackendFormat backendFormat = tex.getBackendFormat();
    if (!backendFormat.isValid()) {
        return false;
    }

    return caps->areColorTypeAndFormatCompatible(grCT, backendFormat);
}

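// Validates a client-supplied compressed backend texture: it must be valid, have positive
// dimensions no larger than the caps' maximum texture size, carry a known alpha type, and use a
// backend format that the caps report as compressed.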
bool SkImage_GpuBase::ValidateCompressedBackendTexture(const GrCaps* caps,
                                                       const GrBackendTexture& tex,
                                                       SkAlphaType at) {
    if (!tex.isValid() || tex.width() <= 0 || tex.height() <= 0) {
        return false;
    }

    if (tex.width() > caps->maxTextureSize() || tex.height() > caps->maxTextureSize()) {
        return false;
    }

    if (at == kUnknown_SkAlphaType) {
        return false;
    }

    GrBackendFormat backendFormat = tex.getBackendFormat();
    if (!backendFormat.isValid()) {
        return false;
    }

    if (!caps->isFormatCompressed(backendFormat)) {
        return false;
    }

    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

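// Reads this GPU image back into a raster bitmap. A previously cached bitmap is returned when
// available; otherwise the pixels are read through a SurfaceContext and, if caching is allowed,
// the result is added to the bitmap cache.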
bool SkImage_GpuBase::getROPixels(GrDirectContext* dContext,
                                  SkBitmap* dst,
                                  CachingHint chint) const {
    if (!fContext->priv().matches(dContext)) {
        return false;
    }

    const auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, dst)) {
        SkASSERT(dst->isImmutable());
        SkASSERT(dst->getPixels());
        return true;
    }

    SkBitmapCache::RecPtr rec = nullptr;
    SkPixmap pmap;
    if (kAllow_CachingHint == chint) {
        rec = SkBitmapCache::Alloc(desc, this->imageInfo(), &pmap);
        if (!rec) {
            return false;
        }
    } else {
        if (!dst->tryAllocPixels(this->imageInfo()) || !dst->peekPixels(&pmap)) {
            return false;
        }
    }

    auto [view, ct] = this->asView(dContext, GrMipmapped::kNo);
    if (!view) {
        return false;
    }

    GrColorInfo colorInfo(ct, this->alphaType(), this->refColorSpace());
    auto sContext = dContext->priv().makeSC(std::move(view), std::move(colorInfo));
    if (!sContext) {
        return false;
    }

    if (!sContext->readPixels(dContext, pmap, {0, 0})) {
        return false;
    }

    if (rec) {
        SkBitmapCache::Add(std::move(rec), dst);
        this->notifyAddedToRasterCache();
    }
    return true;
}

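// Makes a new GPU-backed image backed by an exact-fit copy of the requested subset of this
// image's texture.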
sk_sp<SkImage> SkImage_GpuBase::onMakeSubset(const SkIRect& subset,
                                             GrDirectContext* direct) const {
    if (!fContext->priv().matches(direct)) {
        return nullptr;
    }

    auto [view, ct] = this->asView(direct, GrMipmapped::kNo);
    SkASSERT(view);
    SkASSERT(ct == SkColorTypeToGrColorType(this->colorType()));

    SkBudgeted isBudgeted = view.proxy()->isBudgeted();
    auto copyView = GrSurfaceProxyView::Copy(direct,
                                             std::move(view),
                                             GrMipmapped::kNo,
                                             subset,
                                             SkBackingFit::kExact,
                                             isBudgeted);

    if (!copyView) {
        return nullptr;
    }

    return sk_make_sp<SkImage_Gpu>(sk_ref_sp(direct),
                                   kNeedNewImageUniqueID,
                                   std::move(copyView),
                                   this->imageInfo().colorInfo());
}

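// Reads a rectangle of pixels starting at (srcX, srcY) into the client's memory, converting to
// dstInfo where a valid conversion exists.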
bool SkImage_GpuBase::onReadPixels(GrDirectContext* dContext,
                                   const SkImageInfo& dstInfo,
                                   void* dstPixels,
                                   size_t dstRB,
                                   int srcX,
                                   int srcY,
                                   CachingHint) const {
    if (!fContext->priv().matches(dContext) ||
        !SkImageInfoValidConversion(dstInfo, this->imageInfo())) {
        return false;
    }

    auto [view, ct] = this->asView(dContext, GrMipmapped::kNo);
    SkASSERT(view);

    GrColorInfo colorInfo(ct, this->alphaType(), this->refColorSpace());
    auto sContext = dContext->priv().makeSC(std::move(view), colorInfo);
    if (!sContext) {
        return false;
    }

    return sContext->readPixels(dContext, {dstInfo, dstPixels, dstRB}, {srcX, srcY});
}

bool SkImage_GpuBase::onIsValid(GrRecordingContext* context) const {
    // The base class has already checked that 'context' isn't abandoned (if it's not nullptr).
    if (fContext->priv().abandoned()) {
        return false;
    }

    if (context && !fContext->priv().matches(context)) {
        return false;
    }

    return true;
}

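// Creates the lazily-instantiated proxy for a promise image. The client's fulfillProc is called
// at most once, when the proxy is first instantiated, to supply the backend texture;
// releaseHelper is installed as the release callback on the wrapped GrTexture.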
sk_sp<GrTextureProxy> SkImage_GpuBase::MakePromiseImageLazyProxy(
        GrContextThreadSafeProxy* tsp,
        SkISize dimensions,
        GrBackendFormat backendFormat,
        GrMipmapped mipMapped,
        PromiseImageTextureFulfillProc fulfillProc,
        sk_sp<GrRefCntedCallback> releaseHelper) {
    SkASSERT(tsp);
    SkASSERT(!dimensions.isEmpty());
    SkASSERT(releaseHelper);

    if (!fulfillProc) {
        return nullptr;
    }

    if (mipMapped == GrMipmapped::kYes &&
        GrTextureTypeHasRestrictedSampling(backendFormat.textureType())) {
        // It is invalid to have a GL_TEXTURE_EXTERNAL or GL_TEXTURE_RECTANGLE and have mips as
        // well.
        return nullptr;
    }

    /**
     * This class is the lazy instantiation callback for promise images. It manages calling the
     * client's Fulfill, Release, and Done procs. It attempts to reuse a GrTexture instance in
     * cases where the client provides the same SkPromiseImageTexture as Fulfill results for
     * multiple SkImages. The created GrTexture is given a key based on a unique ID associated with
     * the SkPromiseImageTexture.
     *
     * The GrTexture idle proc mechanism is used to call the Release and Done procs. We use this
     * instead of the GrSurface release proc because the GrTexture is cached and therefore may
     * outlive the proxy into which this callback is installed.
     *
     * A key invalidation message is installed on the SkPromiseImageTexture so that the GrTexture
     * is deleted once it can no longer be used to instantiate a proxy.
     */
    class PromiseLazyInstantiateCallback {
    public:
        PromiseLazyInstantiateCallback(PromiseImageTextureFulfillProc fulfillProc,
                                       sk_sp<GrRefCntedCallback> releaseHelper)
                : fFulfillProc(fulfillProc), fReleaseHelper(std::move(releaseHelper)) {}
        PromiseLazyInstantiateCallback(PromiseLazyInstantiateCallback&&) = default;
        PromiseLazyInstantiateCallback(const PromiseLazyInstantiateCallback&) {
            // Because we get wrapped in std::function we must be copyable. But we should never
            // be copied.
            SkASSERT(false);
        }
        PromiseLazyInstantiateCallback& operator=(PromiseLazyInstantiateCallback&&) = default;
        PromiseLazyInstantiateCallback& operator=(const PromiseLazyInstantiateCallback&) {
            SkASSERT(false);
            return *this;
        }

        ~PromiseLazyInstantiateCallback() {
            // Our destructor can run on any thread. We trigger the unref of fTexture by message.
            // This unreffed texture pointer is a real problem! When the context has been
            // abandoned, the GrTexture pointed to by this pointer is deleted! Due to virtual
            // inheritance any manipulation of this pointer at that point will cause a crash.
            // For now we "work around" the problem by just passing it, untouched, into the
            // message bus, but this is very fragile.
            // In the future the GrSurface class hierarchy refactoring should eliminate this
            // difficulty by removing the virtual inheritance.
            if (fTexture) {
                GrTextureFreedMessage msg { fTexture, fTextureContextID };
                SkMessageBus<GrTextureFreedMessage, GrDirectContext::DirectContextID>::Post(msg);
            }
        }

        GrSurfaceProxy::LazyCallbackResult operator()(GrResourceProvider* resourceProvider,
                                                      const GrSurfaceProxy::LazySurfaceDesc&) {
            // We use the unique key in a way that is unrelated to the SkImage-based key that the
            // proxy may receive, hence kUnsynced.
            static constexpr auto kKeySyncMode =
                    GrSurfaceProxy::LazyInstantiationKeyMode::kUnsynced;

            // In order to make the SkImage "thread safe" we rely on holding an extra ref to the
            // texture in the callback and signalling the unref via a message to the resource
            // cache. We need to extend the callback's lifetime to that of the proxy.
            static constexpr auto kReleaseCallbackOnInstantiation = false;

            // Our proxy is getting instantiated for the second+ time. We are only allowed to call
            // Fulfill once. So return our cached result.
            if (fTexture) {
                return {sk_ref_sp(fTexture), kReleaseCallbackOnInstantiation, kKeySyncMode};
            } else if (fFulfillProcFailed) {
                // We've already called fulfill and it failed. Our contract says that we should
                // only call each callback once.
                return {};
            }

            PromiseImageTextureContext textureContext = fReleaseHelper->context();
            sk_sp<SkPromiseImageTexture> promiseTexture = fFulfillProc(textureContext);

            if (!promiseTexture) {
                fFulfillProcFailed = true;
                return {};
            }

            const GrBackendTexture& backendTexture = promiseTexture->backendTexture();
            if (!backendTexture.isValid()) {
                return {};
            }

            sk_sp<GrTexture> tex = resourceProvider->wrapBackendTexture(backendTexture,
                                                                        kBorrow_GrWrapOwnership,
                                                                        GrWrapCacheable::kNo,
                                                                        kRead_GrIOType);
            if (!tex) {
                return {};
            }
            tex->setRelease(fReleaseHelper);
            fTexture = tex.get();
            // We need to hold on to the GrTexture in case our proxy gets reinstantiated. However,
            // we can't unref in our destructor because we may be on another thread then. So we
            // let the cache know it is waiting on an unref message. We will send that message from
            // our destructor.
            auto dContext = fTexture->getContext();
            dContext->priv().getResourceCache()->insertDelayedTextureUnref(fTexture);
            fTextureContextID = dContext->directContextID();
            return {std::move(tex), kReleaseCallbackOnInstantiation, kKeySyncMode};
        }

    private:
        PromiseImageTextureFulfillProc fFulfillProc;
        sk_sp<GrRefCntedCallback> fReleaseHelper;
        GrTexture* fTexture = nullptr;
        GrDirectContext::DirectContextID fTextureContextID;
        bool fFulfillProcFailed = false;
    } callback(fulfillProc, std::move(releaseHelper));

    return GrProxyProvider::CreatePromiseProxy(tsp, std::move(callback), backendFormat, dimensions,
                                               mipMapped);
}