1 /*
2 * Copyright 2016 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 #include "src/gpu/GrSurfaceProxy.h"
9 #include "src/gpu/GrSurfaceProxyPriv.h"
10
11 #include "include/gpu/GrRecordingContext.h"
12 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
13 #include "include/gpu/vk/GrVulkanTrackerInterface.h"
14 #endif
15 #include "src/core/SkMathPriv.h"
16 #include "src/gpu/GrAttachment.h"
17 #include "src/gpu/GrCaps.h"
18 #include "src/gpu/GrGpuResourcePriv.h"
19 #include "src/gpu/GrImageInfo.h"
20 #include "src/gpu/GrRecordingContextPriv.h"
21 #include "src/gpu/GrResourceProvider.h"
22 #include "src/gpu/GrSurface.h"
23 #include "src/gpu/GrTexture.h"
24 #include "src/gpu/GrTextureRenderTargetProxy.h"
25 #include "src/gpu/SurfaceFillContext.h"
26 #ifdef SKIA_OHOS
27 #include "src/gpu/GrPerfMonitorReporter.h"
28 #endif
29
30 #ifdef SK_DEBUG
31 #include "include/gpu/GrDirectContext.h"
32 #include "src/gpu/GrDirectContextPriv.h"
33 #include "src/gpu/GrRenderTarget.h"
34
is_valid_lazy(const SkISize & dimensions,SkBackingFit fit)35 static bool is_valid_lazy(const SkISize& dimensions, SkBackingFit fit) {
36 // A "fully" lazy proxy's width and height are not known until instantiation time.
37 // So fully lazy proxies are created with width and height < 0. Regular lazy proxies must be
38 // created with positive widths and heights. The width and height are set to 0 only after a
39 // failed instantiation. The former must be "approximate" fit while the latter can be either.
40 return ((dimensions.fWidth < 0 && dimensions.fHeight < 0 && SkBackingFit::kApprox == fit) ||
41 (dimensions.fWidth > 0 && dimensions.fHeight > 0));
42 }
43
is_valid_non_lazy(SkISize dimensions)44 static bool is_valid_non_lazy(SkISize dimensions) {
45 return dimensions.fWidth > 0 && dimensions.fHeight > 0;
46 }
47 #endif
48
// Emulator mock: stub ParallelDebug implementations for builds without Vulkan.
50 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
51 #ifndef SK_VULKAN
namespace ParallelDebug {
// Stub implementations used when Vulkan support is compiled out: the DFX
// feature is reported as disabled and node-id tracking is a no-op.
bool IsVkImageDfxEnabled()
{
    return false;
}
void RecordNodeId(uint64_t nodeId)
{
    static_cast<void>(nodeId);
}
uint64_t GetNodeId()
{
    return 0;
}
}  // namespace ParallelDebug
57 #endif
58 #endif
59
60 // OH ISSUE: emulator mock
61 #ifdef SKIA_DFX_FOR_OHOS
62 #ifndef SK_VULKAN
namespace RealAllocConfig {
// Stub implementations used when Vulkan support is compiled out: the
// "real alloc" status is always reported as false and cannot be changed.
bool GetRealAllocStatus()
{
    return false;
}
void SetRealAllocStatus(bool ret)
{
    static_cast<void>(ret);
}
}  // namespace RealAllocConfig
67 #endif
68 #endif
69
70 // Deferred version
// Captures the descriptor for a surface that will be allocated later (no backing
// GrSurface exists yet and no lazy callback is involved). Dimensions must be
// strictly positive - see is_valid_non_lazy().
GrSurfaceProxy::GrSurfaceProxy(const GrBackendFormat& format,
                               SkISize dimensions,
                               SkBackingFit fit,
                               SkBudgeted budgeted,
                               GrProtected isProtected,
                               GrInternalSurfaceFlags surfaceFlags,
                               UseAllocator useAllocator)
        : fSurfaceFlags(surfaceFlags)
        , fFormat(format)
        , fDimensions(dimensions)
        , fFit(fit)
        , fBudgeted(budgeted)
        , fUseAllocator(useAllocator)
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
        // OHOS debug aid: remember the node id in effect at construction time
        // (presumably identifying the render node being recorded - see
        // GrVulkanTrackerInterface.h).
        , fNodeId(ParallelDebug::GetNodeId())
#endif
#ifdef SKIA_DFX_FOR_OHOS
        // Snapshot the global "real alloc" flag; used later when tagging the
        // backing resource (see createSurfaceImpl()).
        , fRealAllocProxy(RealAllocConfig::GetRealAllocStatus())
#endif
        , fIsProtected(isProtected) {
    SkASSERT(fFormat.isValid());
    SkASSERT(is_valid_non_lazy(dimensions));
}
94
95 // Lazy-callback version
// Lazy-callback version: the backing surface is produced by 'callback' at
// instantiation time (see GrSurfaceProxyPriv::doLazyInstantiation). Fully lazy
// proxies pass negative dimensions (filled in when the callback runs); partially
// lazy proxies pass their final positive dimensions - see is_valid_lazy().
GrSurfaceProxy::GrSurfaceProxy(LazyInstantiateCallback&& callback,
                               const GrBackendFormat& format,
                               SkISize dimensions,
                               SkBackingFit fit,
                               SkBudgeted budgeted,
                               GrProtected isProtected,
                               GrInternalSurfaceFlags surfaceFlags,
                               UseAllocator useAllocator)
        : fSurfaceFlags(surfaceFlags)
        , fFormat(format)
        , fDimensions(dimensions)
        , fFit(fit)
        , fBudgeted(budgeted)
        , fUseAllocator(useAllocator)
        , fLazyInstantiateCallback(std::move(callback))
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
        // OHOS debug aid: remember the node id in effect at construction time.
        , fNodeId(ParallelDebug::GetNodeId())
#endif
#ifdef SKIA_DFX_FOR_OHOS
        // Snapshot the global "real alloc" flag for later resource tagging.
        , fRealAllocProxy(RealAllocConfig::GetRealAllocStatus())
#endif
        , fIsProtected(isProtected) {
    SkASSERT(fFormat.isValid());
    SkASSERT(fLazyInstantiateCallback);
    SkASSERT(is_valid_lazy(dimensions, fit));
}
122
123 // Wrapped version
// Wrapped version: adopts an already-created GrSurface and derives the
// descriptor fields (flags, format, dimensions, budgeted state, protection)
// from it.
// NOTE(review): several initializers below read fTarget; this relies on fTarget
// being declared before those members in the class (member initialization
// follows declaration order, not init-list order) - confirm against the header.
GrSurfaceProxy::GrSurfaceProxy(sk_sp<GrSurface> surface,
                               SkBackingFit fit,
                               UseAllocator useAllocator)
        : fTarget(std::move(surface))
        , fSurfaceFlags(fTarget->flags())
        , fFormat(fTarget->backendFormat())
        , fDimensions(fTarget->dimensions())
        , fFit(fit)
        , fBudgeted(fTarget->resourcePriv().budgetedType() == GrBudgetedType::kBudgeted
                            ? SkBudgeted::kYes
                            : SkBudgeted::kNo)
        , fUseAllocator(useAllocator)
        , fUniqueID(fTarget->uniqueID()) // Note: converting from unique resource ID to a proxy ID!
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
        // OHOS debug aid: remember the node id in effect at construction time.
        , fNodeId(ParallelDebug::GetNodeId())
#endif
#ifdef SKIA_DFX_FOR_OHOS
        // Snapshot the global "real alloc" flag for later resource tagging.
        , fRealAllocProxy(RealAllocConfig::GetRealAllocStatus())
#endif
        , fIsProtected(fTarget->isProtected() ? GrProtected::kYes : GrProtected::kNo) {
    SkASSERT(fFormat.isValid());
}
146
// Out-of-line destructor; the body is intentionally empty. Members (including
// fTarget and the lazy-instantiation callback) are released by their own
// destructors.
GrSurfaceProxy::~GrSurfaceProxy() {
}
149
150 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
151 struct NodeIdHelper {
NodeIdHelperNodeIdHelper152 explicit inline NodeIdHelper(uint64_t nodeId): initNodeId_(ParallelDebug::GetNodeId())
153 {
154 ParallelDebug::RecordNodeId(nodeId);
155 }
~NodeIdHelperNodeIdHelper156 inline ~NodeIdHelper()
157 {
158 ParallelDebug::RecordNodeId(initNodeId_);
159 }
160 uint64_t initNodeId_;
161 };
162 #endif
163
// Allocates a GrSurface matching this proxy's descriptor through the resource
// provider (approx- or exact-fit path depending on fFit). Returns null on
// allocation failure. The surface is tagged for OHOS resource accounting when
// the proxy carries a valid tag; it is NOT assigned to the proxy here.
sk_sp<GrSurface> GrSurfaceProxy::createSurfaceImpl(GrResourceProvider* resourceProvider,
                                                   int sampleCnt,
                                                   GrRenderable renderable,
                                                   GrMipmapped mipMapped) const {
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
    // Attribute resources created below to this proxy's node (OHOS debug aid).
    NodeIdHelper helper(fNodeId);
#endif
    // Mipmaps are only supported with exact-fit backing stores.
    SkASSERT(mipMapped == GrMipmapped::kNo || fFit == SkBackingFit::kExact);
    SkASSERT(!this->isLazy());
    SkASSERT(!fTarget);
#ifdef SKIA_OHOS
    // Timestamp used to report how long the allocation below takes.
    int64_t currentTime = GrPerfMonitorReporter::getCurrentTime();
#endif
    sk_sp<GrSurface> surface;
    if (SkBackingFit::kApprox == fFit) {
        surface = resourceProvider->createApproxTexture(fDimensions,
                                                        fFormat,
                                                        fFormat.textureType(),
                                                        renderable,
                                                        sampleCnt,
                                                        fIsProtected);
    } else {
        surface = resourceProvider->createTexture(fDimensions,
                                                  fFormat,
                                                  fFormat.textureType(),
                                                  renderable,
                                                  sampleCnt,
                                                  mipMapped,
                                                  fBudgeted,
                                                  fIsProtected);
    }
    if (!surface) {
        return nullptr;
    }
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
    if (ParallelDebug::IsVkImageDfxEnabled()) {
        surface->updateNodeId(fNodeId);
    }
#endif

    // Propagate the proxy's resource tag (and, on OHOS, perf statistics) to the
    // newly created surface.
    if (fGrProxyTag.isGrTagValid()) {
#ifdef SKIA_DFX_FOR_OHOS
        surface->setResourceTag(fGrProxyTag, fRealAllocProxy);
#else
        surface->setResourceTag(fGrProxyTag);
#endif
#ifdef SKIA_OHOS
        int64_t allocTime = GrPerfMonitorReporter::getCurrentTime() - currentTime;
        GrPerfMonitorReporter::GetInstance().recordTextureNode(fGrProxyTag.fName, allocTime);
        GrPerfMonitorReporter::GetInstance().recordTexturePerfEvent(fGrProxyTag.fName,
            fGrProxyTag.fPid, static_cast<int32_t>(resourceProvider->getMaxResourceBytes()),
            static_cast<int32_t>(resourceProvider->getBudgetedResourceBytes()), allocTime);
#endif
    }
    return surface;
}
220
canSkipResourceAllocator() const221 bool GrSurfaceProxy::canSkipResourceAllocator() const {
222 if (fUseAllocator == UseAllocator::kNo) {
223 // Usually an atlas or onFlush proxy
224 return true;
225 }
226
227 auto peek = this->peekSurface();
228 if (!peek) {
229 return false;
230 }
231 // If this resource is already allocated and not recyclable then the resource allocator does
232 // not need to do anything with it.
233 return !peek->resourcePriv().getScratchKey().isValid();
234 }
235
// Attaches 'surface' as this proxy's backing GrSurface. Must be called exactly
// once (fTarget must still be null) with a non-null surface.
void GrSurfaceProxy::assign(sk_sp<GrSurface> surface) {
    SkASSERT(!fTarget && surface);

    // Debug-only: check the surface is compatible with this proxy's descriptor.
    SkDEBUGCODE(this->validateSurface(surface.get());)

    fTarget = std::move(surface);

#ifdef SK_DEBUG
    // A render-target proxy must be backed by a render-target surface.
    if (this->asRenderTargetProxy()) {
        SkASSERT(fTarget->asRenderTarget());
    }

    // In order to give DDL users some flexibility in the destination of their DDLs,
    // a DDL's target proxy can be more conservative (and thus require less memory)
    // than the actual GrSurface used to fulfill it.
    if (!this->isDDLTarget() && kInvalidGpuMemorySize != this->getRawGpuMemorySize_debugOnly()) {
        // TODO(11373): Can this check be exact?
        SkASSERT(fTarget->gpuMemorySize() <= this->getRawGpuMemorySize_debugOnly());
    }
#endif
}
257
instantiateImpl(GrResourceProvider * resourceProvider,int sampleCnt,GrRenderable renderable,GrMipmapped mipMapped,const GrUniqueKey * uniqueKey)258 bool GrSurfaceProxy::instantiateImpl(GrResourceProvider* resourceProvider, int sampleCnt,
259 GrRenderable renderable, GrMipmapped mipMapped,
260 const GrUniqueKey* uniqueKey) {
261 SkASSERT(!this->isLazy());
262 if (fTarget) {
263 if (uniqueKey && uniqueKey->isValid()) {
264 SkASSERT(fTarget->getUniqueKey().isValid() && fTarget->getUniqueKey() == *uniqueKey);
265 }
266 return true;
267 }
268
269 sk_sp<GrSurface> surface = this->createSurfaceImpl(resourceProvider, sampleCnt, renderable,
270 mipMapped);
271 if (!surface) {
272 return false;
273 }
274
275 // If there was an invalidation message pending for this key, we might have just processed it,
276 // causing the key (stored on this proxy) to become invalid.
277 if (uniqueKey && uniqueKey->isValid()) {
278 resourceProvider->assignUniqueKeyToResource(*uniqueKey, surface.get());
279 }
280
281 this->assign(std::move(surface));
282
283 return true;
284 }
285
deinstantiate()286 void GrSurfaceProxy::deinstantiate() {
287 SkASSERT(this->isInstantiated());
288 fTarget = nullptr;
289 }
290
computeScratchKey(const GrCaps & caps,GrScratchKey * key) const291 void GrSurfaceProxy::computeScratchKey(const GrCaps& caps, GrScratchKey* key) const {
292 SkASSERT(!this->isFullyLazy());
293 GrRenderable renderable = GrRenderable::kNo;
294 int sampleCount = 1;
295 if (const auto* rtp = this->asRenderTargetProxy()) {
296 renderable = GrRenderable::kYes;
297 sampleCount = rtp->numSamples();
298 }
299
300 const GrTextureProxy* tp = this->asTextureProxy();
301 GrMipmapped mipMapped = GrMipmapped::kNo;
302 if (tp) {
303 mipMapped = tp->mipmapped();
304 }
305
306 GrTexture::ComputeScratchKey(caps, this->backendFormat(), this->backingStoreDimensions(),
307 renderable, sampleCount, mipMapped, fIsProtected, key);
308 }
309
backingStoreDimensions() const310 SkISize GrSurfaceProxy::backingStoreDimensions() const {
311 SkASSERT(!this->isFullyLazy());
312 if (fTarget) {
313 return fTarget->dimensions();
314 }
315
316 if (SkBackingFit::kExact == fFit) {
317 return fDimensions;
318 }
319 return GrResourceProvider::MakeApprox(fDimensions);
320 }
321
isFunctionallyExact() const322 bool GrSurfaceProxy::isFunctionallyExact() const {
323 SkASSERT(!this->isFullyLazy());
324 return fFit == SkBackingFit::kExact ||
325 fDimensions == GrResourceProvider::MakeApprox(fDimensions);
326 }
327
isFormatCompressed(const GrCaps * caps) const328 bool GrSurfaceProxy::isFormatCompressed(const GrCaps* caps) const {
329 return caps->isFormatCompressed(this->backendFormat());
330 }
331
332 #ifdef SK_DEBUG
validate(GrContext_Base * context) const333 void GrSurfaceProxy::validate(GrContext_Base* context) const {
334 if (fTarget) {
335 SkASSERT(fTarget->getContext()->priv().matches(context));
336 }
337 }
338 #endif
339
// Copies 'srcRect' of 'src' into a newly created proxy. With RectsMustMatch::kYes
// the destination matches the source's full dimensions and the content keeps
// srcRect's position; otherwise the destination is sized to srcRect and the
// content lands at (0, 0). A direct surface copy is attempted first; if that is
// not possible a texture draw (blit) is used. Returns the new proxy or null when
// neither path works. On success '*outTask' (if provided) receives the task that
// performs the copy.
sk_sp<GrSurfaceProxy> GrSurfaceProxy::Copy(GrRecordingContext* rContext,
                                           sk_sp<GrSurfaceProxy> src,
                                           GrSurfaceOrigin origin,
                                           GrMipmapped mipMapped,
                                           SkIRect srcRect,
                                           SkBackingFit fit,
                                           SkBudgeted budgeted,
                                           RectsMustMatch rectsMustMatch,
                                           sk_sp<GrRenderTask>* outTask) {
    SkASSERT(!src->isFullyLazy());
    int width;
    int height;

    SkIPoint dstPoint;
    if (rectsMustMatch == RectsMustMatch::kYes) {
        width = src->width();
        height = src->height();
        dstPoint = {srcRect.fLeft, srcRect.fTop};
    } else {
        width = srcRect.width();
        height = srcRect.height();
        dstPoint = {0, 0};
    }

    // NOTE(review): width/height above are taken from srcRect *before* the clamp
    // below, so a partially out-of-bounds srcRect yields a destination larger
    // than the area actually copied - confirm this is intended.
    if (!srcRect.intersect(SkIRect::MakeSize(src->dimensions()))) {
        return {};  // srcRect lies entirely outside the source
    }
    auto format = src->backendFormat().makeTexture2D();
    SkASSERT(format.isValid());

    // Preferred path: surface-to-surface copy (no draw). External textures are
    // excluded from this path.
    if (src->backendFormat().textureType() != GrTextureType::kExternal) {
        GrImageInfo info(GrColorType::kUnknown, kUnknown_SkAlphaType, nullptr, {width, height});
        auto dstContext = rContext->priv().makeSC(info,
                                                  format,
                                                  fit,
                                                  origin,
                                                  GrRenderable::kNo,
                                                  1,
                                                  mipMapped,
                                                  src->isProtected(),
                                                  budgeted);
        sk_sp<GrRenderTask> copyTask;
        if (dstContext && (copyTask = dstContext->copy(src, srcRect, dstPoint))) {
            if (outTask) {
                *outTask = std::move(copyTask);
            }
            return dstContext->asSurfaceProxyRef();
        }
    }
    // Fallback path: draw the source texture into a newly created fill context.
    if (src->asTextureProxy()) {
        auto dstContext = rContext->priv().makeSFC(kUnknown_SkAlphaType,
                                                   nullptr,
                                                   {width, height},
                                                   fit,
                                                   format,
                                                   1,
                                                   mipMapped,
                                                   src->isProtected(),
                                                   GrSwizzle::RGBA(),
                                                   GrSwizzle::RGBA(),
                                                   origin,
                                                   budgeted);
        GrSurfaceProxyView view(std::move(src), origin, GrSwizzle::RGBA());
        if (dstContext && dstContext->blitTexture(std::move(view), srcRect, dstPoint)) {
            if (outTask) {
                *outTask = dstContext->refRenderTask();
            }
            return dstContext->asSurfaceProxyRef();
        }
    }
    // Can't use backend copies or draws.
    return nullptr;
}
413
Copy(GrRecordingContext * context,sk_sp<GrSurfaceProxy> src,GrSurfaceOrigin origin,GrMipmapped mipMapped,SkBackingFit fit,SkBudgeted budgeted,sk_sp<GrRenderTask> * outTask)414 sk_sp<GrSurfaceProxy> GrSurfaceProxy::Copy(GrRecordingContext* context,
415 sk_sp<GrSurfaceProxy> src,
416 GrSurfaceOrigin origin,
417 GrMipmapped mipMapped,
418 SkBackingFit fit,
419 SkBudgeted budgeted,
420 sk_sp<GrRenderTask>* outTask) {
421 SkASSERT(!src->isFullyLazy());
422 auto rect = SkIRect::MakeSize(src->dimensions());
423 return Copy(context,
424 std::move(src),
425 origin,
426 mipMapped,
427 rect,
428 fit,
429 budgeted,
430 RectsMustMatch::kNo,
431 outTask);
432 }
433
434 #if GR_TEST_UTILS
testingOnly_getBackingRefCnt() const435 int32_t GrSurfaceProxy::testingOnly_getBackingRefCnt() const {
436 if (fTarget) {
437 return fTarget->testingOnly_getRefCnt();
438 }
439
440 return -1; // no backing GrSurface
441 }
442
// Test hook: exposes the proxy's internal surface flags.
GrInternalSurfaceFlags GrSurfaceProxy::testingOnly_getFlags() const {
    return fSurfaceFlags;
}
446
dump() const447 SkString GrSurfaceProxy::dump() const {
448 SkString tmp;
449
450 tmp.appendf("proxyID: %d - surfaceID: %d",
451 this->uniqueID().asUInt(),
452 this->peekSurface() ? this->peekSurface()->uniqueID().asUInt()
453 : -1);
454 return tmp;
455 }
456
457 #endif
458
// Converts an approx-fit proxy to exact fit. Used when converting an
// SkSpecialImage to an SkImage. When 'allocatedCaseOnly' is true, only an
// already-instantiated proxy is adjusted (see the explicit-allocation note
// below).
void GrSurfaceProxyPriv::exactify(bool allocatedCaseOnly) {
    SkASSERT(!fProxy->isFullyLazy());
    if (this->isExact()) {
        return;  // already exact - nothing to do
    }

    SkASSERT(SkBackingFit::kApprox == fProxy->fFit);

    if (fProxy->fTarget) {
        // The kApprox but already instantiated case. Setting the proxy's width & height to
        // the instantiated width & height could have side-effects going forward, since we're
        // obliterating the area of interest information. This call (exactify) only used
        // when converting an SkSpecialImage to an SkImage so the proxy shouldn't be
        // used for additional draws.
        fProxy->fDimensions = fProxy->fTarget->dimensions();
        return;
    }

#ifndef SK_CRIPPLE_TEXTURE_REUSE
    // In the post-implicit-allocation world we can't convert this proxy to be exact fit
    // at this point. With explicit allocation switching this to exact will result in a
    // different allocation at flush time. With implicit allocation, allocation would occur
    // at draw time (rather than flush time) so this pathway was encountered less often (if
    // at all).
    if (allocatedCaseOnly) {
        return;
    }
#endif

    // The kApprox uninstantiated case. Making this proxy be exact should be okay.
    // It could mess things up if prior decisions were based on the approximate size.
    fProxy->fFit = SkBackingFit::kExact;
    // fGpuMemorySize is used when caching specialImages for the image filter DAG. If it has
    // already been computed we want to leave it alone so that amount will be removed when
    // the special image goes away. If it hasn't been computed yet it might as well compute the
    // exact amount.
}
496
doLazyInstantiation(GrResourceProvider * resourceProvider)497 bool GrSurfaceProxyPriv::doLazyInstantiation(GrResourceProvider* resourceProvider) {
498 SkASSERT(fProxy->isLazy());
499
500 sk_sp<GrSurface> surface;
501 if (const auto& uniqueKey = fProxy->getUniqueKey(); uniqueKey.isValid()) {
502 // First try to reattach to a cached version if the proxy is uniquely keyed
503 surface = resourceProvider->findByUniqueKey<GrSurface>(uniqueKey);
504 }
505
506 bool syncKey = true;
507 bool releaseCallback = false;
508 if (!surface) {
509 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
510 NodeIdHelper helper(nodeId);
511 #endif
512 auto result = fProxy->fLazyInstantiateCallback(resourceProvider, fProxy->callbackDesc());
513 surface = std::move(result.fSurface);
514 syncKey = result.fKeyMode == GrSurfaceProxy::LazyInstantiationKeyMode::kSynced;
515 releaseCallback = surface && result.fReleaseCallback;
516 }
517 if (!surface) {
518 fProxy->fDimensions.setEmpty();
519 return false;
520 }
521 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
522 if (ParallelDebug::IsVkImageDfxEnabled()) {
523 surface->updateNodeId(nodeId);
524 }
525 #endif
526
527 if (fProxy->isFullyLazy()) {
528 // This was a fully lazy proxy. We need to fill in the width & height. For partially
529 // lazy proxies we must preserve the original width & height since that indicates
530 // the content area.
531 fProxy->fDimensions = surface->dimensions();
532 }
533
534 SkASSERT(fProxy->width() <= surface->width());
535 SkASSERT(fProxy->height() <= surface->height());
536
537 if (GrTextureProxy* texProxy = fProxy->asTextureProxy()) {
538 texProxy->setTargetKeySync(syncKey);
539 if (syncKey) {
540 const GrUniqueKey& key = texProxy->getUniqueKey();
541 if (key.isValid()) {
542 if (!surface->asTexture()->getUniqueKey().isValid()) {
543 // If 'surface' is newly created, attach the unique key
544 resourceProvider->assignUniqueKeyToResource(key, surface.get());
545 } else {
546 // otherwise we had better have reattached to a cached version
547 SkASSERT(surface->asTexture()->getUniqueKey() == key);
548 }
549 } else {
550 SkASSERT(!surface->getUniqueKey().isValid());
551 }
552 }
553 }
554
555 this->assign(std::move(surface));
556 if (releaseCallback) {
557 fProxy->fLazyInstantiateCallback = nullptr;
558 }
559
560 return true;
561 }
562
563 #ifdef SK_DEBUG
// Debug check that 'surface' is a valid backing store for this proxy: the
// backend formats must match, then subclasses verify their own invariants.
void GrSurfaceProxy::validateSurface(const GrSurface* surface) {
    SkASSERTF(surface->backendFormat() == fFormat, "%s != %s",
              surface->backendFormat().toStr().c_str(), fFormat.toStr().c_str());

    // Let the subclass (texture / render-target proxy) check its invariants.
    this->onValidateSurface(surface);
}
570 #endif
571