/external/skia/src/gpu/
  GrNonAtomicRef.h
      21: GrNonAtomicRef() : fRefCnt(1) {}  [in GrNonAtomicRef()]
      26: SkASSERT((0 == fRefCnt || 1 == fRefCnt));  [in ~GrNonAtomicRef()]
      28: fRefCnt = -10;  [in ~GrNonAtomicRef()]
      32: bool unique() const { return 1 == fRefCnt; }  [in unique()]
      36: int refCnt() const { return fRefCnt; }  [in refCnt()]
      40: SkASSERT(fRefCnt > 0);  [in ref()]
      41: ++fRefCnt;  [in ref()]
      45: SkASSERT(fRefCnt > 0);  [in unref()]
      46: --fRefCnt;  [in unref()]
      47: if (0 == fRefCnt) {  [in unref()]
      [all …]
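These hits are essentially the whole of Skia's single-threaded intrusive ref count: a plain int32_t that starts at 1, is poisoned to -10 in the destructor, and deletes the object when it reaches zero; a type opts in CRTP-style (class Foo : public GrNonAtomicRef<Foo>). A minimal standalone sketch of the same shape, with illustrative names rather than the header's exact code:

    #include <cassert>
    #include <cstdint>

    // Sketch of a non-atomic intrusive ref count. Only safe when every ref()/unref()
    // happens on the same thread; in exchange there is no atomic traffic at all.
    template <typename Derived>
    class NonAtomicRef {
    public:
        NonAtomicRef() : fRefCnt(1) {}                 // the creator owns the first ref
        ~NonAtomicRef() {
            assert(0 == fRefCnt || 1 == fRefCnt);      // 1 is allowed for stack-allocated objects
            fRefCnt = -10;                             // poison: trips the asserts on use-after-free
        }

        bool unique() const { return 1 == fRefCnt; }
        int  refCnt() const { return fRefCnt; }

        void ref() const { assert(fRefCnt > 0); ++fRefCnt; }
        void unref() const {
            assert(fRefCnt > 0);
            if (0 == --fRefCnt) {
                delete static_cast<const Derived*>(this);   // CRTP: no virtual destructor needed
            }
        }

    private:
        mutable int32_t fRefCnt;
    };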
  GrManagedResource.h
      79: GrManagedResource() : fRefCnt(1) {  [in GrManagedResource()]
      92: fRefCnt.store(0); // illegal value, to catch us if we reuse after delete  [in ~GrManagedResource()]
      98: int32_t getRefCnt() const { return fRefCnt.load(); }  [in getRefCnt()]
      108: return 1 == fRefCnt.load(std::memory_order_acquire);  [in unique()]
      116: SkDEBUGCODE(int newRefCount = )fRefCnt.fetch_add(+1, std::memory_order_relaxed);  [in ref()]
      127: int newRefCount = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);  [in unref()]
      186: fRefCnt.store(1);  [in internal_dispose()]
      191: mutable std::atomic<int32_t> fRefCnt;  [variable]
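GrManagedResource is the thread-safe counterpart: relaxed increments (taking a ref only needs atomicity), an acquire/release decrement so everything done to the object happens-before its deletion, unique() reading with acquire, the destructor storing 0 as an "illegal value", and internal_dispose() putting the count back to 1 just before delete so the destructor-time checks still hold. A hedged sketch of that shape, stripped of everything else the real class does:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Sketch of an atomically counted resource in the style of the hits above.
    // ref() can be relaxed, while unref() uses acq_rel so that every write made
    // through the object happens-before its deletion.
    class ManagedResourceSketch {
    public:
        ManagedResourceSketch() : fRefCnt(1) {}
        virtual ~ManagedResourceSketch() {
            assert(1 == fRefCnt.load());   // internal_dispose() restored the count for this check
            fRefCnt.store(0);              // illegal value, to catch reuse after delete
        }

        int32_t getRefCnt() const { return fRefCnt.load(); }
        bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }

        void ref() const {
            int prev = fRefCnt.fetch_add(+1, std::memory_order_relaxed);
            assert(prev >= 1);             // must not revive a dead object
            (void)prev;
        }

        void unref() const {
            int prev = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);
            assert(prev >= 1);
            if (1 == prev) {               // that was the last ref
                this->internal_dispose();
            }
        }

    private:
        void internal_dispose() const {
            fRefCnt.store(1);              // put the count back so the destructor's check passes
            delete this;
        }

        mutable std::atomic<int32_t> fRefCnt;
    };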
  GrGpuResource.h
      31: bool unique() const { return fRefCnt == 1; }  [in unique()]
      37: (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);  [in ref()]
      48: if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {  [in unref()]
      70: GrIORef() : fRefCnt(1), fCommandBufferUsageCnt(0) {}  [in GrIORef()]
      79: SkASSERT(fRefCnt >= 0);  [in addInitialRef()]
      81: (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);  [in addInitialRef()]
      89: int32_t getRefCnt() const { return fRefCnt.load(std::memory_order_relaxed); }  [in getRefCnt()]
      101: mutable std::atomic<int32_t> fRefCnt;  [variable]
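In the newer GrGpuResource.h the ref count is joined by fCommandBufferUsageCnt, so a resource can outlive its last client ref while a recorded command buffer still needs it. The real GrIORef hands the "a count just hit zero" decision to the resource cache; the sketch below only shows the two-counter bookkeeping and reports zero-crossings through a hypothetical hook (countReachedZero is my name, not Skia's):

    #include <atomic>
    #include <cstdint>

    // Sketch of the two-counter bookkeeping suggested by the hits above: ordinary
    // client refs plus a count of command buffers that still reference the resource.
    // Rather than deleting itself, the sketch reports zero-crossings to a hook; in
    // Skia the analogous "what now?" decision belongs to the resource cache.
    class IORefSketch {
    public:
        enum class CountType { kRef, kCommandBufferUsage };

        IORefSketch() : fRefCnt(1), fCommandBufferUsageCnt(0) {}
        virtual ~IORefSketch() = default;

        void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
        void unref() const {
            if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
                this->countReachedZero(CountType::kRef);                // last client ref dropped
            }
        }

        void addCommandBufferUsage() const {
            (void)fCommandBufferUsageCnt.fetch_add(+1, std::memory_order_relaxed);
        }
        void removeCommandBufferUsage() const {
            if (1 == fCommandBufferUsageCnt.fetch_add(-1, std::memory_order_acq_rel)) {
                this->countReachedZero(CountType::kCommandBufferUsage); // last usage retired
            }
        }

        bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }
        int32_t getRefCnt() const { return fRefCnt.load(std::memory_order_relaxed); }

    protected:
        // Hypothetical hook: an owner (e.g. a cache) decides whether to free,
        // recycle, or keep the resource until the other count also reaches zero.
        virtual void countReachedZero(CountType) const {}

    private:
        mutable std::atomic<int32_t> fRefCnt;
        mutable std::atomic<int32_t> fCommandBufferUsageCnt;
    };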
/external/skqp/src/gpu/
  GrNonAtomicRef.h
      21: GrNonAtomicRef() : fRefCnt(1) {}  [in GrNonAtomicRef()]
      26: SkASSERT((0 == fRefCnt || 1 == fRefCnt));  [in ~GrNonAtomicRef()]
      28: fRefCnt = -10;  [in ~GrNonAtomicRef()]
      32: bool unique() const { return 1 == fRefCnt; }  [in unique()]
      36: SkASSERT(fRefCnt > 0);  [in ref()]
      37: ++fRefCnt;  [in ref()]
      41: SkASSERT(fRefCnt > 0);  [in unref()]
      42: --fRefCnt;  [in unref()]
      43: if (0 == fRefCnt) {  [in unref()]
      50: mutable int32_t fRefCnt;
/external/skqp/src/gpu/vk/
  GrVkResource.h
      75: GrVkResource() : fRefCnt(1) {  [in GrVkResource()]
      88: fRefCnt.store(0); // illegal value, to catch us if we reuse after delete  [in ~GrVkResource()]
      94: int32_t getRefCnt() const { return fRefCnt.load(); }  [in getRefCnt()]
      104: return 1 == fRefCnt.load(std::memory_order_acquire);  [in unique()]
      112: SkDEBUGCODE(int newRefCount = )fRefCnt.fetch_add(+1, std::memory_order_relaxed);  [in ref()]
      124: int newRefCount = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);  [in unref()]
      137: int newRefCount = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);  [in unrefAndAbandon()]
      196: fRefCnt.store(1);  [in internal_dispose()]
      212: fRefCnt.store(1);  [in internal_dispose()]
      217: mutable std::atomic<int32_t> fRefCnt;  [variable]
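GrVkResource has the same counting as GrManagedResource plus a second tear-down path: unref() runs while the device is alive and can release Vulkan handles, whereas unrefAndAbandon() is for after the context has been abandoned and device objects must not be touched. A sketch of that split; freeGPUData/abandonGPUData are illustrative stand-ins for whatever the real subclass hooks do:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Sketch of a ref-counted GPU-backed object with two disposal paths: the normal
    // one may talk to the device to release API objects, the "abandon" one is for
    // when the context/device is already gone and those handles must not be touched.
    class VkResourceSketch {
    public:
        VkResourceSketch() : fRefCnt(1) {}
        virtual ~VkResourceSketch() {
            assert(1 == fRefCnt.load());
            fRefCnt.store(0);             // illegal value, to catch reuse after delete
        }

        void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }

        void unref() const {
            int prev = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);
            assert(prev >= 1);
            if (1 == prev) {
                this->freeGPUData();      // device still alive: release Vulkan handles
                this->internalDispose();
            }
        }

        void unrefAndAbandon() const {
            int prev = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);
            assert(prev >= 1);
            if (1 == prev) {
                this->abandonGPUData();   // device torn down: drop CPU-side bookkeeping only
                this->internalDispose();
            }
        }

    protected:
        // Illustrative hooks; subclasses would release or forget their Vulkan objects here.
        virtual void freeGPUData() const {}
        virtual void abandonGPUData() const {}

    private:
        void internalDispose() const {
            fRefCnt.store(1);             // satisfy the destructor's sanity check
            delete this;
        }

        mutable std::atomic<int32_t> fRefCnt;
    };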
/external/skqp/src/core/
  SkCachedData.cpp
      15: , fRefCnt(1)  [in SkCachedData()]
      26: , fRefCnt(1)  [in SkCachedData()]
      77: if ((1 == fRefCnt) && fInCache) {  [in inMutexRef()]
      81: fRefCnt += 1;  [in inMutexRef()]
      89: switch (--fRefCnt) {  [in inMutexUnref()]
      114: return 0 == fRefCnt;  [in inMutexUnref()]
      163: SkASSERT((fInCache && fRefCnt > 1) || !fInCache);  [in validate()]
      173: SkASSERT((fInCache && 1 == fRefCnt) || (0 == fRefCnt));  [in validate()]
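SkCachedData's count is a plain int because it is only ever changed while the owning cache's mutex is held (hence the inMutex* names); the cache's own reference is one of the counted refs, and inMutexUnref() tells the caller when the last owner, cache or client, has let go. A simplified sketch of that idea, with the discardable/locked-storage handling reduced to comments:

    #include <cassert>

    // Simplified sketch of a cache-owned blob: the ref count is a plain int because
    // it is only ever touched while the owning cache's mutex is held (the "inMutex"
    // prefix is the reminder). The cache's own reference is one of the counted refs.
    class CachedDataSketch {
    public:
        CachedDataSketch() : fRefCnt(1), fInCache(false) {}   // creator holds the first ref

        void inMutexRef() {
            if (1 == fRefCnt && fInCache) {
                // First client ref while only the cache owned us: the real code
                // re-locks its discardable backing storage here.
            }
            fRefCnt += 1;
        }

        // Returns true when the last owner (cache or client) is gone; the caller
        // deletes the object after releasing the mutex.
        bool inMutexUnref() {
            switch (--fRefCnt) {
                case 0:
                    return true;
                case 1:
                    if (fInCache) {
                        // Only the cache owns us now: storage may become purgeable.
                    }
                    break;
                default:
                    break;
            }
            return false;
        }

        // The cache takes a counted ref of its own when it adopts the data...
        void inMutexAttachToCache() { assert(!fInCache); fInCache = true; this->inMutexRef(); }
        // ...and gives it back on eviction; true means this was the last owner.
        bool inMutexDetachFromCache() { assert(fInCache); fInCache = false; return this->inMutexUnref(); }

    private:
        int  fRefCnt;     // guarded externally, so no atomics needed
        bool fInCache;    // does the cache still point at us?
    };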
  SkRegionPriv.h
      61: std::atomic<int32_t> fRefCnt;
      92: head->fRefCnt = 1;  [in Alloc()]
      115: SkASSERT(fRefCnt == 1);  [in writable_runs()]
      125: if (fRefCnt > 1) {  [in ensureWritable()]
      135: if (--fRefCnt == 0) {  [in ensureWritable()]
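The RunHead hits are the classic copy-on-write shape: one heap block shared by many SkRegions, a count of how many regions point at it, and an ensureWritable() that clones the block as soon as the count exceeds one. A small generic sketch of the pattern (payload reduced to an int array):

    #include <atomic>
    #include <cassert>
    #include <cstdint>
    #include <cstring>
    #include <new>

    // Copy-on-write sketch: a shared, ref-counted header with its payload allocated
    // right behind it. Writers call ensureWritable(); if anyone else still shares
    // the block they get a private copy, otherwise they may modify it in place.
    struct RunHeadSketch {
        std::atomic<int32_t> fRefCnt;
        int                  fCount;       // number of payload ints following the header

        static RunHeadSketch* Alloc(int count) {
            void* mem = ::operator new(sizeof(RunHeadSketch) + count * sizeof(int));
            RunHeadSketch* head = new (mem) RunHeadSketch;     // placement-construct the header
            head->fRefCnt.store(1, std::memory_order_relaxed);
            head->fCount = count;
            return head;
        }

        int* writable_runs() {
            assert(fRefCnt.load(std::memory_order_relaxed) == 1);   // must own it exclusively
            return reinterpret_cast<int*>(this + 1);
        }
        const int* readonly_runs() const { return reinterpret_cast<const int*>(this + 1); }

        void ref() { fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
        void unref() {
            if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
                this->~RunHeadSketch();
                ::operator delete(this);
            }
        }

        // Returns a head the caller may write to, dropping its claim on the old one.
        RunHeadSketch* ensureWritable() {
            if (fRefCnt.load(std::memory_order_acquire) > 1) {
                RunHeadSketch* copy = Alloc(fCount);
                std::memcpy(copy->writable_runs(), this->readonly_runs(), fCount * sizeof(int));
                this->unref();
                return copy;
            }
            return this;
        }
    };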
  SkRWBuffer.cpp
      68: mutable std::atomic<int32_t> fRefCnt;  [member]
      71: SkBufferHead(size_t capacity) : fRefCnt(1), fBlock(capacity) {}  [in SkBufferHead()]
      86: SkAssertResult(fRefCnt.fetch_add(+1, std::memory_order_relaxed));  [in ref()]
      91: int32_t oldRefCnt = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);  [in unref()]
      107: SkASSERT(fRefCnt.load(std::memory_order_relaxed) > 0);  [in validate()]
  SkCachedData.h
      31: int testing_only_getRefCnt() const { return fRefCnt; }  [in testing_only_getRefCnt()]
      57: int fRefCnt; // low-bit means we're owned by the cache  [variable]
  SkString.cpp
      230: SkAssertResult(this->fRefCnt.fetch_add(+1, std::memory_order_relaxed));  [in ref()]
      237: int32_t oldRefCnt = this->fRefCnt.fetch_add(-1, std::memory_order_acq_rel);  [in unref()]
      245: return fRefCnt.load(std::memory_order_acquire) == 1;  [in unique()]
      252: SkASSERT(0 == gEmptyRec.fRefCnt.load(std::memory_order_relaxed));  [in validate()]
      257: SkASSERT(fRec->fRefCnt.load(std::memory_order_relaxed) > 0);  [in validate()]
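SkString's Rec is the shared payload behind cheap string copies. The wrinkle these hits point at is gEmptyRec: a static record shared by every empty string whose count is pinned at 0, so ref() and unref() must skip it entirely (which is what the validate() assert on gEmptyRec checks). A sketch of that shape, with the character storage omitted:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Sketch of a shared string record: copies of the string share one Rec and bump
    // its count. A single static empty Rec is shared by every default-constructed
    // string; its count is pinned at 0 and ref()/unref() skip it entirely.
    struct RecSketch {
        constexpr RecSketch(uint32_t len, int32_t refCnt) : fLength(len), fRefCnt(refCnt) {}

        static const RecSketch gEmptyRec;

        void ref() const {
            if (this == &gEmptyRec) {
                return;                                   // the empty rec is immortal
            }
            int32_t prev = fRefCnt.fetch_add(+1, std::memory_order_relaxed);
            assert(prev > 0);                             // must already have been alive
            (void)prev;
        }

        void unref() const {
            if (this == &gEmptyRec) {
                return;
            }
            int32_t prev = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);
            assert(prev > 0);
            if (1 == prev) {
                delete this;                              // last string holding this payload
            }
        }

        bool unique() const { return fRefCnt.load(std::memory_order_acquire) == 1; }

        uint32_t                     fLength;
        mutable std::atomic<int32_t> fRefCnt;
        // In the real Rec the characters follow the header; omitted here.
    };

    const RecSketch RecSketch::gEmptyRec(0, 0);           // length 0, ref count pinned at 0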
/external/skia/src/core/
  SkCachedData.cpp
      15: , fRefCnt(1)  [in SkCachedData()]
      26: , fRefCnt(1)  [in SkCachedData()]
      77: if ((1 == fRefCnt) && fInCache) {  [in inMutexRef()]
      81: fRefCnt += 1;  [in inMutexRef()]
      89: switch (--fRefCnt) {  [in inMutexUnref()]
      114: return 0 == fRefCnt;  [in inMutexUnref()]
      163: SkASSERT((fInCache && fRefCnt > 1) || !fInCache);  [in validate()]
      173: SkASSERT((fInCache && 1 == fRefCnt) || (0 == fRefCnt));  [in validate()]
  SkRegionPriv.h
      61: std::atomic<int32_t> fRefCnt;
      92: head->fRefCnt = 1;  [in Alloc()]
      115: SkASSERT(fRefCnt == 1);  [in writable_runs()]
      125: if (fRefCnt > 1) {  [in ensureWritable()]
      135: if (--fRefCnt == 0) {  [in ensureWritable()]
  SkCachedData.h
      31: int testing_only_getRefCnt() const { return fRefCnt; }  [in testing_only_getRefCnt()]
      57: int fRefCnt; // low-bit means we're owned by the cache  [variable]
  SkString.cpp
      234: SkAssertResult(this->fRefCnt.fetch_add(+1, std::memory_order_relaxed));  [in ref()]
      241: int32_t oldRefCnt = this->fRefCnt.fetch_add(-1, std::memory_order_acq_rel);  [in unref()]
      249: return fRefCnt.load(std::memory_order_acquire) == 1;  [in unique()]
      256: SkASSERT(0 == gEmptyRec.fRefCnt.load(std::memory_order_relaxed));  [in validate()]
      261: SkASSERT(fRec->fRefCnt.load(std::memory_order_relaxed) > 0);  [in validate()]
  SkAAClip.cpp
      62: std::atomic<int32_t> fRefCnt;  [member]
      82: head->fRefCnt.store(1);  [in Alloc()]
      201: SkASSERT(head->fRefCnt.load() > 0);  [in validate()]
      536: SkASSERT(fRunHead->fRefCnt.load() >= 1);  [in freeRuns()]
      537: if (1 == fRunHead->fRefCnt--) {  [in freeRuns()]
      567: fRunHead->fRefCnt++;  [in operator =()]
      1760: fRunHead->fRefCnt++;  [in translate()]
/external/skia/include/core/
  SkRefCnt.h
      34: SkRefCntBase() : fRefCnt(1) {}  [in SkRefCntBase()]
      42: fRefCnt.store(0, std::memory_order_relaxed);  [in ~SkRefCntBase()]
      50: if (1 == fRefCnt.load(std::memory_order_acquire)) {  [in unique()]
      64: (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);  [in ref()]
      74: if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {  [in unref()]
      86: return fRefCnt.load(std::memory_order_relaxed);  [in getRefCnt()]
      96: fRefCnt.store(1, std::memory_order_relaxed);  [in internal_dispose()]
      105: mutable std::atomic<int32_t> fRefCnt;  [variable]
      161: SkNVRefCnt() : fRefCnt(1) {}  [in SkNVRefCnt()]
      164: int rc = fRefCnt.load(std::memory_order_relaxed);  [in ~SkNVRefCnt()]
      [all …]
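SkRefCntBase and SkNVRefCnt are the canonical versions of the pattern above, and in practice they are consumed through sk_sp<T>, which pairs ref() and unref() automatically. As an illustration of how the counts get driven, here is a stripped-down intrusive smart pointer in that spirit; it is not SkRefCnt.h's actual sk_sp, just a sketch that works with any type exposing ref()/unref():

    #include <utility>

    // Minimal intrusive smart pointer in the spirit of sk_sp: it assumes T has
    // ref()/unref() and that a raw pointer handed to the constructor already
    // carries one ref, which the wrapper adopts.
    template <typename T>
    class IntrusivePtr {
    public:
        IntrusivePtr() : fPtr(nullptr) {}
        explicit IntrusivePtr(T* adopted) : fPtr(adopted) {}            // takes over the caller's ref

        IntrusivePtr(const IntrusivePtr& other) : fPtr(other.fPtr) {
            if (fPtr) { fPtr->ref(); }                                  // copies share ownership
        }
        IntrusivePtr(IntrusivePtr&& other) noexcept : fPtr(other.fPtr) {
            other.fPtr = nullptr;                                       // moves transfer the ref
        }

        IntrusivePtr& operator=(IntrusivePtr other) noexcept {          // copy-and-swap handles both
            std::swap(fPtr, other.fPtr);
            return *this;
        }

        ~IntrusivePtr() {
            if (fPtr) { fPtr->unref(); }
        }

        T* get() const { return fPtr; }
        T* operator->() const { return fPtr; }
        T& operator*() const { return *fPtr; }
        explicit operator bool() const { return fPtr != nullptr; }

    private:
        T* fPtr;
    };

    // Usage: IntrusivePtr<Foo> p(new Foo);   // adopts the creation ref; unrefs on scope exit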
  SkString.h
      239: constexpr Rec(uint32_t len, int32_t refCnt) : fLength(len), fRefCnt(refCnt) {}  [in Rec()]
      248: mutable std::atomic<int32_t> fRefCnt;  [member]
/external/skqp/include/private/
  GrSurfaceProxy.h
      37: ++fRefCnt;  [in ref()]
      50: --fRefCnt;  [in unref()]
      56: SkASSERT(fRefCnt >= 0 && fPendingWrites >= 0 && fPendingReads >= 0);  [in isUnique_debugOnly()]
      57: return 1 == fRefCnt + fPendingWrites + fPendingReads;  [in isUnique_debugOnly()]
      73: for (int refs = fRefCnt; refs; --refs) {  [in release()]
      81: SkASSERT(fRefCnt >= 0);  [in validate()]
      84: SkASSERT(fRefCnt + fPendingReads + fPendingWrites >= 1);  [in validate()]
      90: SkASSERT(fTarget->fRefCnt >= fRefCnt);  [in validate()]
      142: GrIORefProxy() : fTarget(nullptr), fRefCnt(1), fPendingReads(0), fPendingWrites(0) {}  [in GrIORefProxy()]
      143: GrIORefProxy(sk_sp<GrSurface> surface) : fRefCnt(1), fPendingReads(0), fPendingWrites(0) {  [in GrIORefProxy()]
      [all …]
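The proxy's bookkeeping is wider than a plain ref count: it also tracks IO that has been recorded but not yet flushed (fPendingReads/fPendingWrites), a proxy only counts as unique when the sum of all three is 1, and once the proxy is instantiated its refs are mirrored onto the backing surface (hence the fTarget->fRefCnt >= fRefCnt assert). A rough sketch of just the counting part, single-threaded like the original's plain ints:

    #include <cassert>
    #include <cstdint>

    // Sketch of proxy-style bookkeeping: client refs plus pending reads/writes that
    // were recorded but not yet executed. "Unique" means no other client and no
    // outstanding IO. Mirroring refs onto a backing target is not shown.
    class IORefProxySketch {
    public:
        IORefProxySketch() : fRefCnt(1), fPendingReads(0), fPendingWrites(0) {}

        void ref() const   { this->validate(); ++fRefCnt; }
        void unref() const {
            this->validate();
            --fRefCnt;
            this->didRemoveRefOrPendingIO();
        }

        void addPendingRead() const  { this->validate(); ++fPendingReads; }
        void completedRead() const   { this->validate(); --fPendingReads;  this->didRemoveRefOrPendingIO(); }
        void addPendingWrite() const { this->validate(); ++fPendingWrites; }
        void completedWrite() const  { this->validate(); --fPendingWrites; this->didRemoveRefOrPendingIO(); }

        bool isUnique() const {
            assert(fRefCnt >= 0 && fPendingWrites >= 0 && fPendingReads >= 0);
            return 1 == fRefCnt + fPendingWrites + fPendingReads;
        }

    private:
        void validate() const {
            assert(fRefCnt >= 0 && fPendingReads >= 0 && fPendingWrites >= 0);
            assert(fRefCnt + fPendingReads + fPendingWrites >= 1);
        }
        void didRemoveRefOrPendingIO() const {
            if (0 == fRefCnt && 0 == fPendingReads && 0 == fPendingWrites) {
                delete this;                   // nobody and nothing needs this proxy anymore
            }
        }

        mutable int32_t fRefCnt;
        mutable int32_t fPendingReads;
        mutable int32_t fPendingWrites;
    };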
  SkWeakRefCnt.h
      82: int32_t prev = fRefCnt.load(std::memory_order_relaxed);  [in atomic_conditional_acquire_strong_ref()]
      87: } while(!fRefCnt.compare_exchange_weak(prev, prev+1, std::memory_order_acquire,  [in atomic_conditional_acquire_strong_ref()]
      142: return fRefCnt.load(std::memory_order_relaxed) == 0;  [in weak_expired()]
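The weak-pointer half is atomic_conditional_acquire_strong_ref(): a weak holder may only promote itself to a strong ref while the strong count is still non-zero, which needs a compare-exchange loop rather than a blind fetch_add. A sketch of just that promotion step; the rest of the strong/weak plumbing is omitted:

    #include <atomic>
    #include <cstdint>

    // Sketch of weak-to-strong promotion: increment the strong count only if it is
    // still non-zero. A plain fetch_add(+1) would resurrect an object whose strong
    // count has already hit zero and whose teardown may be underway.
    class WeakRefSketch {
    public:
        WeakRefSketch() : fRefCnt(1), fWeakCnt(1) {}

        // Returns true if a strong ref was acquired; false if the object is already
        // logically dead and only weak refs remain.
        bool tryRef() const {
            int32_t prev = fRefCnt.load(std::memory_order_relaxed);
            do {
                if (0 == prev) {
                    return false;                       // too late: strong owners are gone
                }
                // On success, acquire pairs with the release in the last strong unref.
            } while (!fRefCnt.compare_exchange_weak(prev, prev + 1,
                                                    std::memory_order_acquire,
                                                    std::memory_order_relaxed));
            return true;
        }

        bool weakExpired() const {
            return fRefCnt.load(std::memory_order_relaxed) == 0;
        }

        void weakRef() const { fWeakCnt.fetch_add(+1, std::memory_order_relaxed); }
        // Strong ref()/unref() and the weak unref that finally frees the memory are
        // omitted; they follow the usual relaxed-increment / acq_rel-decrement shape.

    private:
        mutable std::atomic<int32_t> fRefCnt;    // strong count
        mutable std::atomic<int32_t> fWeakCnt;   // weak count; memory outlives the last strong ref
    };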
/external/skqp/include/gpu/
  GrGpuResource.h
      55: ++fRefCnt;  [in ref()]
      61: if (!(--fRefCnt)) {  [in unref()]
      72: SkASSERT(fRefCnt >= 0);  [in validate()]
      75: SkASSERT(fRefCnt + fPendingReads + fPendingWrites >= 0);  [in validate()]
      80: GrIORef() : fRefCnt(1), fPendingReads(0), fPendingWrites(0) { }  [in GrIORef()]
      92: bool internalHasRef() const { return SkToBool(fRefCnt); }  [in internalHasRef()]
      93: bool internalHasUniqueRef() const { return fRefCnt == 1; }  [in internalHasUniqueRef()]
      125: if (0 == fPendingReads && 0 == fPendingWrites && 0 == fRefCnt) {  [in didRemoveRefOrPendingIO()]
      130: mutable int32_t fRefCnt;  [variable]
/external/skqp/include/core/
  SkRefCnt.h
      35: SkRefCntBase() : fRefCnt(1) {}  [in SkRefCntBase()]
      43: fRefCnt.store(0, std::memory_order_relaxed);  [in ~SkRefCntBase()]
      51: if (1 == fRefCnt.load(std::memory_order_acquire)) {  [in unique()]
      65: (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);  [in ref()]
      75: if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {  [in unref()]
      87: return fRefCnt.load(std::memory_order_relaxed);  [in getRefCnt()]
      97: fRefCnt.store(1, std::memory_order_relaxed);  [in internal_dispose()]
      106: mutable std::atomic<int32_t> fRefCnt;  [variable]
      162: SkNVRefCnt() : fRefCnt(1) {}  [in SkNVRefCnt()]
      165: int rc = fRefCnt.load(std::memory_order_relaxed);  [in ~SkNVRefCnt()]
      [all …]
  SkString.h
      243: : fLength(len), fRefCnt(refCnt), fBeginningOfData(0)  [in Rec()]
      247: mutable std::atomic<int32_t> fRefCnt;  [member]
/external/skia/tests/
  RefCntTest.cpp
      91: Effect() : fRefCnt(1) {  [in Effect()]
      96: int fRefCnt;  [member in Effect]
      100: fRefCnt += 1;  [in ref()]
      105: SkASSERT(fRefCnt > 0);  [in unref()]
      106: if (0 == --fRefCnt) {  [in unref()]
      160: REPORTER_ASSERT(reporter, paint.fEffect.get()->fRefCnt == 1);  [in DEF_TEST()]
      193: REPORTER_ASSERT(reporter, paint.fEffect.get()->fRefCnt == 2);  [in DEF_TEST()]
      198: REPORTER_ASSERT(reporter, paint.fEffect.get()->fRefCnt == 3);  [in DEF_TEST()]
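The test's Effect starts at 1 like everything else, and the REPORTER_ASSERT lines pin down exactly how many owners the paint's effect should have after each copy. The snippet below reproduces that kind of count check with a toy Effect/Paint pair and plain asserts instead of Skia's test harness; it is an illustration, not the test's code:

    #include <cassert>

    // A toy manually counted Effect plus a paint-like holder, used to show the
    // kind of count-tracking assertions the test makes.
    struct Effect {
        int fRefCnt = 1;                       // creator holds the first ref

        void ref()   { fRefCnt += 1; }
        void unref() {
            assert(fRefCnt > 0);
            if (0 == --fRefCnt) {
                delete this;
            }
        }
    };

    struct Paint {
        Effect* fEffect = nullptr;

        Paint() = default;
        Paint(const Paint& other) : fEffect(other.fEffect) {
            if (fEffect) { fEffect->ref(); }   // copying a paint shares (and refs) the effect
        }
        ~Paint() {
            if (fEffect) { fEffect->unref(); }
        }
        Paint& operator=(const Paint&) = delete;   // keep the toy simple
    };

    int main() {
        Paint p0;
        p0.fEffect = new Effect;               // count: 1 (p0)
        assert(p0.fEffect->fRefCnt == 1);

        Paint p1(p0);                          // count: 2 (p0, p1)
        assert(p0.fEffect->fRefCnt == 2);
        {
            Paint p2(p1);                      // count: 3 (p0, p1, p2)
            assert(p0.fEffect->fRefCnt == 3);
        }                                      // p2 destroyed
        assert(p0.fEffect->fRefCnt == 2);
        return 0;
    }                                          // p1 then p0 destroyed; effect deleted at 0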
/external/skqp/tests/
  RefCntTest.cpp
      91: Effect() : fRefCnt(1) {  [in Effect()]
      96: int fRefCnt;  [member in Effect]
      100: fRefCnt += 1;  [in ref()]
      105: SkASSERT(fRefCnt > 0);  [in unref()]
      106: if (0 == --fRefCnt) {  [in unref()]
      160: REPORTER_ASSERT(reporter, paint.fEffect.get()->fRefCnt == 1);  [in DEF_TEST()]
      193: REPORTER_ASSERT(reporter, paint.fEffect.get()->fRefCnt == 2);  [in DEF_TEST()]
      198: REPORTER_ASSERT(reporter, paint.fEffect.get()->fRefCnt == 3);  [in DEF_TEST()]
/external/skia/include/private/
  SkWeakRefCnt.h
      82: int32_t prev = fRefCnt.load(std::memory_order_relaxed);  [in atomic_conditional_acquire_strong_ref()]
      87: } while(!fRefCnt.compare_exchange_weak(prev, prev+1, std::memory_order_acquire,  [in atomic_conditional_acquire_strong_ref()]
      142: return fRefCnt.load(std::memory_order_relaxed) == 0;  [in weak_expired()]