/*
 * Copyright 2008 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkPixelRef_DEFINED
#define SkPixelRef_DEFINED

#include "SkBitmap.h"
#include "SkDynamicAnnotations.h"
#include "SkRefCnt.h"
#include "SkString.h"
#include "SkImageInfo.h"
#include "SkSize.h"
#include "SkTDArray.h"

//#define xed

#ifdef SK_DEBUG
    /**
     *  Defining SK_IGNORE_PIXELREF_SETPRELOCKED will force all pixelref
     *  subclasses to correctly handle lock/unlock pixels. For performance
     *  reasons, simple malloc-based subclasses call setPreLocked() to skip
     *  the overhead of implementing these calls.
     *
     *  This build-flag disables that optimization, to aid in debugging our
     *  call-sites, to ensure that they correctly balance their calls of
     *  lock and unlock.
     */
//    #define SK_IGNORE_PIXELREF_SETPRELOCKED
#endif

class SkColorTable;
class SkData;
struct SkIRect;
class SkMutex;

class GrTexture;

/** \class SkPixelRef

    This class is the smart container for pixel memory, and is used with
    SkBitmap. A pixelref is installed into a bitmap, and then the bitmap can
    access the actual pixel memory by calling lockPixels/unlockPixels.

    This class can be shared/accessed between multiple threads.
*/
class SK_API SkPixelRef : public SkRefCnt {
public:
    SK_DECLARE_INST_COUNT(SkPixelRef)

    explicit SkPixelRef(const SkImageInfo&);
    SkPixelRef(const SkImageInfo&, SkBaseMutex* mutex);
    virtual ~SkPixelRef();

    const SkImageInfo& info() const {
        return fInfo;
    }

    /** Return the pixel memory returned from lockPixels, or null if the
        lockCount is 0.
    */
    void* pixels() const { return fRec.fPixels; }

    /** Return the current colorTable (if any) if pixels are locked, or null.
    */
    SkColorTable* colorTable() const { return fRec.fColorTable; }

    size_t rowBytes() const { return fRec.fRowBytes; }

    /**
     *  To access the actual pixels of a pixelref, it must be "locked".
     *  Calling lockPixels returns a LockRec struct (on success).
     */
    struct LockRec {
        void*           fPixels;
        SkColorTable*   fColorTable;
        size_t          fRowBytes;

        void zero() { sk_bzero(this, sizeof(*this)); }

        bool isZero() const {
            return NULL == fPixels && NULL == fColorTable && 0 == fRowBytes;
        }
    };

    /**
     *  Returns true if the lock count > 0
     */
    bool isLocked() const { return fLockCount > 0; }

    SkDEBUGCODE(int getLockCount() const { return fLockCount; })

    /**
     *  Call to access the pixel memory. Return true on success. Balance this
     *  with a call to unlockPixels().
     */
    bool lockPixels();

    /**
     *  Call to access the pixel memory. On success, return true and fill out
     *  the specified rec. On failure, return false and ignore the rec parameter.
     *  Balance this with a call to unlockPixels().
     */
    bool lockPixels(LockRec* rec);

    /** Call to balance a previous call to lockPixels(). NOTE: lock calls can
        be nested, but the matching number of unlock calls must be made in
        order to free the memory (if the subclass implements
        caching/deferred-decoding.)
    */
    void unlockPixels();
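
    /**
     *  A minimal usage sketch, assuming an SkPixelRef* named pixelRef (the
     *  variable names are illustrative only): how a caller balances
     *  lockPixels(LockRec*) with unlockPixels().
     *
     *      SkPixelRef::LockRec rec;
     *      if (pixelRef->lockPixels(&rec)) {
     *          // rec.fPixels / rec.fRowBytes / rec.fColorTable are only
     *          // valid while the lock is held.
     *          const uint8_t* row0 = static_cast<const uint8_t*>(rec.fPixels);
     *          // ... read (or write) the pixels here ...
     *          pixelRef->unlockPixels();   // every successful lock needs a matching unlock
     *      }
     */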

    /**
     *  Some bitmaps can return a copy of their pixels for lockPixels(), but
     *  that copy, if modified, will not be pushed back. These bitmaps should
     *  not be used as targets for a raster device/canvas (since all pixel
     *  modifications will be lost when unlockPixels() is called.)
     */
    bool lockPixelsAreWritable() const;

    /** Returns a non-zero, unique value corresponding to the pixels in this
        pixelref. Each time the pixels are changed (and notifyPixelsChanged is
        called), a different generation ID will be returned.
    */
    uint32_t getGenerationID() const;

    /**
     *  Call this if you have changed the contents of the pixels. This will in
     *  turn cause a different generation ID value to be returned from
     *  getGenerationID().
     */
    void notifyPixelsChanged();

    /**
     *  Change the info's AlphaType. Note that this does not automatically
     *  invalidate the generation ID. If the pixel values themselves have
     *  changed, then you must explicitly call notifyPixelsChanged() as well.
     */
    void changeAlphaType(SkAlphaType at);

    /** Returns true if this pixelref is marked as immutable, meaning that the
        contents of its pixels will not change for the lifetime of the pixelref.
    */
    bool isImmutable() const { return fIsImmutable; }

    /** Marks this pixelref as immutable, meaning that the contents of its
        pixels will not change for the lifetime of the pixelref. This state can
        be set on a pixelref, but it cannot be cleared once it is set.
    */
    void setImmutable();

    /** Return the optional URI string associated with this pixelref. May be
        null.
    */
    const char* getURI() const { return fURI.size() ? fURI.c_str() : NULL; }

    /** Copy a URI string to this pixelref, or clear the URI if the uri is null
    */
    void setURI(const char uri[]) {
        fURI.set(uri);
    }

    /** Copy a URI string to this pixelref
    */
    void setURI(const char uri[], size_t len) {
        fURI.set(uri, len);
    }

    /** Assign a URI string to this pixelref.
    */
    void setURI(const SkString& uri) { fURI = uri; }

    /**
     *  If the pixelRef has an encoded (i.e. compressed) representation,
     *  return a ref to its data. If the pixelRef is uncompressed or otherwise
     *  does not have this form, return NULL.
     *
     *  If non-null is returned, the caller is responsible for calling unref()
     *  on the data when it is finished.
     */
    SkData* refEncodedData() {
        return this->onRefEncodedData();
    }

    /**
     *  Experimental -- tells the caller if it is worth it to call decodeInto().
     *  Just an optimization at this point, to avoid checking the cache first.
     *  We may remove/change this call in the future.
     */
    bool implementsDecodeInto() {
        return this->onImplementsDecodeInto();
    }
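
    /**
     *  A hedged sketch of the refEncodedData() ownership convention, again
     *  assuming a hypothetical SkPixelRef* named pixelRef:
     *
     *      if (SkData* encoded = pixelRef->refEncodedData()) {
     *          // use encoded->data() and encoded->size() ...
     *          encoded->unref();   // the caller owns this reference
     *      }
     */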

    /**
     *  Return a decoded instance of this pixelRef in bitmap. If this cannot be
     *  done, return false and the bitmap parameter is ignored/unchanged.
     *
     *  pow2 is the requested power-of-two downscale that the caller needs. This
     *  can be ignored, and the "original" size can be returned, but if the
     *  underlying codec can efficiently return a smaller size, that should be
     *  done. Some examples:
     *
     *  To request the "base" version (original scale), pass 0 for pow2
     *  To request 1/2 scale version (1/2 width, 1/2 height), pass 1 for pow2
     *  To request 1/4 scale version (1/4 width, 1/4 height), pass 2 for pow2
     *  ...
     *
     *  If this returns true, then bitmap must be "locked" such that
     *  bitmap->getPixels() will return the correct address.
     */
    bool decodeInto(int pow2, SkBitmap* bitmap) {
        SkASSERT(pow2 >= 0);
        return this->onDecodeInto(pow2, bitmap);
    }

    /** Are we really wrapping a texture instead of a bitmap?
    */
    virtual GrTexture* getTexture() { return NULL; }

    /**
     *  If any planes or rowBytes is NULL, this should output the sizes and return true
     *  if it can efficiently return YUV planar data. If it cannot, it should return false.
     *
     *  If all planes and rowBytes are not NULL, then it should copy the associated Y,U,V data
     *  into those planes of memory supplied by the caller. It should validate that the sizes
     *  match what it expected. If the sizes do not match, it should return false.
     *
     *  If colorSpace is not NULL, the YUV color space of the data should be stored in the address
     *  it points at.
     */
    bool getYUV8Planes(SkISize sizes[3], void* planes[3], size_t rowBytes[3],
                       SkYUVColorSpace* colorSpace) {
        return this->onGetYUV8Planes(sizes, planes, rowBytes, colorSpace);
    }

    bool readPixels(SkBitmap* dst, const SkIRect* subset = NULL);

    /**
     *  Makes a deep copy of this PixelRef, respecting the requested config.
     *  @param colortype Desired colortype.
     *  @param subset    Subset of this PixelRef to copy. Must be fully contained within the
     *                   bounds of this PixelRef.
     *  @return A new SkPixelRef, or NULL if either there is an error (e.g. the destination could
     *          not be created with the given config), or this PixelRef does not support deep
     *          copies.
     */
    virtual SkPixelRef* deepCopy(SkColorType colortype, const SkIRect* subset) {
        return NULL;
    }

#ifdef SK_BUILD_FOR_ANDROID
    /**
     *  Acquire a "global" ref on this object.
     *  The default implementation just calls ref(), but subclasses can override
     *  this method to implement additional behavior.
     */
    virtual void globalRef(void* data=NULL);

    /**
     *  Release a "global" ref on this object.
     *  The default implementation just calls unref(), but subclasses can override
     *  this method to implement additional behavior.
     */
    virtual void globalUnref();
#endif

    // Register a listener that may be called the next time our generation ID changes.
    //
    // We'll only call the listener if we're confident that we are the only SkPixelRef with this
    // generation ID. If our generation ID changes and we decide not to call the listener, we'll
    // never call it: you must add a new listener for each generation ID change. We also won't call
    // the listener when we're certain no one knows what our generation ID is.
    //
    // This can be used to invalidate caches keyed by SkPixelRef generation ID.
    struct GenIDChangeListener {
        virtual ~GenIDChangeListener() {}
        virtual void onChange() = 0;
    };

    // Takes ownership of listener.
    void addGenIDChangeListener(GenIDChangeListener* listener);
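
    /**
     *  A hedged sketch of how a cache might use the listener hook; the
     *  listener subclass and the purge logic are hypothetical, and the
     *  pixelref takes ownership of the heap-allocated listener:
     *
     *      class CacheInvalidator : public SkPixelRef::GenIDChangeListener {
     *      public:
     *          virtual void onChange() {
     *              // purge any cache entries keyed by the old generation ID
     *          }
     *      };
     *
     *      pixelRef->addGenIDChangeListener(new CacheInvalidator);
     */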

protected:
    /**
     *  On success, returns true and fills out the LockRec for the pixels. On
     *  failure returns false and ignores the LockRec parameter.
     *
     *  The caller will have already acquired a mutex for thread safety, so this
     *  method need not do that.
     */
    virtual bool onNewLockPixels(LockRec*) = 0;

    /**
     *  Balances the previous successful call to onNewLockPixels. The locked
     *  pixel address will no longer be referenced, so the subclass is free to
     *  move or discard that memory.
     *
     *  The caller will have already acquired a mutex for thread safety, so this
     *  method need not do that.
     */
    virtual void onUnlockPixels() = 0;

    /** Default impl returns true */
    virtual bool onLockPixelsAreWritable() const;

    // default impl returns false.
    virtual bool onImplementsDecodeInto();
    // default impl returns false.
    virtual bool onDecodeInto(int pow2, SkBitmap* bitmap);

    /**
     *  For pixelrefs that don't have access to their raw pixels, they may be
     *  able to make a copy of them (e.g. if the pixels are on the GPU).
     *
     *  The base class implementation returns false.
     */
    virtual bool onReadPixels(SkBitmap* dst, const SkIRect* subsetOrNull);

    // default impl returns NULL.
    virtual SkData* onRefEncodedData();

    // default impl returns false.
    virtual bool onGetYUV8Planes(SkISize sizes[3], void* planes[3], size_t rowBytes[3],
                                 SkYUVColorSpace* colorSpace);

    /**
     *  Returns the size (in bytes) of the internally allocated memory.
     *  This should be implemented in all serializable SkPixelRef derived classes.
     *  SkBitmap::fPixelRefOffset + SkBitmap::getSafeSize() should never overflow this value,
     *  otherwise the rendering code may attempt to read memory out of bounds.
     *
     *  @return default impl returns 0.
     */
    virtual size_t getAllocatedSizeInBytes() const;

    /** Return the mutex associated with this pixelref. This value is assigned
        in the constructor, and cannot change during the lifetime of the object.
    */
    SkBaseMutex* mutex() const { return fMutex; }

    // Only call from constructor. Flags this to always be locked, removing
    // the need to grab the mutex and call onNewLockPixels/onUnlockPixels.
    // Performance tweak to avoid those calls (esp. in multi-thread use case).
    void setPreLocked(void*, size_t rowBytes, SkColorTable*);

private:
    SkBaseMutex*    fMutex; // must remain in scope for the life of this object

    // mostly const. fInfo.fAlphaType can be changed at runtime.
    const SkImageInfo fInfo;

    // LockRec is only valid if we're in a locked state (isLocked())
    LockRec         fRec;
    int             fLockCount;

    mutable SkTRacy<uint32_t> fGenerationID;
    mutable SkTRacy<bool>     fUniqueGenerationID;

    SkTDArray<GenIDChangeListener*> fGenIDChangeListeners;  // pointers are owned

    SkString    fURI;

    // can go from false to true, but never from true to false
    bool    fIsImmutable;
    // only ever set in constructor, const after that
    bool    fPreLocked;

    void needsNewGenID();
    void callGenIDChangeListeners();

    void setMutex(SkBaseMutex* mutex);

    // When copying a bitmap to another with the same shape and config, we can safely
    // clone the pixelref generation ID too, which makes them equivalent under caching.
    friend class SkBitmap;  // only for cloneGenID
    void cloneGenID(const SkPixelRef&);

    typedef SkRefCnt INHERITED;
};
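
/**
 *  A hedged sketch of how a simple malloc-backed subclass might implement the
 *  two pure virtuals above; the class name and members (SkSimplePixelRef,
 *  fStorage, fRB) are hypothetical, and error handling is omitted:
 *
 *      class SkSimplePixelRef : public SkPixelRef {
 *      public:
 *          SkSimplePixelRef(const SkImageInfo& info, void* storage, size_t rowBytes)
 *              : SkPixelRef(info), fStorage(storage), fRB(rowBytes) {}
 *
 *      protected:
 *          virtual bool onNewLockPixels(LockRec* rec) {
 *              rec->fPixels     = fStorage;   // memory owned elsewhere
 *              rec->fRowBytes   = fRB;
 *              rec->fColorTable = NULL;
 *              return true;
 *          }
 *          virtual void onUnlockPixels() {}   // nothing to release
 *
 *      private:
 *          void*  fStorage;
 *          size_t fRB;
 *      };
 */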

class SkPixelRefFactory : public SkRefCnt {
public:
    /**
     *  Allocate a new pixelref matching the specified ImageInfo, allocating
     *  the memory for the pixels. If the ImageInfo requires a ColorTable,
     *  the pixelref will ref() the colortable.
     *  On failure return NULL.
     */
    virtual SkPixelRef* create(const SkImageInfo&, size_t rowBytes, SkColorTable*) = 0;
};

#endif