/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.SuppressLint;
import android.annotation.TestApi;
import android.compat.annotation.UnsupportedAppUsage;
import android.graphics.Rect;
import android.hardware.DataSpace;
import android.hardware.DataSpace.NamedDataSpace;
import android.hardware.HardwareBuffer;
import android.hardware.SyncFence;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * <p>A single complete image buffer to use with a media source such as a
 * {@link MediaCodec} or a
 * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
 *
 * <p>This class allows for efficient direct application access to the pixel
 * data of the Image through one or more
 * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
 * {@link Plane} that describes the layout of the pixel data in that plane. Due
 * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
 * Images are not directly usable as UI resources.</p>
 *
 * <p>Since Images are often directly produced or consumed by hardware
 * components, they are a limited resource shared across the system, and should
 * be closed as soon as they are no longer needed.</p>
 *
 * <p>For example, when using the {@link ImageReader} class to read out Images
 * from various media sources, not closing old Image objects will prevent the
 * availability of new Images once
 * {@link ImageReader#getMaxImages the maximum outstanding image count} is
 * reached. When this happens, the function acquiring new Images will typically
 * throw an {@link IllegalStateException}.</p>
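 *
 * <p>For example, a minimal sketch of a read-and-release pattern (here
 * {@code reader} stands for an already-configured {@link ImageReader};
 * illustrative only, not a complete pipeline):</p>
 *
 * <pre>{@code
 * // Image implements AutoCloseable, so try-with-resources releases the buffer
 * // promptly and keeps the reader below its maximum outstanding image count.
 * try (Image image = reader.acquireLatestImage()) {
 *     if (image != null) {
 *         Image.Plane[] planes = image.getPlanes();
 *         // ... read pixel data from the planes ...
 *     }
 * }
 * }</pre>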
 *
 * @see ImageReader
 */
public abstract class Image implements AutoCloseable {
    /**
     * @hide
     */
    protected boolean mIsImageValid = false;

    /**
     * @hide
     */
    @UnsupportedAppUsage
    @TestApi
    protected Image() {
    }

    /**
     * Throw IllegalStateException if the image is invalid (already closed).
     *
     * @hide
     */
    protected void throwISEIfImageIsInvalid() {
        if (!mIsImageValid) {
            throw new IllegalStateException("Image is already closed");
        }
    }

    /**
     * Get the format for this image. This format determines the number of
     * ByteBuffers needed to represent the image, and the general layout of the
     * pixel data in each ByteBuffer.
     *
     * <p>
     * The format is one of the values from
     * {@link android.graphics.ImageFormat ImageFormat},
     * {@link android.graphics.PixelFormat PixelFormat}, or
     * {@link android.hardware.HardwareBuffer HardwareBuffer}. The mapping between the
     * formats and the planes is as follows (any formats not listed will have 1 plane):
     * </p>
     *
     * <table>
     * <tr>
     *   <th>Format</th>
     *   <th>Plane count</th>
     *   <th>Layout details</th>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
     *   <td>1</td>
     *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *     {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have half the width and height of the luminance
     *     plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have half the width and the full height of the luminance
     *     plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have the same width and height as the luminance
     *     plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
     *   <td>3</td>
     *   <td>An R (red) plane followed by the G (green) and B (blue) planes.
     *     All planes have the same width and height.
     *     Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
     *   <td>4</td>
     *   <td>An R (red) plane followed by the G (green), B (blue), and
     *     A (alpha) planes. All planes have the same width and height.
     *     Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
     *   <td>1</td>
     *   <td>A single plane of raw sensor image data, with 16 bits per color
     *     sample. The details of the layout need to be queried from the source of
     *     the raw sensor data, such as
     *     {@link android.hardware.camera2.CameraDevice CameraDevice}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}</td>
     *   <td>1</td>
     *   <td>A single plane of raw sensor image data with a private layout.
     *     The details of the layout are implementation specific. Row stride and
     *     pixel stride are undefined for this format. Calling {@link Plane#getRowStride()}
     *     or {@link Plane#getPixelStride()} on a RAW_PRIVATE image will cause an
     *     UnsupportedOperationException to be thrown.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#HEIC HEIC}</td>
     *   <td>1</td>
     *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *     {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YCBCR_P010 YCBCR_P010}</td>
     *   <td>3</td>
     *   <td>P010 is a 4:2:0 YCbCr semiplanar format comprised of a WxH Y plane
     *     followed by Wx(H/2) Cb and Cr planes. Each sample is represented by a 16-bit
     *     little-endian value, with the lower 6 bits set to zero. Since this is guaranteed to be
     *     a semi-planar format, the Cb plane can also be treated as an interleaved Cb/Cr plane.
     *   </td>
     * </tr>
     * </table>
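     *
     * <p>As a sketch of how the table above translates into code, the following
     * reads the luma (Y) plane of a
     * {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888} image row by
     * row ({@code image} is assumed to be a valid, open Image of that format;
     * illustrative only):</p>
     *
     * <pre>{@code
     * Image.Plane yPlane = image.getPlanes()[0];
     * ByteBuffer yBuffer = yPlane.getBuffer();
     * int rowStride = yPlane.getRowStride();
     * // For YUV_420_888 the Y plane has a pixel stride of 1, so a row of
     * // luma samples is getWidth() contiguous bytes starting at y * rowStride.
     * byte[] row = new byte[image.getWidth()];
     * for (int y = 0; y < image.getHeight(); y++) {
     *     yBuffer.position(y * rowStride);
     *     yBuffer.get(row, 0, row.length);
     *     // ... process one row of luma samples ...
     * }
     * }</pre>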
     *
     * @see android.graphics.ImageFormat
     * @see android.graphics.PixelFormat
     * @see android.hardware.HardwareBuffer
     */
    public abstract int getFormat();

    /**
     * The width of the image in pixels. For formats where some color channels
     * are subsampled, this is the width of the largest-resolution plane.
     */
    public abstract int getWidth();

    /**
     * The height of the image in pixels. For formats where some color channels
     * are subsampled, this is the height of the largest-resolution plane.
     */
    public abstract int getHeight();

    /**
     * Get the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for images from different sources may have
     * different timebases and therefore may not be comparable. The specific meaning and
     * timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     */
    public abstract long getTimestamp();

    /**
     * Get the transformation associated with this frame.
     * @return The window transformation that needs to be applied for this frame.
     * @hide
     */
    @SuppressWarnings("HiddenAbstractMethod")
    public abstract int getTransform();

    /**
     * Get the scaling mode associated with this frame.
     * @return The scaling mode that needs to be applied for this frame.
     * @hide
     */
    @SuppressWarnings("HiddenAbstractMethod")
    public abstract int getScalingMode();

    /**
     * Get the SyncFence object associated with this frame.
     *
     * <p>This function returns an invalid SyncFence after {@link #getPlanes()} is called on an
     * image dequeued from {@link ImageWriter} via {@link ImageWriter#dequeueInputImage()}.</p>
     *
     * @return The SyncFence for this frame.
     * @throws IOException if an error occurs while retrieving the SyncFence object.
     * @see android.hardware.SyncFence
     */
    public @NonNull SyncFence getFence() throws IOException {
        return SyncFence.createEmpty();
    }

    /**
     * Get the number of planes.
     * @return The number of expected planes.
     * @hide
     */
    public int getPlaneCount() {
        return -1;
    }

    /**
     * Get the {@link android.hardware.HardwareBuffer HardwareBuffer} handle of the input image
     * intended for GPU and/or hardware access.
     * <p>
     * The returned {@link android.hardware.HardwareBuffer HardwareBuffer} shall not be used
     * after {@link Image#close Image.close()} has been called.
     * </p>
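     * <p>
     * A minimal sketch of the intended usage ({@code image} is assumed to be a
     * valid Image obtained from a source that supports this feature;
     * illustrative only):
     * </p>
     * <pre>{@code
     * HardwareBuffer buffer = image.getHardwareBuffer();
     * if (buffer != null) {
     *     // Hand the buffer to GPU or other hardware consumers here.
     * }
     * image.close(); // the buffer must not be used past this point
     * }</pre>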
     *
     * @return the HardwareBuffer associated with this Image, or null if this Image doesn't support
     *         this feature. (Unsupported use cases include Image instances obtained through
     *         {@link android.media.MediaCodec MediaCodec}, and on versions prior to Android P,
     *         {@link android.media.ImageWriter ImageWriter}.)
     */
    @Nullable
    public HardwareBuffer getHardwareBuffer() {
        throwISEIfImageIsInvalid();
        return null;
    }

    /**
     * Set the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for images from different sources may have
     * different timebases and therefore may not be comparable. The specific meaning and
     * timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     * <p>
     * For images dequeued from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
     * set the timestamps correctly before sending them back to the
     * {@link ImageWriter}, or the timestamp will be generated automatically when
     * {@link ImageWriter#queueInputImage queueInputImage()} is called.
     * </p>
     *
     * @param timestamp The timestamp to be set for this image.
     */
    public void setTimestamp(long timestamp) {
        throwISEIfImageIsInvalid();
        return;
    }

    /**
     * Set the fence file descriptor associated with this frame.
     * @param fence The fence file descriptor to be set for this frame.
     * @throws IOException if there is an error when setting a SyncFence.
     * @see android.hardware.SyncFence
     */
    public void setFence(@NonNull SyncFence fence) throws IOException {
        throwISEIfImageIsInvalid();
        return;
    }

    private @NamedDataSpace int mDataSpace = DataSpace.DATASPACE_UNKNOWN;

    /**
     * Get the dataspace associated with this frame.
     */
    @SuppressLint("MethodNameUnits")
    public @NamedDataSpace int getDataSpace() {
        throwISEIfImageIsInvalid();
        return mDataSpace;
    }

    /**
     * Set the dataspace associated with this frame.
     * <p>
     * If the dataspace for an image is not set, the dataspace value depends on the
     * {@link android.view.Surface} that is provided in the {@link ImageWriter} constructor.
     * </p>
     *
     * @param dataSpace The dataspace to be set for this image.
     */
    public void setDataSpace(@NamedDataSpace int dataSpace) {
        throwISEIfImageIsInvalid();
        mDataSpace = dataSpace;
    }

    private Rect mCropRect;

    /**
     * Get the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     * </p>
     */
    public Rect getCropRect() {
        throwISEIfImageIsInvalid();

        if (mCropRect == null) {
            return new Rect(0, 0, getWidth(), getHeight());
        } else {
            return new Rect(mCropRect); // return a copy
        }
    }

    /**
     * Set the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     * </p>
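     * <p>
     * For example, a minimal sketch that restricts the valid region to the
     * top-left quadrant of the frame ({@code image} is assumed to be a valid,
     * open Image; illustrative only):
     * </p>
     * <pre>{@code
     * image.setCropRect(new Rect(0, 0, image.getWidth() / 2, image.getHeight() / 2));
     * Rect valid = image.getCropRect(); // returns a copy of the active crop rectangle
     * }</pre>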
     */
    public void setCropRect(Rect cropRect) {
        throwISEIfImageIsInvalid();

        if (cropRect != null) {
            cropRect = new Rect(cropRect); // make a copy
            if (!cropRect.intersect(0, 0, getWidth(), getHeight())) {
                cropRect.setEmpty();
            }
        }
        mCropRect = cropRect;
    }

    /**
     * Get the array of pixel planes for this Image. The number of planes is
     * determined by the format of the Image. The application will get an empty
     * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
     * PRIVATE}, because the image pixel data is not directly accessible. The
     * application can check the image format by calling
     * {@link Image#getFormat()}.
     */
    public abstract Plane[] getPlanes();

    /**
     * Free up this frame for reuse.
     * <p>
     * After calling this method, calling any methods on this {@code Image} will
     * result in an {@link IllegalStateException}, and attempting to read from
     * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
     * {@link Plane#getBuffer} call will have undefined behavior. If the image
     * was obtained from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
     * image data filled by the application will be lost and the image will be
     * returned to {@link ImageWriter} for reuse. Images given to
     * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
     * closed.
     * </p>
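     * <p>
     * A minimal sketch of the {@link ImageWriter} case ({@code writer} stands for
     * an already-configured ImageWriter; illustrative only):
     * </p>
     * <pre>{@code
     * Image input = writer.dequeueInputImage();
     * try {
     *     // ... fill input.getPlanes() with frame data ...
     *     writer.queueInputImage(input); // queueInputImage() closes the image
     *     input = null;
     * } finally {
     *     if (input != null) {
     *         input.close(); // close explicitly if the image was never queued
     *     }
     * }
     * }</pre>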
     */
    @Override
    public abstract void close();

    /**
     * <p>
     * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return true if the image is attachable to a new owner, false if the image is still attached
     *         to its current owner, or the image is a stand-alone image and is not attachable to
     *         a new owner.
     * @hide
     */
    public boolean isAttachable() {
        throwISEIfImageIsInvalid();

        return false;
    }

    /**
     * <p>
     * Get the owner of the {@link Image}.
     * </p>
     * <p>
     * The owner of an {@link Image} could be an {@link ImageReader}, {@link ImageWriter},
     * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
     * if the image is a stand-alone image or the owner is unknown.
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return The owner of the Image.
     */
    Object getOwner() {
        throwISEIfImageIsInvalid();

        return null;
    }

    /**
     * Get the native context (buffer pointer) associated with this image.
     * <p>
     * This is a package private method that is only used internally. It can be
     * used to get the native buffer pointer and pass it to native code, which may
     * pass it to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
     * JNI call.
     * </p>
     *
     * @return native context associated with this Image.
     */
    long getNativeContext() {
        throwISEIfImageIsInvalid();

        return 0;
    }

    /**
     * <p>A single color plane of image data.</p>
     *
     * <p>The number and meaning of the planes in an Image are determined by the
     * format of the Image.</p>
     *
     * <p>Once the Image has been closed, any access to the plane's
     * ByteBuffer will fail.</p>
     *
     * @see #getFormat
     */
    public static abstract class Plane {
        /**
         * @hide
         */
        @UnsupportedAppUsage
        @TestApi
        protected Plane() {
        }

        /**
         * <p>The row stride for this color plane, in bytes.</p>
         *
         * <p>This is the distance between the start of two consecutive rows of
         * pixels in the image. Note that row stride is undefined for some formats
         * such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getRowStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where row stride is well defined, the row stride
         * is always greater than 0.</p>
         */
        public abstract int getRowStride();

        /**
         * <p>The distance between adjacent pixel samples, in bytes.</p>
         *
         * <p>This is the distance between two consecutive pixel values in a row
         * of pixels. It may be larger than the size of a single pixel to
         * account for interleaved image data or padded formats.
         * Note that pixel stride is undefined for some formats such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getPixelStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where pixel stride is well defined, the pixel stride
         * is always greater than 0.</p>
         */
        public abstract int getPixelStride();

        /**
         * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
         * containing the frame data.</p>
         *
         * <p>In particular, the buffer returned will always have
         * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
         * the underlying data can be mapped as a pointer in JNI without doing
         * any copies with {@code GetDirectBufferAddress}.</p>
         *
         * <p>For raw formats, each plane is only guaranteed to contain data
         * up to the last pixel in the last row. In other words, the stride
         * after the last row may not be mapped into the buffer. This is a
         * necessary requirement for any interleaved format.</p>
         *
         * @return the byte buffer containing the image data for this plane.
         */
        public abstract ByteBuffer getBuffer();
    }

}