/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.SuppressLint;
import android.annotation.TestApi;
import android.compat.annotation.UnsupportedAppUsage;
import android.graphics.Rect;
import android.hardware.DataSpace;
import android.hardware.DataSpace.NamedDataSpace;
import android.hardware.HardwareBuffer;
import android.hardware.SyncFence;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * <p>A single complete image buffer to use with a media source such as a
 * {@link MediaCodec} or a
 * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
 *
 * <p>This class allows for efficient direct application access to the pixel
 * data of the Image through one or more
 * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
 * {@link Plane} that describes the layout of the pixel data in that plane. Due
 * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
 * Images are not directly usable as UI resources.</p>
 *
 * <p>Since Images are often directly produced or consumed by hardware
 * components, they are a limited resource shared across the system, and should
 * be closed as soon as they are no longer needed.</p>
 *
 * <p>For example, when using the {@link ImageReader} class to read out Images
 * from various media sources, not closing old Image objects will prevent the
 * availability of new Images once
 * {@link ImageReader#getMaxImages the maximum outstanding image count} is
 * reached. When this happens, the function acquiring new Images will typically
 * throw an {@link IllegalStateException}.</p>
 *
 * @see ImageReader
 */
public abstract class Image implements AutoCloseable {
    /**
     * Whether this image is still valid; cleared once the image is closed.
     *
     * @hide
     */
    protected boolean mIsImageValid = false;

    /**
     * @hide
     */
    @UnsupportedAppUsage
    @TestApi
    protected Image() {
    }

    /**
     * Throw IllegalStateException if the image is invalid (already closed).
     *
     * @hide
     */
    protected void throwISEIfImageIsInvalid() {
        if (!mIsImageValid) {
            throw new IllegalStateException("Image is already closed");
        }
    }

    /**
     * Get the format for this image. This format determines the number of
     * ByteBuffers needed to represent the image, and the general layout of the
     * pixel data in each ByteBuffer.
     *
     * <p>
     * The format is one of the values from
     * {@link android.graphics.ImageFormat ImageFormat},
     * {@link android.graphics.PixelFormat PixelFormat}, or
     * {@link android.hardware.HardwareBuffer HardwareBuffer}. The mapping between the
     * formats and the planes is as follows (any formats not listed will have 1 plane):
     * </p>
     *
     * <table>
     *   <tr>
     *     <th>Format</th>
     *     <th>Plane count</th>
     *     <th>Layout details</th>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
     *     <td>1</td>
     *     <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *        {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *     </td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
     *     <td>3</td>
     *     <td>A luminance plane followed by the Cb and Cr chroma planes.
     *       The chroma planes have half the width and height of the luminance
     *       plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
     *     <td>3</td>
     *     <td>A luminance plane followed by the Cb and Cr chroma planes.
     *       The chroma planes have half the width and the full height of the luminance
     *       plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
     *     <td>3</td>
     *     <td>A luminance plane followed by the Cb and Cr chroma planes.
     *       The chroma planes have the same width and height as that of the luminance
     *       plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
     *     <td>3</td>
     *     <td>A R (red) plane followed by the G (green) and B (blue) planes.
     *       All planes have the same widths and heights.
     *       Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
     *     <td>4</td>
     *     <td>A R (red) plane followed by the G (green), B (blue), and
     *       A (alpha) planes. All planes have the same widths and heights.
     *       Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
     *     <td>1</td>
     *     <td>A single plane of raw sensor image data, with 16 bits per color
     *       sample. The details of the layout need to be queried from the source of
     *       the raw sensor data, such as
     *       {@link android.hardware.camera2.CameraDevice CameraDevice}.
     *     </td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}</td>
     *     <td>1</td>
     *     <td>A single plane of raw sensor image data of private layout.
     *       The details of the layout is implementation specific. Row stride and
     *       pixel stride are undefined for this format. Calling {@link Plane#getRowStride()}
     *       or {@link Plane#getPixelStride()} on RAW_PRIVATE image will cause
     *       UnsupportedOperationException being thrown.
     *     </td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#HEIC HEIC}</td>
     *     <td>1</td>
     *     <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *        {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *     </td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#YCBCR_P010 YCBCR_P010}</td>
     *     <td>3</td>
     *     <td>P010 is a 4:2:0 YCbCr semiplanar format comprised of a WxH Y plane
     *       followed by a Wx(H/2) Cb and Cr planes. Each sample is represented by a 16-bit
     *       little-endian value, with the lower 6 bits set to zero. Since this is guaranteed to be
     *       a semi-planar format, the Cb plane can also be treated as an interleaved Cb/Cr plane.
     *     </td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#YCBCR_P210 YCBCR_P210}</td>
     *     <td>3</td>
     *     <td>P210 is a 4:2:2 YCbCr semiplanar format comprised of a WxH Y plane
     *       followed by a WxH Cb and Cr planes. Each sample is represented by a 16-bit
     *       little-endian value, with the lower 6 bits set to zero. Since this is guaranteed to be
     *       a semi-planar format, the Cb plane can also be treated as an interleaved Cb/Cr plane.
     *     </td>
     *   </tr>
     * </table>
     *
     * @see android.graphics.ImageFormat
     * @see android.graphics.PixelFormat
     * @see android.hardware.HardwareBuffer
     */
    public abstract int getFormat();

    /**
     * The width of the image in pixels. For formats where some color channels
     * are subsampled, this is the width of the largest-resolution plane.
     */
    public abstract int getWidth();

    /**
     * The height of the image in pixels. For formats where some color channels
     * are subsampled, this is the height of the largest-resolution plane.
     */
    public abstract int getHeight();

    /**
     * Get the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for the images from different sources may have
     * different timebases therefore may not be comparable. The specific meaning and
     * timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     */
    public abstract long getTimestamp();

    /**
     * Get the transformation associated with this frame.
     * @return The window transformation that needs to be applied for this frame.
     * @hide
     */
    @SuppressWarnings("HiddenAbstractMethod")
    public abstract int getTransform();

    /**
     * Get the scaling mode associated with this frame.
     * @return The scaling mode that needs to be applied for this frame.
     * @hide
     */
    @SuppressWarnings("HiddenAbstractMethod")
    public abstract int getScalingMode();

    /**
     * Get the SyncFence object associated with this frame.
     *
     * <p>This function returns an invalid SyncFence after {@link #getPlanes()} on the image
     * dequeued from {@link ImageWriter} via {@link ImageWriter#dequeueInputImage()}.</p>
     *
     * @return The SyncFence for this frame.
     * @throws IOException if there is an error when a SyncFence object returns.
     * @see android.hardware.SyncFence
     */
    public @NonNull SyncFence getFence() throws IOException {
        // Default implementation: no fence is associated; subclasses that track
        // buffer synchronization override this.
        return SyncFence.createEmpty();
    }

    /**
     * Get the number of planes.
     * @return The number of expected planes.
     * @hide
     */
    public int getPlaneCount() {
        // -1 signals "unknown"; subclasses with real backing buffers override this.
        return -1;
    }

    /**
     * Get the {@link android.hardware.HardwareBuffer HardwareBuffer} handle of the input image
     * intended for GPU and/or hardware access.
     * <p>
     * The returned {@link android.hardware.HardwareBuffer HardwareBuffer} shall not be used
     * after {@link Image#close Image.close()} has been called.
     * </p>
     * @return the HardwareBuffer associated with this Image or null if this Image doesn't support
     * this feature. (Unsupported use cases include Image instances obtained through
     * {@link android.media.MediaCodec MediaCodec}, and on versions prior to Android P,
     * {@link android.media.ImageWriter ImageWriter}).
     */
    @Nullable
    public HardwareBuffer getHardwareBuffer() {
        throwISEIfImageIsInvalid();
        return null;
    }

    /**
     * Set the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for the images from different sources may have
     * different timebases therefore may not be comparable. The specific meaning and
     * timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     * <p>
     * For images dequeued from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
     * set the timestamps correctly before sending them back to the
     * {@link ImageWriter}, or the timestamp will be generated automatically when
     * {@link ImageWriter#queueInputImage queueInputImage()} is called.
     * </p>
     *
     * @param timestamp The timestamp to be set for this image.
     */
    public void setTimestamp(long timestamp) {
        // No-op by default; subclasses backed by a real buffer store the value.
        throwISEIfImageIsInvalid();
        return;
    }

    /**
     * Set the fence file descriptor with this frame.
     * @param fence The fence file descriptor to be set for this frame.
     * @throws IOException if there is an error when setting a SyncFence.
     * @see android.hardware.SyncFence
     */
    public void setFence(@NonNull SyncFence fence) throws IOException {
        // No-op by default; subclasses that track synchronization override this.
        throwISEIfImageIsInvalid();
        return;
    }

    // Dataspace describing how the pixel values should be interpreted.
    private @NamedDataSpace int mDataSpace = DataSpace.DATASPACE_UNKNOWN;

    /**
     * Get the dataspace associated with this frame.
     */
    @SuppressLint("MethodNameUnits")
    public @NamedDataSpace int getDataSpace() {
        throwISEIfImageIsInvalid();
        return mDataSpace;
    }

    /**
     * Set the dataspace associated with this frame.
     * <p>
     * If dataspace for an image is not set, dataspace value depends on {@link android.view.Surface}
     * that is provided in the {@link ImageWriter} constructor.
     * </p>
     *
     * @param dataSpace The Dataspace to be set for this image
     */
    public void setDataSpace(@NamedDataSpace int dataSpace) {
        throwISEIfImageIsInvalid();
        mDataSpace = dataSpace;
    }

    // Region of valid pixels; null means the full image bounds.
    private Rect mCropRect;

    /**
     * Get the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     */
    public Rect getCropRect() {
        throwISEIfImageIsInvalid();

        if (mCropRect == null) {
            // Unset crop means the full image is valid.
            return new Rect(0, 0, getWidth(), getHeight());
        } else {
            return new Rect(mCropRect); // return a copy
        }
    }

    /**
     * Set the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     */
    public void setCropRect(Rect cropRect) {
        throwISEIfImageIsInvalid();

        if (cropRect != null) {
            cropRect = new Rect(cropRect); // make a copy
            // Clamp to the image bounds; a rect fully outside becomes empty.
            if (!cropRect.intersect(0, 0, getWidth(), getHeight())) {
                cropRect.setEmpty();
            }
        }
        mCropRect = cropRect;
    }

    /**
     * Get the array of pixel planes for this Image. The number of planes is
     * determined by the format of the Image. The application will get an empty
     * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
     * PRIVATE}, because the image pixel data is not directly accessible. The
     * application can check the image format by calling
     * {@link Image#getFormat()}.
     */
    public abstract Plane[] getPlanes();

    /**
     * Free up this frame for reuse.
     * <p>
     * After calling this method, calling any methods on this {@code Image} will
     * result in an {@link IllegalStateException}, and attempting to read from
     * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
     * {@link Plane#getBuffer} call will have undefined behavior. If the image
     * was obtained from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
     * image data filled by the application will be lost and the image will be
     * returned to {@link ImageWriter} for reuse. Images given to
     * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
     * closed.
     * </p>
     */
    @Override
    public abstract void close();

    /**
     * <p>
     * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return true if the image is attachable to a new owner, false if the image is still attached
     *         to its current owner, or the image is a stand-alone image and is not attachable to
     *         a new owner.
     * @hide
     */
    public boolean isAttachable() {
        throwISEIfImageIsInvalid();

        return false;
    }

    /**
     * <p>
     * Get the owner of the {@link Image}.
     * </p>
     * <p>
     * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
     * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
     * if the image is stand-alone image or the owner is unknown.
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return The owner of the Image.
     */
    Object getOwner() {
        throwISEIfImageIsInvalid();

        return null;
    }

    /**
     * Get native context (buffer pointer) associated with this image.
     * <p>
     * This is a package private method that is only used internally. It can be
     * used to get the native buffer pointer and passed to native, which may be
     * passed to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
     * JNI call.
     * </p>
     *
     * @return native context associated with this Image.
     */
    long getNativeContext() {
        throwISEIfImageIsInvalid();

        return 0;
    }

    /**
     * <p>A single color plane of image data.</p>
     *
     * <p>The number and meaning of the planes in an Image are determined by the
     * format of the Image.</p>
     *
     * <p>Once the Image has been closed, any access to the plane's
     * ByteBuffer will fail.</p>
     *
     * @see #getFormat
     */
    public static abstract class Plane {
        /**
         * @hide
         */
        @UnsupportedAppUsage
        @TestApi
        protected Plane() {
        }

        /**
         * <p>The row stride for this color plane, in bytes.</p>
         *
         * <p>This is the distance between the start of two consecutive rows of
         * pixels in the image. Note that row stride is undefined for some formats
         * such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getRowStride on images of these formats will
         * cause an UnsupportedOperationException being thrown.
         * For formats where row stride is well defined, the row stride
         * is always greater than 0.</p>
         */
        public abstract int getRowStride();

        /**
         * <p>The distance between adjacent pixel samples, in bytes.</p>
         *
         * <p>This is the distance between two consecutive pixel values in a row
         * of pixels. It may be larger than the size of a single pixel to
         * account for interleaved image data or padded formats.
         * Note that pixel stride is undefined for some formats such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getPixelStride on images of these formats will
         * cause an UnsupportedOperationException being thrown.
         * For formats where pixel stride is well defined, the pixel stride
         * is always greater than 0.</p>
         */
        public abstract int getPixelStride();

        /**
         * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
         * containing the frame data.</p>
         *
         * <p>In particular, the buffer returned will always have
         * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
         * the underlying data could be mapped as a pointer in JNI without doing
         * any copies with {@code GetDirectBufferAddress}.</p>
         *
         * <p>For raw formats, each plane is only guaranteed to contain data
         * up to the last pixel in the last row. In other words, the stride
         * after the last row may not be mapped into the buffer. This is a
         * necessary requirement for any interleaved format.</p>
         *
         * @return the byte buffer containing the image data for this plane.
         */
        public abstract ByteBuffer getBuffer();
    }

}