/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import java.nio.ByteBuffer;
import java.lang.AutoCloseable;

import android.annotation.Nullable;
import android.annotation.UnsupportedAppUsage;
import android.graphics.Rect;
import android.hardware.HardwareBuffer;

/**
 * <p>A single complete image buffer to use with a media source such as a
 * {@link MediaCodec} or a
 * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
 *
 * <p>This class allows for efficient direct application access to the pixel
 * data of the Image through one or more
 * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
 * {@link Plane} that describes the layout of the pixel data in that plane. Due
 * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
 * Images are not directly usable as UI resources.</p>
 *
 * <p>Since Images are often directly produced or consumed by hardware
 * components, they are a limited resource shared across the system, and should
 * be closed as soon as they are no longer needed.</p>
 *
 * <p>For example, when using the {@link ImageReader} class to read out Images
 * from various media sources, not closing old Image objects will prevent the
 * availability of new Images once
 * {@link ImageReader#getMaxImages the maximum outstanding image count} is
 * reached. When this happens, the function acquiring new Images will typically
 * throw an {@link IllegalStateException}.</p>
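 *
 * <p>A minimal usage sketch (assuming an already-configured {@link ImageReader}
 * named {@code reader}; the processing step is a placeholder):</p>
 *
 * <pre>{@code
 * Image image = reader.acquireLatestImage();
 * if (image != null) {
 *     try {
 *         // Access the pixel data while the Image is open.
 *         ByteBuffer firstPlane = image.getPlanes()[0].getBuffer();
 *         // ... process firstPlane ...
 *     } finally {
 *         // Close promptly so the reader can produce new Images.
 *         image.close();
 *     }
 * }
 * }</pre>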
 *
 * @see ImageReader
 */
public abstract class Image implements AutoCloseable {
    /**
     * @hide
     */
    protected boolean mIsImageValid = false;

    /**
     * @hide
     */
    @UnsupportedAppUsage
    protected Image() {
    }

    /**
     * Throw IllegalStateException if the image is invalid (already closed).
     *
     * @hide
     */
    protected void throwISEIfImageIsInvalid() {
        if (!mIsImageValid) {
            throw new IllegalStateException("Image is already closed");
        }
    }
    /**
     * Get the format for this image. This format determines the number of
     * ByteBuffers needed to represent the image, and the general layout of the
     * pixel data in each ByteBuffer.
     *
     * <p>
     * The format is one of the values from
     * {@link android.graphics.ImageFormat ImageFormat}. The mapping between the
     * formats and the planes is as follows:
     * </p>
     *
     * <table>
     * <tr>
     *   <th>Format</th>
     *   <th>Plane count</th>
     *   <th>Layout details</th>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
     *   <td>1</td>
     *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *      {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have half the width and height of the luminance
     *     plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have half the width and the full height of the luminance
     *     plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have the same width and height as the luminance
     *     plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
     *   <td>3</td>
     *   <td>An R (red) plane followed by the G (green) and B (blue) planes.
     *     All planes have the same widths and heights.
     *     Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
     *   <td>4</td>
     *   <td>An R (red) plane followed by the G (green), B (blue), and
     *     A (alpha) planes. All planes have the same widths and heights.
     *     Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
     *   <td>1</td>
     *   <td>A single plane of raw sensor image data, with 16 bits per color
     *     sample. The details of the layout need to be queried from the source of
     *     the raw sensor data, such as
     *     {@link android.hardware.camera2.CameraDevice CameraDevice}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}</td>
     *   <td>1</td>
     *   <td>A single plane of raw sensor image data with a private layout.
     *   The details of the layout are implementation specific. Row stride and
     *   pixel stride are undefined for this format. Calling {@link Plane#getRowStride()}
     *   or {@link Plane#getPixelStride()} on a RAW_PRIVATE image will cause an
     *   UnsupportedOperationException to be thrown.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#HEIC HEIC}</td>
     *   <td>1</td>
     *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *      {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *   </td>
     * </tr>
     * </table>
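     *
     * <p>For example, the pixel data of a {@link android.graphics.ImageFormat#YUV_420_888
     * YUV_420_888} image can be traversed plane by plane using each plane's row and
     * pixel stride (a minimal sketch, assuming an open {@code Image} named {@code image}):</p>
     *
     * <pre>{@code
     * Image.Plane[] planes = image.getPlanes();
     * for (int p = 0; p < planes.length; p++) {
     *     ByteBuffer buffer = planes[p].getBuffer();
     *     int rowStride = planes[p].getRowStride();
     *     int pixelStride = planes[p].getPixelStride();
     *     // The chroma planes (p > 0) have half the width and height of the Y plane.
     *     int planeWidth = (p == 0) ? image.getWidth() : image.getWidth() / 2;
     *     int planeHeight = (p == 0) ? image.getHeight() : image.getHeight() / 2;
     *     for (int row = 0; row < planeHeight; row++) {
     *         for (int col = 0; col < planeWidth; col++) {
     *             byte sample = buffer.get(row * rowStride + col * pixelStride);
     *             // ... process the 8-bit sample ...
     *         }
     *     }
     * }
     * }</pre>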
     *
     * @see android.graphics.ImageFormat
     */
    public abstract int getFormat();

    /**
     * The width of the image in pixels. For formats where some color channels
     * are subsampled, this is the width of the largest-resolution plane.
     */
    public abstract int getWidth();

    /**
     * The height of the image in pixels. For formats where some color channels
     * are subsampled, this is the height of the largest-resolution plane.
     */
    public abstract int getHeight();

    /**
     * Get the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for images from different sources may have
     * different timebases, and therefore may not be comparable. The specific meaning and
     * timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     */
    public abstract long getTimestamp();

    /**
     * Get the transformation associated with this frame.
     * @return The window transformation that needs to be applied for this frame.
     * @hide
     */
    public abstract int getTransform();

    /**
     * Get the scaling mode associated with this frame.
     * @return The scaling mode that needs to be applied for this frame.
     * @hide
     */
    public abstract int getScalingMode();

    /**
     * Get the {@link android.hardware.HardwareBuffer HardwareBuffer} handle of the input image
     * intended for GPU and/or hardware access.
     * <p>
     * The returned {@link android.hardware.HardwareBuffer HardwareBuffer} shall not be used
     * after {@link Image#close Image.close()} has been called.
     * </p>
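     * <p>A minimal usage sketch (assuming an open {@code Image} named {@code image};
     * the consumer of the buffer is a placeholder):</p>
     *
     * <pre>{@code
     * HardwareBuffer hardwareBuffer = image.getHardwareBuffer();
     * if (hardwareBuffer != null) {
     *     // Hand the buffer to GPU or other hardware consumers here, and make sure
     *     // all such use has finished before image.close() is called.
     * } else {
     *     // This Image does not expose a HardwareBuffer; fall back to getPlanes().
     * }
     * }</pre>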
     * @return the HardwareBuffer associated with this Image or null if this Image doesn't support
     * this feature. (Unsupported use cases include Image instances obtained through
     * {@link android.media.MediaCodec MediaCodec}, and on versions prior to Android P,
     * {@link android.media.ImageWriter ImageWriter}).
     */
    @Nullable
    public HardwareBuffer getHardwareBuffer() {
        throwISEIfImageIsInvalid();
        return null;
    }

    /**
     * Set the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for images from different sources may have
     * different timebases, and therefore may not be comparable. The specific meaning and
     * timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     * <p>
     * For images dequeued from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
     * set the timestamps correctly before sending them back to the
     * {@link ImageWriter}, or the timestamp will be generated automatically when
     * {@link ImageWriter#queueInputImage queueInputImage()} is called.
     * </p>
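     * <p>A minimal sketch of that flow (assuming an already-configured
     * {@link ImageWriter} named {@code writer} and a caller-chosen
     * {@code frameTimestampNs}):</p>
     *
     * <pre>{@code
     * Image inputImage = writer.dequeueInputImage();
     * // ... fill the planes of inputImage with pixel data ...
     * inputImage.setTimestamp(frameTimestampNs);
     * // Queueing also closes inputImage on behalf of the application.
     * writer.queueInputImage(inputImage);
     * }</pre>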
     *
     * @param timestamp The timestamp to be set for this image.
     */
    public void setTimestamp(long timestamp) {
        throwISEIfImageIsInvalid();
        return;
    }

    private Rect mCropRect;

    /**
     * Get the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     */
    public Rect getCropRect() {
        throwISEIfImageIsInvalid();

        if (mCropRect == null) {
            return new Rect(0, 0, getWidth(), getHeight());
        } else {
            return new Rect(mCropRect); // return a copy
        }
    }

    /**
     * Set the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
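     * <p>A minimal sketch, assuming an open {@code Image} named {@code image}, that
     * restricts the valid region to a centered rectangle covering half the width and
     * height of the image:</p>
     *
     * <pre>{@code
     * int width = image.getWidth();
     * int height = image.getHeight();
     * // Keep only the central region of the full-resolution plane; setCropRect()
     * // clips the rectangle to the image bounds.
     * image.setCropRect(new Rect(width / 4, height / 4, width * 3 / 4, height * 3 / 4));
     * }</pre>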
     */
    public void setCropRect(Rect cropRect) {
        throwISEIfImageIsInvalid();

        if (cropRect != null) {
            cropRect = new Rect(cropRect);  // make a copy
            if (!cropRect.intersect(0, 0, getWidth(), getHeight())) {
                cropRect.setEmpty();
            }
        }
        mCropRect = cropRect;
    }

    /**
     * Get the array of pixel planes for this Image. The number of planes is
     * determined by the format of the Image. The application will get an empty
     * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
     * PRIVATE}, because the image pixel data is not directly accessible. The
     * application can check the image format by calling
     * {@link Image#getFormat()}.
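     *
     * <p>A minimal sketch of that check (assuming an open {@code Image} named
     * {@code image}):</p>
     *
     * <pre>{@code
     * if (image.getFormat() != ImageFormat.PRIVATE) {
     *     Image.Plane[] planes = image.getPlanes();
     *     // planes.length matches the plane count for the format, e.g. 3 for YUV_420_888.
     * }
     * }</pre>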
     */
    public abstract Plane[] getPlanes();

    /**
     * Free up this frame for reuse.
     * <p>
     * After calling this method, calling any methods on this {@code Image} will
     * result in an {@link IllegalStateException}, and attempting to read from
     * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
     * {@link Plane#getBuffer} call will have undefined behavior. If the image
     * was obtained from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
     * image data filled by the application will be lost and the image will be
     * returned to {@link ImageWriter} for reuse. Images given to
     * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
     * closed.
     * </p>
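     * <p>Since {@code Image} implements {@link AutoCloseable}, a try-with-resources
     * statement can close it automatically (a minimal sketch, assuming an
     * already-configured {@link ImageReader} named {@code reader}):</p>
     *
     * <pre>{@code
     * try (Image image = reader.acquireNextImage()) {
     *     if (image != null) {
     *         // Use image here; it is closed automatically when the block exits.
     *     }
     * }
     * }</pre>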
     */
    @Override
    public abstract void close();

    /**
     * <p>
     * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return true if the image is attachable to a new owner, false if the image is still attached
     *         to its current owner, or the image is a stand-alone image and is not attachable to
     *         a new owner.
     */
    boolean isAttachable() {
        throwISEIfImageIsInvalid();

        return false;
    }

    /**
     * <p>
     * Get the owner of the {@link Image}.
     * </p>
     * <p>
     * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
     * {@link MediaCodec} etc. This method returns the owner that produced this image, or null
     * if the image is a stand-alone image or the owner is unknown.
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return The owner of the Image.
     */
    Object getOwner() {
        throwISEIfImageIsInvalid();

        return null;
    }

    /**
     * Get the native context (buffer pointer) associated with this image.
     * <p>
     * This is a package private method that is only used internally. It can be
     * used to get the native buffer pointer and pass it to native code, which may
     * pass it to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
     * JNI call.
     * </p>
     *
     * @return native context associated with this Image.
     */
    long getNativeContext() {
        throwISEIfImageIsInvalid();

        return 0;
    }

    /**
     * <p>A single color plane of image data.</p>
     *
     * <p>The number and meaning of the planes in an Image are determined by the
     * format of the Image.</p>
     *
     * <p>Once the Image has been closed, any access to the plane's
     * ByteBuffer will fail.</p>
     *
     * @see #getFormat
     */
    public static abstract class Plane {
        /**
         * @hide
         */
        @UnsupportedAppUsage
        protected Plane() {
        }

        /**
         * <p>The row stride for this color plane, in bytes.</p>
         *
         * <p>This is the distance between the start of two consecutive rows of
         * pixels in the image. Note that row stride is undefined for some formats
         * such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getRowStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where row stride is well defined, the row stride
         * is always greater than 0.</p>
         */
        public abstract int getRowStride();
        /**
         * <p>The distance between adjacent pixel samples, in bytes.</p>
         *
         * <p>This is the distance between two consecutive pixel values in a row
         * of pixels. It may be larger than the size of a single pixel to
         * account for interleaved image data or padded formats.
         * Note that pixel stride is undefined for some formats such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getPixelStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where pixel stride is well defined, the pixel stride
         * is always greater than 0.</p>
         */
        public abstract int getPixelStride();
        /**
         * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
         * containing the frame data.</p>
         *
         * <p>In particular, the buffer returned will always have
         * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
         * the underlying data could be mapped as a pointer in JNI without doing
         * any copies with {@code GetDirectBufferAddress}.</p>
         *
         * <p>For raw formats, each plane is only guaranteed to contain data
         * up to the last pixel in the last row. In other words, the stride
         * after the last row may not be mapped into the buffer. This is a
         * necessary requirement for any interleaved format.</p>
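         *
         * <p>For example, a sketch of addressing the sample at column {@code x} and
         * row {@code y} of a plane (assuming an {@code Image.Plane} named {@code plane}
         * of an uncompressed format, where both strides are defined):</p>
         *
         * <pre>{@code
         * ByteBuffer buffer = plane.getBuffer();
         * int rowStride = plane.getRowStride();
         * int pixelStride = plane.getPixelStride();
         * // Valid data ends at the last pixel of the last row, so the buffer may be
         * // shorter than rowStride * height.
         * byte sample = buffer.get(y * rowStride + x * pixelStride);
         * }</pre>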
         *
         * @return the byte buffer containing the image data for this plane.
         */
        public abstract ByteBuffer getBuffer();
    }

}