/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H

/*
 * Contains declaration of an abstract class EmulatedCameraDevice that defines
 * functionality expected from an emulated physical camera device:
 *  - Obtaining and setting camera device parameters
 *  - Capturing frames
 *  - Streaming video
 *  - etc.
 */

#include <utils/threads.h>
#include <utils/KeyedVector.h>
#include <utils/String8.h>
#include "EmulatedCameraCommon.h"
#include "Converters.h"
#include "WorkerThread.h"

#undef min
#undef max
#include <atomic>
#include <vector>

namespace android {

class EmulatedCamera;

/* Encapsulates an abstract class EmulatedCameraDevice that defines
 * functionality expected from an emulated physical camera device:
 *  - Obtaining and setting camera device parameters
 *  - Capturing frames
 *  - Streaming video
 *  - etc.
 */
class EmulatedCameraDevice {
public:
    /* Constructs EmulatedCameraDevice instance.
     * Param:
     *  camera_hal - Emulated camera that implements the camera HAL API, and
     *      manages (contains) this object.
     */
    explicit EmulatedCameraDevice(EmulatedCamera* camera_hal);

    /* Destructs EmulatedCameraDevice instance. */
    virtual ~EmulatedCameraDevice();

    /***************************************************************************
     * Emulated camera device abstract interface
     **************************************************************************/

public:
    /* Connects to the camera device.
     * This method must be called on an initialized instance of this class.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t connectDevice() = 0;

    /* Disconnects from the camera device.
     * Return:
     *  NO_ERROR on success, or an appropriate error status. If this method is
     *  called for an already disconnected or uninitialized instance of this
     *  class, a successful status must be returned. If this method is called
     *  for an instance that is in the "started" state, it must return a
     *  failure.
     */
    virtual status_t disconnectDevice() = 0;

    /* Starts the camera device.
     * This method tells the camera device to start capturing frames of the given
     * dimensions for the given pixel format. Note that this method doesn't start
     * the delivery of the captured frames to the emulated camera. Call
     * startDeliveringFrames method to start delivering frames. This method must
     * be called on a connected instance of this class. If it is called on a
     * disconnected instance, this method must return a failure.
     * Param:
     *  width, height - Frame dimensions to use when capturing video frames.
     *  pix_fmt - Pixel format to use when capturing video frames.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t startDevice(int width, int height, uint32_t pix_fmt) = 0;

    /* Stops the camera device.
     * This method tells the camera device to stop capturing frames. Note that
     * this method doesn't stop delivering frames to the emulated camera. Always
     * call stopDeliveringFrames prior to calling this method.
     * Return:
     *  NO_ERROR on success, or an appropriate error status. If this method is
     *  called for an object that is not capturing frames, or is disconnected,
     *  or is uninitialized, a successful status must be returned from this
     *  method.
     */
    virtual status_t stopDevice() = 0;

    /***************************************************************************
     * Emulated camera device public API
     **************************************************************************/

public:
    /* Initializes EmulatedCameraDevice instance.
     * Derived classes should override this method in order to cache static
     * properties of the physical device (list of supported pixel formats, frame
     * sizes, etc.) If this method is called on an already initialized instance,
     * it must return a successful status.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t Initialize();

    /* Initializes the white balance mode parameters.
     * The parameters are passed by each individual derived camera API, since
     * different camera manufacturers may have different preferences for the
     * white balance parameters. The green channel in the RGB color space is
     * kept fixed so that the luminance stays reasonably constant.
     *
     * Param:
     *  mode - the text describing the current white balance mode
     *  r_scale - the scale factor for the R channel in RGB space
     *  b_scale - the scale factor for the B channel in RGB space
     */
    void initializeWhiteBalanceModes(const char* mode,
                                     const float r_scale,
                                     const float b_scale);
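
    /* Illustrative sketch (assumed usage, not mandated by this header): a
     * derived camera device could register a few modes from its Initialize()
     * override. The mode names come from CameraParameters; the scale factors
     * below are made-up example values.
     *
     *   initializeWhiteBalanceModes(CameraParameters::WHITE_BALANCE_AUTO,
     *                               1.0f, 1.0f);
     *   initializeWhiteBalanceModes(CameraParameters::WHITE_BALANCE_DAYLIGHT,
     *                               1.1f, 0.9f);
     */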

    /* Starts delivering frames captured from the camera device.
     * This method will start the worker thread that pulls frames from the
     * camera device, and will deliver the pulled frames back to the emulated
     * camera via the onNextFrameAvailable callback. This method must be called
     * on a connected instance of this class with a started camera device. If it
     * is called on a disconnected instance, or the camera device has not been
     * started, this method must return a failure.
     * Param:
     *  one_burst - Controls how many frames should be delivered. If this
     *      parameter is 'true', only one captured frame will be delivered to the
     *      emulated camera. If this parameter is 'false', frames will keep
     *      coming until the stopDeliveringFrames method is called. Typically,
     *      this parameter is set to 'true' only in order to obtain a single
     *      frame that will be used as a "picture" in the takePicture method of
     *      the emulated camera.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t startDeliveringFrames(bool one_burst);
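
    /* Illustrative sketch of the typical call sequence driven by the containing
     * EmulatedCamera object (simplified; error handling omitted, and the real
     * call sites live in the camera HAL class):
     *
     *   dev->connectDevice();                  // "connected" state
     *   dev->startDevice(width, height, fmt);  // "started" state
     *   dev->startDeliveringFrames(false);     // continuous preview frames
     *   ...
     *   dev->startDeliveringFrames(true);      // single frame for takePicture
     *   ...
     *   dev->stopDeliveringFrames();
     *   dev->stopDevice();
     *   dev->disconnectDevice();
     */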

    /* Stops delivering frames captured from the camera device.
     * This method will stop the worker thread started by startDeliveringFrames.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t stopDeliveringFrames();

    /* Set the preview frame rate.
     * Indicates the rate at which the camera should provide preview frames in
     * frames per second. */
    status_t setPreviewFrameRate(int framesPerSecond);

    /* Sets the exposure compensation for the camera device.
     */
    void setExposureCompensation(const float ev);

    /* Sets the white balance mode for the device.
     */
    void setWhiteBalanceMode(const char* mode);

    /* Gets the current framebuffer in a selected format.
     * This method must be called on a connected instance of this class with a
     * started camera device. If it is called on a disconnected instance, or the
     * camera device has not been started, this method must return a failure.
     * Note that this method should be called only after at least one frame has
     * been captured and delivered. Otherwise it will return garbage in the
     * preview frame buffer. Typically, this method should be called from the
     * onNextFrameAvailable callback. The method can perform some basic pixel
     * format conversions; if a conversion is not supported the method will
     * fail. Note that this does NOT require that the current frame be locked
     * using a FrameLock object.
     *
     * Param:
     *  buffer - Buffer, large enough to contain the entire frame.
     *  pixelFormat - The pixel format to convert to. Use
     *                getOriginalPixelFormat() to get the configured pixel
     *                format (if that format is used, no conversion is needed).
     *  timestamp - Receives the timestamp at which the preview frame was
     *              generated.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t getCurrentFrame(void* buffer, uint32_t pixelFormat,
                                     int64_t* timestamp);

    /* Gets the current framebuffer, converted into the preview frame format.
     * This method must be called on a connected instance of this class with a
     * started camera device. If it is called on a disconnected instance, or the
     * camera device has not been started, this method must return a failure.
     * Note that this method should be called only after at least one frame has
     * been captured and delivered. Otherwise it will return garbage in the
     * preview frame buffer. Typically, this method should be called from the
     * onNextFrameAvailable callback. Note that this does NOT require that the
     * current frame be locked using a FrameLock object.
     * Param:
     *  buffer - Buffer, large enough to contain the entire preview frame.
     *  timestamp - Receives the timestamp at which the preview frame was
     *              generated.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t getCurrentPreviewFrame(void* buffer, int64_t* timestamp);

    /* Gets a pointer to the current frame buffer in its raw format.
     * This method must be called on a connected instance of this class with a
     * started camera device. If it is called on a disconnected instance, or the
     * camera device has not been started, this method must return NULL.
     * This method should only be called while the frame lock is held through
     * a FrameLock object. Otherwise the contents of the frame might change
     * unexpectedly or its memory could be deallocated, leading to a crash.
     * Return:
     *  A pointer to the current frame buffer on success, NULL otherwise.
     */
    virtual const void* getCurrentFrame();

    /* Scoped (RAII) lock for the current frame. Constructing a FrameLock locks
     * the current frame of the given device; the destructor unlocks it. */
    class FrameLock {
    public:
        FrameLock(EmulatedCameraDevice& cameraDevice);
        ~FrameLock();
    private:
        EmulatedCameraDevice& mCameraDevice;
    };
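
    /* Illustrative sketch (assumed usage, not mandated by this header): the
     * raw getCurrentFrame() accessor is meant to be used under a stack-based
     * FrameLock, for example:
     *
     *   {
     *       EmulatedCameraDevice::FrameLock lock(*device);
     *       const void* frame = device->getCurrentFrame();
     *       if (frame != NULL) {
     *           // Inspect or copy the frame while the lock is held.
     *       }
     *   }   // The frame is unlocked when 'lock' goes out of scope.
     */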

    /* Gets the width of the frame obtained from the physical device.
     * Return:
     *  Width of the frame obtained from the physical device. Note that the
     *  value returned from this method is valid only if the camera device has
     *  been started.
     */
    inline int getFrameWidth() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mFrameWidth;
    }

    /* Gets the height of the frame obtained from the physical device.
     * Return:
     *  Height of the frame obtained from the physical device. Note that the
     *  value returned from this method is valid only if the camera device has
     *  been started.
     */
    inline int getFrameHeight() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mFrameHeight;
    }

    /* Gets the byte size of the current frame buffer.
     * Return:
     *  Byte size of the frame buffer. Note that the value returned from this
     *  method is valid only if the camera device has been started.
     */
    inline size_t getFrameBufferSize() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mFrameBufferSize;
    }

    /* Get number of bytes required to store current video frame buffer. Note
     * that this can be different from getFrameBufferSize depending on the pixel
     * format and resolution. The video frames use a pixel format that is
     * suitable for the encoding pipeline and this may have different alignment
     * requirements than the pixel format used for regular frames.
     */
    inline size_t getVideoFrameBufferSize() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        // Currently the video format is always YUV 420 without any kind of
        // alignment. So each pixel uses 12 bits, and then we divide by 8 to get
        // the size in bytes. If additional pixel formats are supported this
        // should be updated to take the selected video format into
        // consideration.
        return (mFrameWidth * mFrameHeight * 12) / 8;
    }
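
    /* Worked example of the YUV 420 size computation above (illustrative
     * numbers only): a 640x480 frame has 640 * 480 = 307200 pixels, so at
     * 12 bits per pixel the buffer takes 307200 * 12 / 8 = 460800 bytes. */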

    /* Gets the number of pixels in the current frame buffer.
     * Return:
     *  Number of pixels in the frame buffer. Note that the value returned from
     *  this method is valid only if the camera device has been started.
     */
    inline int getPixelNum() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mTotalPixels;
    }

    /* Gets the pixel format of the frames that the camera device streams to
     * this class.
     * Throughout the camera framework, there are three different forms of pixel
     * format representation:
     *  - Original format, as reported by the actual camera device. Values for
     *    this format are declared in bionic/libc/kernel/common/linux/videodev2.h
     *  - String representation, as defined in CameraParameters::PIXEL_FORMAT_XXX
     *    strings in frameworks/base/include/camera/CameraParameters.h
     *  - HAL_PIXEL_FORMAT_XXX format, as defined in system/core/include/system/graphics.h
     * Since the emulated camera device gets its data from the actual device, it
     * gets the pixel format in the original form, and that is the representation
     * returned from this method. HAL components will need to translate the
     * value returned from this method to the appropriate form.
     * This method must be called only on a started instance of this class, since
     * it is applicable only when the camera device is ready to stream frames.
     * Return:
     *  The current framebuffer's pixel format. Note that the value returned
     *  from this method is valid only if the camera device has been started.
     */
    inline uint32_t getOriginalPixelFormat() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mPixelFormat;
    }

    /*
     * State checkers.
     */

    inline bool isInitialized() const {
        return mState != ECDS_CONSTRUCTED;
    }
    inline bool isConnected() const {
        /* Instance is connected when its status is either "connected" or
         * "started". */
        return mState == ECDS_CONNECTED || mState == ECDS_STARTED;
    }
    inline bool isStarted() const {
        return mState == ECDS_STARTED;
    }

    /* Enables auto-focus for the camera. This is only possible between calls to
     * startPreview and stopPreview, i.e. while preview frames are being
     * delivered. This will eventually trigger a callback to the camera HAL
     * saying auto-focus completed.
     */
    virtual status_t setAutoFocus();

    /* Cancels auto-focus if it's enabled.
     */
    virtual status_t cancelAutoFocus();

    /* Requests an asynchronous camera restart with new image parameters. The
     * restart will be performed on the same thread that delivers frames,
     * ensuring that all callbacks are done from the same thread.
     * Return:
     *  false if the request cannot be honored because no thread is running or
     *        some other error occurred.
     */
    bool requestRestart(int width, int height, uint32_t pixelFormat,
                        bool takingPicture, bool oneBurst);

    /****************************************************************************
     * Emulated camera device private API
     ***************************************************************************/
protected:
    /* Performs common validation and calculation of startDevice parameters.
     * Param:
     *  width, height, pix_fmt - Parameters passed to the startDevice method.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t commonStartDevice(int width, int height, uint32_t pix_fmt);

    /* Performs common cleanup on stopDevice.
     * This method will undo what commonStartDevice had done.
     */
    virtual void commonStopDevice();

    /** Computes a luminance value after taking the exposure compensation
     * value into account.
     *
     * Param:
     *  inputY - The input luminance value.
     * Return:
     *  The luminance value after adjusting for the exposure compensation.
     */
    inline uint8_t changeExposure(const uint8_t& inputY) const {
        return static_cast<uint8_t>(clamp(static_cast<float>(inputY) *
                                    mExposureCompensation));
    }
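
    /* Worked example (illustrative numbers only): with mExposureCompensation
     * set to 1.5f, an input luminance of 100 becomes 150, while an input of
     * 200 would compute to 300 and be clamped back into the valid 8-bit range
     * by clamp() before the cast to uint8_t. */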

    /** Computes the pixel value in YUV space after adjusting to the current
     * white balance mode.
     */
    void changeWhiteBalance(uint8_t& y, uint8_t& u, uint8_t& v) const;

    /* Check if there is a pending auto-focus trigger and send a notification
     * if there is. This should be called from the worker thread loop if the
     * camera device wishes to use the default behavior of immediately sending
     * an auto-focus completion event on request. Otherwise the device should
     * implement its own auto-focus behavior. */
    void checkAutoFocusTrigger();

    /* Implementation for getCurrentFrame that includes pixel format conversion
     * if needed. This allows subclasses to easily use this method instead of
     * having to reimplement the conversion all over.
     */
    status_t getCurrentFrameImpl(const uint8_t* source, uint8_t* dest,
                                 uint32_t pixelFormat) const;

    /****************************************************************************
     * Worker thread management.
     * Typically, when an emulated camera device starts capturing frames from
     * the actual device, it does that in a worker thread created in
     * StartCapturing, and terminated in StopCapturing. Since this is such a
     * typical scenario, it makes sense to encapsulate worker thread management
     * in the base class for all emulated camera devices.
     ***************************************************************************/

protected:
    /* Starts the worker thread.
     * Typically, the worker thread is started from the startDeliveringFrames
     * method of this class.
     * Param:
     *  one_burst - Controls how many times the thread loop should run. If this
     *      parameter is 'true', the thread routine will run only once. If this
     *      parameter is 'false', the thread routine will run until the
     *      stopWorkerThread method is called. See startDeliveringFrames for
     *      more info.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t startWorkerThread(bool one_burst);

    /* Stop the worker thread.
     * Note that this method will always wait for the worker thread to
     * terminate. Typically, the worker thread is stopped from the
     * stopDeliveringFrames method of this class.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t stopWorkerThread();

    /* Produce a camera frame and place it in buffer. The buffer is one of
     * the two buffers provided to mFrameProducer during construction along with
     * a pointer to this method. The method is expected to know what size frames
     * it provided to the producer thread. Returning false indicates an
     * unrecoverable error that will stop the frame production thread. */
    virtual bool produceFrame(void* buffer, int64_t* timestamp) = 0;

    /* Get the primary buffer to use when constructing the FrameProducer. */
    virtual void* getPrimaryBuffer() {
        return mFrameBuffers[0].data();
    }

    /* Get the secondary buffer to use when constructing the FrameProducer. */
    virtual void* getSecondaryBuffer() {
        return mFrameBuffers[1].data();
    }

    /* A class that encapsulates the asynchronous behavior of a camera. This
     * includes asynchronous production (through another thread) and frame
     * delivery, as well as asynchronous state changes that have to be
     * synchronized with frame production and delivery but must not block the
     * camera HAL. */
    class CameraThread : public WorkerThread {
    public:
        typedef bool (*ProduceFrameFunc)(void* opaque,
                                         void* destinationBuffer,
                                         int64_t* destinationTimestamp);
        CameraThread(EmulatedCameraDevice* cameraDevice,
                     ProduceFrameFunc producer,
                     void* producerOpaque);

        /* Access the primary buffer of the frame producer; this is the frame
         * that is currently not being written to. The buffer will only have
         * valid contents if hasFrame() returns true. Note that accessing this
         * without first having created a Lock can lead to contents changing
         * without notice. */
        const void* getPrimaryBuffer() const;
        int64_t getPrimaryTimestamp() const;

        /* Lock and unlock the primary buffer */
        void lockPrimaryBuffer();
        void unlockPrimaryBuffer();

        void requestRestart(int width, int height, uint32_t pixelFormat,
                            bool takingPicture, bool oneBurst);

    private:
        bool checkRestartRequest();
        bool waitForFrameOrTimeout(nsecs_t timeout);
        bool inWorkerThread() override;

        status_t onThreadStart() override;
        void onThreadExit() override;

        /* A class with a thread that will call a function at a specified
         * interval to produce frames. This is done in a double-buffered fashion
         * to make sure that one of the frames can be delivered without risk of
         * overwriting its contents. Access to the primary buffer, the one NOT
         * being drawn to, should be protected with the lock methods provided or
         * the guarantee of not overwriting the contents does not hold.
         */
        class FrameProducer : public WorkerThread {
        public:
            FrameProducer(EmulatedCameraDevice* cameraDevice,
                          ProduceFrameFunc producer, void* opaque,
                          void* primaryBuffer, void* secondaryBuffer);

            /* Indicates if the producer has produced at least one frame. */
            bool hasFrame() const;

            const void* getPrimaryBuffer() const;
            int64_t getPrimaryTimestamp() const;

            void lockPrimaryBuffer();
            void unlockPrimaryBuffer();

        protected:
            bool inWorkerThread() override;

            ProduceFrameFunc mProducer;
            void* mOpaque;
            void* mPrimaryBuffer;
            void* mSecondaryBuffer;
            int64_t mPrimaryTimestamp;
            int64_t mSecondaryTimestamp;
            nsecs_t mLastFrame;
            mutable Mutex mBufferMutex;
            std::atomic<bool> mHasFrame;
        };

        nsecs_t mCurFrameTimestamp;
        /* Worker thread that will produce frames for the camera thread */
        sp<FrameProducer> mFrameProducer;
        ProduceFrameFunc mProducerFunc;
        void* mProducerOpaque;
        Mutex mRequestMutex;
        int mRestartWidth;
        int mRestartHeight;
        uint32_t mRestartPixelFormat;
        bool mRestartOneBurst;
        bool mRestartTakingPicture;
        bool mRestartRequested;
    };

    /****************************************************************************
     * Data members
     ***************************************************************************/

protected:
    /* Locks this instance for parameters, state, etc. change. */
    Mutex                       mObjectLock;

    /* A camera thread that is used in frame production, delivery and handling
     * of asynchronous restarts. Internally the process of generating and
     * delivering frames is split up into two threads. This way frames can
     * always be delivered on time even if they cannot be produced fast enough
     * to keep up with the expected frame rate. It also increases performance on
     * multi-core systems. If the producer cannot keep up the last frame will
     * simply be delivered again. */
    sp<CameraThread>            mCameraThread;

    /* Emulated camera object containing this instance. */
    EmulatedCamera*             mCameraHAL;

    /* Framebuffers containing the frame being drawn to and the frame being
     * delivered. This is used by the double buffering producer thread and
     * the consumer thread will copy frames from one of these buffers to
     * mCurrentFrame to avoid being stalled by frame production. */
    std::vector<uint8_t>        mFrameBuffers[2];

    /*
     * Framebuffer properties.
     */

    /* Byte size of the framebuffer. */
    size_t                      mFrameBufferSize;

    /* Original pixel format (one of the V4L2_PIX_FMT_XXX values, as defined in
     * bionic/libc/kernel/common/linux/videodev2.h) */
    uint32_t                    mPixelFormat;

    /* Frame width */
    int                         mFrameWidth;

    /* Frame height */
    int                         mFrameHeight;

    /* The number of frames per second that the camera should deliver */
    int                         mFramesPerSecond;

    /* Defines byte distance between the start of each Y row */
    int                         mYStride;

    /* Defines byte distance between the start of each U/V row. For formats with
     * separate U and V planes this is the distance between rows in each plane.
     * For formats with interleaved U and V components this is the distance
     * between rows in the interleaved plane, meaning that it's the stride over
     * the combined U and V components. */
    int                         mUVStride;

    /* Total number of pixels */
    int                         mTotalPixels;

    /* Exposure compensation value */
    float                       mExposureCompensation;

    float*                      mWhiteBalanceScale;

    DefaultKeyedVector<String8, float*>      mSupportedWhiteBalanceScale;

    /* Defines possible states of the emulated camera device object.
     */
    enum EmulatedCameraDeviceState {
        /* Object has been constructed. */
        ECDS_CONSTRUCTED,
        /* Object has been initialized. */
        ECDS_INITIALIZED,
        /* Object has been connected to the physical device. */
        ECDS_CONNECTED,
        /* Camera device has been started. */
        ECDS_STARTED,
    };

    /* Object state. */
    EmulatedCameraDeviceState   mState;

private:
    /* Lock the current frame so that it can safely be accessed using
     * getCurrentFrame. Prefer using a FrameLock object on the stack instead
     * to ensure that the lock is always unlocked properly.
     */
    void lockCurrentFrame();
    /* Unlock the current frame after locking it. Prefer using a FrameLock
     * object instead.
     */
    void unlockCurrentFrame();

    static bool staticProduceFrame(void* opaque, void* buffer,
                                   int64_t* timestamp) {
        auto cameraDevice = reinterpret_cast<EmulatedCameraDevice*>(opaque);
        return cameraDevice->produceFrame(buffer, timestamp);
    }

    /* A flag indicating if an auto-focus completion event should be sent the
     * next time the worker thread runs. This implies that the auto-focus
     * completion event can only be delivered while preview frames are being
     * delivered. This is also a requirement specified in the documentation,
     * where a request to perform auto-focusing is only valid between calls to
     * startPreview and stopPreview.
     * https://developer.android.com/reference/android/hardware/Camera.html#autoFocus(android.hardware.Camera.AutoFocusCallback)
     */
    std::atomic<bool> mTriggerAutoFocus;
};

}; /* namespace android */

#endif  /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H */