• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2009 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
#include <inttypes.h>
#include <stdlib.h>
18 
19 //#define LOG_NDEBUG 0
20 #define LOG_TAG "CameraSource"
21 #include <utils/Log.h>
22 
23 #include <OMX_Component.h>
24 #include <binder/IPCThreadState.h>
25 #include <binder/MemoryBase.h>
26 #include <binder/MemoryHeapBase.h>
27 #include <media/hardware/HardwareAPI.h>
28 #include <media/stagefright/foundation/ADebug.h>
29 #include <media/stagefright/CameraSource.h>
30 #include <media/stagefright/MediaDefs.h>
31 #include <media/stagefright/MediaErrors.h>
32 #include <media/stagefright/MetaData.h>
33 #include <camera/Camera.h>
34 #include <camera/CameraParameters.h>
35 #include <camera/StringUtils.h>
36 #include <com_android_graphics_libgui_flags.h>
37 #include <gui/Surface.h>
38 #include <gui/Flags.h>
39 #include <utils/String8.h>
40 #include <cutils/properties.h>
41 
42 #if LOG_NDEBUG
43 #define UNUSED_UNLESS_VERBOSE(x) (void)(x)
44 #else
45 #define UNUSED_UNLESS_VERBOSE(x)
46 #endif
47 
48 namespace android {
49 
50 static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
51 
getColorFormat(const char * colorFormat)52 static int32_t getColorFormat(const char* colorFormat) {
53     if (!colorFormat) {
54         ALOGE("Invalid color format");
55         return -1;
56     }
57 
58     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
59        return OMX_COLOR_FormatYUV420Planar;
60     }
61 
62     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
63        return OMX_COLOR_FormatYUV422SemiPlanar;
64     }
65 
66     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
67         return OMX_COLOR_FormatYUV420SemiPlanar;
68     }
69 
70     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
71         return OMX_COLOR_FormatYCbYCr;
72     }
73 
74     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
75        return OMX_COLOR_Format16bitRGB565;
76     }
77 
78     if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
79        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
80     }
81 
82     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
83         return OMX_COLOR_FormatAndroidOpaque;
84     }
85 
86     ALOGE("Uknown color format (%s), please add it to "
87          "CameraSource::getColorFormat", colorFormat);
88 
89     CHECK(!"Unknown color format");
90     return -1;
91 }
92 
93 // static
CreateFromCamera(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate,const sp<SurfaceType> & surface)94 CameraSource *CameraSource::CreateFromCamera(
95     const sp<hardware::ICamera>& camera,
96     const sp<ICameraRecordingProxy>& proxy,
97     int32_t cameraId,
98     const String16& clientName,
99     uid_t clientUid,
100     pid_t clientPid,
101     Size videoSize,
102     int32_t frameRate,
103     const sp<SurfaceType>& surface) {
104 
105     CameraSource *source = new CameraSource(camera, proxy, cameraId,
106             clientName, clientUid, clientPid, videoSize, frameRate, surface);
107     return source;
108 }
109 
// Constructs a CameraSource and immediately attempts initialization.
// Construction never fails outright; callers must consult initCheck()
// for the result. On init failure the camera is released here so the
// destructor does not have to.
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<SurfaceType>& surface)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mEos(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mStopSystemTimeUs(-1),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 means "use the camera's current setting" until init() resolves
    // the actual dimensions.
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate);
    if (mInitCheck != OK) releaseCamera();
}
145 
// Returns the status of initialization performed in the constructor;
// OK means the source is ready for start().
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
149 
// Connects to (or adopts) the camera and takes the hardware lock.
// If `camera` is NULL, we open our own connection to camera `cameraId`
// (a "cold" camera owned by us); otherwise we wrap the app-supplied
// ICamera and keep its recording proxy (a "hot" camera owned by the
// application).
// @return OK on success, -EBUSY if the camera cannot be obtained.
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        // Cold camera: connect under the client's attribution so the
        // camera service attributes usage to the recording app.
        AttributionSourceState clientAttribution;
        clientAttribution.pid = clientPid;
        clientAttribution.uid = clientUid;
        clientAttribution.deviceId = kDefaultDeviceId;
        clientAttribution.packageName = clientName;
        clientAttribution.token = sp<BBinder>::make();

        mCamera = Camera::connect(cameraId, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                /*forceSlowJpegMode*/false, clientAttribution);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Take the hardware lock; it is released in releaseCamera().
    mCamera->lock();

    return OK;
}
185 
186 
187 /*
188  * Check to see whether the requested video width and height is one
189  * of the supported sizes.
190  * @param width the video frame width in pixels
191  * @param height the video frame height in pixels
192  * @param supportedSizes the vector of sizes that we check against
193  * @return true if the dimension (width and height) is supported.
194  */
isVideoSizeSupported(int32_t width,int32_t height,const Vector<Size> & supportedSizes)195 static bool isVideoSizeSupported(
196     int32_t width, int32_t height,
197     const Vector<Size>& supportedSizes) {
198 
199     ALOGV("isVideoSizeSupported");
200     for (size_t i = 0; i < supportedSizes.size(); ++i) {
201         if (width  == supportedSizes[i].width &&
202             height == supportedSizes[i].height) {
203             return true;
204         }
205     }
206     return false;
207 }
208 
209 /*
210  * If the preview and video output is separate, we only set the
211  * the video size, and applications should set the preview size
212  * to some proper value, and the recording framework will not
213  * change the preview size; otherwise, if the video and preview
214  * output is the same, we need to set the preview to be the same
215  * as the requested video size.
216  *
217  */
218 /*
219  * Query the camera to retrieve the supported video frame sizes
220  * and also to see whether CameraParameters::setVideoSize()
221  * is supported or not.
222  * @param params CameraParameters to retrieve the information
223  * @param isSetVideoSizeSupported returns whether method
224  *      CameraParameters::setVideoSize() is supported or not.
225  * @param sizes returns the vector of Size objects for the
226  *      supported video frame sizes advertised by the camera.
227  */
getSupportedVideoSizes(const CameraParameters & params,bool * isSetVideoSizeSupported,Vector<Size> & sizes)228 static void getSupportedVideoSizes(
229     const CameraParameters& params,
230     bool *isSetVideoSizeSupported,
231     Vector<Size>& sizes) {
232 
233     *isSetVideoSizeSupported = true;
234     params.getSupportedVideoSizes(sizes);
235     if (sizes.size() == 0) {
236         ALOGD("Camera does not support setVideoSize()");
237         params.getSupportedPreviewSizes(sizes);
238         *isSetVideoSizeSupported = false;
239     }
240 }
241 
242 /*
243  * Check whether the camera has the supported color format
244  * @param params CameraParameters to retrieve the information
245  * @return OK if no error.
246  */
isCameraColorFormatSupported(const CameraParameters & params)247 status_t CameraSource::isCameraColorFormatSupported(
248         const CameraParameters& params) {
249     mColorFormat = getColorFormat(params.get(
250             CameraParameters::KEY_VIDEO_FRAME_FORMAT));
251     if (mColorFormat == -1) {
252         return BAD_VALUE;
253     }
254     return OK;
255 }
256 
257 /*
258  * Configure the camera to use the requested video size
259  * (width and height) and/or frame rate. If both width and
260  * height are -1, configuration on the video size is skipped.
261  * if frameRate is -1, configuration on the frame rate
262  * is skipped. Skipping the configuration allows one to
263  * use the current camera setting without the need to
264  * actually know the specific values (see Create() method).
265  *
266  * @param params the CameraParameters to be configured
267  * @param width the target video frame width in pixels
268  * @param height the target video frame height in pixels
269  * @param frameRate the target frame rate in frames per second.
270  * @return OK if no error.
271  */
configureCamera(CameraParameters * params,int32_t width,int32_t height,int32_t frameRate)272 status_t CameraSource::configureCamera(
273         CameraParameters* params,
274         int32_t width, int32_t height,
275         int32_t frameRate) {
276     ALOGV("configureCamera");
277     Vector<Size> sizes;
278     bool isSetVideoSizeSupportedByCamera = true;
279     getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
280     bool isCameraParamChanged = false;
281     if (width != -1 && height != -1) {
282         if (!isVideoSizeSupported(width, height, sizes)) {
283             ALOGE("Video dimension (%dx%d) is unsupported", width, height);
284             return BAD_VALUE;
285         }
286         if (isSetVideoSizeSupportedByCamera) {
287             params->setVideoSize(width, height);
288         } else {
289             params->setPreviewSize(width, height);
290         }
291         isCameraParamChanged = true;
292     } else if ((width == -1 && height != -1) ||
293                (width != -1 && height == -1)) {
294         // If one and only one of the width and height is -1
295         // we reject such a request.
296         ALOGE("Requested video size (%dx%d) is not supported", width, height);
297         return BAD_VALUE;
298     } else {  // width == -1 && height == -1
299         // Do not configure the camera.
300         // Use the current width and height value setting from the camera.
301     }
302 
303     if (frameRate != -1) {
304         CHECK(frameRate > 0 && frameRate <= 120);
305         const char* supportedFrameRates =
306                 params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
307         CHECK(supportedFrameRates != NULL);
308         ALOGV("Supported frame rates: %s", supportedFrameRates);
309         char buf[4];
310         snprintf(buf, 4, "%d", frameRate);
311         if (strstr(supportedFrameRates, buf) == NULL) {
312             ALOGE("Requested frame rate (%d) is not supported: %s",
313                 frameRate, supportedFrameRates);
314             return BAD_VALUE;
315         }
316 
317         // The frame rate is supported, set the camera to the requested value.
318         params->setPreviewFrameRate(frameRate);
319         isCameraParamChanged = true;
320     } else {  // frameRate == -1
321         // Do not configure the camera.
322         // Use the current frame rate value setting from the camera
323     }
324 
325     if (isCameraParamChanged) {
326         // Either frame rate or frame size needs to be changed.
327         String8 s = params->flatten();
328         if (OK != mCamera->setParameters(s)) {
329             ALOGE("Could not change settings."
330                  " Someone else is using camera %p?", mCamera.get());
331             return -EBUSY;
332         }
333     }
334     return OK;
335 }
336 
337 /*
338  * Check whether the requested video frame size
339  * has been successfully configured or not. If both width and height
340  * are -1, check on the current width and height value setting
341  * is performed.
342  *
343  * @param params CameraParameters to retrieve the information
344  * @param the target video frame width in pixels to check against
345  * @param the target video frame height in pixels to check against
346  * @return OK if no error
347  */
checkVideoSize(const CameraParameters & params,int32_t width,int32_t height)348 status_t CameraSource::checkVideoSize(
349         const CameraParameters& params,
350         int32_t width, int32_t height) {
351 
352     ALOGV("checkVideoSize");
353     // The actual video size is the same as the preview size
354     // if the camera hal does not support separate video and
355     // preview output. In this case, we retrieve the video
356     // size from preview.
357     int32_t frameWidthActual = -1;
358     int32_t frameHeightActual = -1;
359     Vector<Size> sizes;
360     params.getSupportedVideoSizes(sizes);
361     if (sizes.size() == 0) {
362         // video size is the same as preview size
363         params.getPreviewSize(&frameWidthActual, &frameHeightActual);
364     } else {
365         // video size may not be the same as preview
366         params.getVideoSize(&frameWidthActual, &frameHeightActual);
367     }
368     if (frameWidthActual < 0 || frameHeightActual < 0) {
369         ALOGE("Failed to retrieve video frame size (%dx%d)",
370                 frameWidthActual, frameHeightActual);
371         return UNKNOWN_ERROR;
372     }
373 
374     // Check the actual video frame size against the target/requested
375     // video frame size.
376     if (width != -1 && height != -1) {
377         if (frameWidthActual != width || frameHeightActual != height) {
378             ALOGE("Failed to set video frame size to %dx%d. "
379                     "The actual video size is %dx%d ", width, height,
380                     frameWidthActual, frameHeightActual);
381             return UNKNOWN_ERROR;
382         }
383     }
384 
385     // Good now.
386     mVideoSize.width = frameWidthActual;
387     mVideoSize.height = frameHeightActual;
388     return OK;
389 }
390 
391 /*
392  * Check the requested frame rate has been successfully configured or not.
393  * If the target frameRate is -1, check on the current frame rate value
394  * setting is performed.
395  *
396  * @param params CameraParameters to retrieve the information
397  * @param the target video frame rate to check against
398  * @return OK if no error.
399  */
checkFrameRate(const CameraParameters & params,int32_t frameRate)400 status_t CameraSource::checkFrameRate(
401         const CameraParameters& params,
402         int32_t frameRate) {
403 
404     ALOGV("checkFrameRate");
405     int32_t frameRateActual = params.getPreviewFrameRate();
406     if (frameRateActual < 0) {
407         ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
408         return UNKNOWN_ERROR;
409     }
410 
411     // Check the actual video frame rate against the target/requested
412     // video frame rate.
413     if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
414         ALOGE("Failed to set preview frame rate to %d fps. The actual "
415                 "frame rate is %d", frameRate, frameRateActual);
416         return UNKNOWN_ERROR;
417     }
418 
419     // Good now.
420     mVideoFrameRate = frameRateActual;
421     return OK;
422 }
423 
424 /*
425  * Initialize the CameraSource to so that it becomes
426  * ready for providing the video input streams as requested.
427  * @param camera the camera object used for the video source
428  * @param cameraId if camera == 0, use camera with this id
429  *      as the video source
430  * @param videoSize the target video frame size. If both
431  *      width and height in videoSize is -1, use the current
432  *      width and height settings by the camera
433  * @param frameRate the target frame rate in frames per second.
434  *      if it is -1, use the current camera frame rate setting.
435  * @param storeMetaDataInVideoBuffers request to store meta
436  *      data or real YUV data in video buffers. Request to
437  *      store meta data in video buffers may not be honored
438  *      if the source does not support this feature.
439  *
440  * @return OK if no error.
441  */
init(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate)442 status_t CameraSource::init(
443         const sp<hardware::ICamera>& camera,
444         const sp<ICameraRecordingProxy>& proxy,
445         int32_t cameraId,
446         const String16& clientName,
447         uid_t clientUid,
448         pid_t clientPid,
449         Size videoSize,
450         int32_t frameRate) {
451 
452     ALOGV("init");
453     status_t err = OK;
454     int64_t token = IPCThreadState::self()->clearCallingIdentity();
455     err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
456                                videoSize, frameRate);
457     IPCThreadState::self()->restoreCallingIdentity(token);
458     return err;
459 }
460 
createVideoBufferMemoryHeap(size_t size,uint32_t bufferCount)461 void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
462     mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
463             "StageFright-CameraSource-BufferHeap");
464     for (uint32_t i = 0; i < bufferCount; i++) {
465         mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
466     }
467 }
468 
// Creates the buffer queue used to receive video frames from the
// camera in VIDEO_BUFFER_MODE_BUFFER_QUEUE, the metadata memory heap,
// and the listener thread that drains the queue.
// @param width/height default buffer dimensions
// @param format HAL pixel format for the buffers
// @param dataSpace default dataspace for the buffers
// @param bufferCount number of encoder-side buffers; kConsumerBufferCount
//        extra consumer buffers are added on top
// @return OK, ALREADY_EXISTS if a queue was already created, or the
//         first failing status.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // CPU-readable buffers by default; implementation-defined buffers
    // are opaque and go to the hardware encoder instead.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    bufferCount += kConsumerBufferCount;

    sp<Surface> surface;
    std::tie(mVideoBufferConsumer, surface) =
            BufferItemConsumer::create(usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
    mVideoBufferProducer = surface;
#else
    mVideoBufferProducer = surface->getIGraphicBufferProducer();
#endif  // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // Hand the producer side of the queue to the camera.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);

    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
535 
initWithCameraAccess(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate)536 status_t CameraSource::initWithCameraAccess(
537         const sp<hardware::ICamera>& camera,
538         const sp<ICameraRecordingProxy>& proxy,
539         int32_t cameraId,
540         const String16& clientName,
541         uid_t clientUid,
542         pid_t clientPid,
543         Size videoSize,
544         int32_t frameRate) {
545     ALOGV("initWithCameraAccess");
546     status_t err = OK;
547 
548     if ((err = isCameraAvailable(camera, proxy, cameraId,
549             toStdString(clientName), clientUid, clientPid)) != OK) {
550         ALOGE("Camera connection could not be established.");
551         return err;
552     }
553     CameraParameters params(mCamera->getParameters());
554     if ((err = isCameraColorFormatSupported(params)) != OK) {
555         return err;
556     }
557 
558     // Set the camera to use the requested video frame size
559     // and/or frame rate.
560     if ((err = configureCamera(&params,
561                     videoSize.width, videoSize.height,
562                     frameRate))) {
563         return err;
564     }
565 
566     // Check on video frame size and frame rate.
567     CameraParameters newCameraParams(mCamera->getParameters());
568     if ((err = checkVideoSize(newCameraParams,
569                 videoSize.width, videoSize.height)) != OK) {
570         return err;
571     }
572     if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
573         return err;
574     }
575 
576     // Set the preview display. Skip this if mSurface is null because
577     // applications may already set a surface to the camera.
578     if (mSurface != NULL) {
579         // Surface may be set incorrectly or could already be used even if we just
580         // passed the lock/unlock check earlier by calling mCamera->setParameters().
581         if ((err = mCamera->setPreviewTarget(mSurface)) != OK) {
582             return err;
583         }
584     }
585 
586     // Use buffer queue to receive video buffers from camera
587     err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
588     if (err != OK) {
589         ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
590                 "%s (err=%d)", __FUNCTION__, strerror(-err), err);
591         return err;
592     }
593 
594     int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
595     if (glitchDurationUs > mGlitchDurationThresholdUs) {
596         mGlitchDurationThresholdUs = glitchDurationUs;
597     }
598 
599     // XXX: query camera for the stride and slice height
600     // when the capability becomes available.
601     mMeta = new MetaData;
602     mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
603     mMeta->setInt32(kKeyColorFormat, mColorFormat);
604     mMeta->setInt32(kKeyWidth,       mVideoSize.width);
605     mMeta->setInt32(kKeyHeight,      mVideoSize.height);
606     mMeta->setInt32(kKeyStride,      mVideoSize.width);
607     mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
608     mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
609     return OK;
610 }
611 
~CameraSource()612 CameraSource::~CameraSource() {
613     if (mStarted) {
614         reset();
615     } else if (mInitCheck == OK) {
616         // Camera is initialized but because start() is never called,
617         // the lock on Camera is never released(). This makes sure
618         // Camera's lock is released in this case.
619         releaseCamera();
620     }
621 }
622 
startCameraRecording()623 status_t CameraSource::startCameraRecording() {
624     ALOGV("startCameraRecording");
625     // Reset the identity to the current thread because media server owns the
626     // camera and recording is started by the applications. The applications
627     // will connect to the camera in ICameraRecordingProxy::startRecording.
628     int64_t token = IPCThreadState::self()->clearCallingIdentity();
629     status_t err;
630 
631     // Initialize buffer queue.
632     err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
633             (android_dataspace_t)mEncoderDataSpace,
634             mNumInputBuffers > 0 ? mNumInputBuffers : 1);
635     if (err != OK) {
636         ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
637                 strerror(-err), err);
638         return err;
639     }
640 
641     // Start data flow
642     err = OK;
643     if (mCameraFlags & FLAGS_HOT_CAMERA) {
644         mCamera->unlock();
645         mCamera.clear();
646         if ((err = mCameraRecordingProxy->startRecording()) != OK) {
647             ALOGE("Failed to start recording, received error: %s (%d)",
648                     strerror(-err), err);
649         }
650     } else {
651         mCamera->startRecording();
652         if (!mCamera->recordingEnabled()) {
653             err = -EINVAL;
654             ALOGE("Failed to start recording");
655         }
656     }
657     IPCThreadState::self()->restoreCallingIdentity(token);
658     return err;
659 }
660 
start(MetaData * meta)661 status_t CameraSource::start(MetaData *meta) {
662     ALOGV("start");
663     CHECK(!mStarted);
664     if (mInitCheck != OK) {
665         ALOGE("CameraSource is not initialized yet");
666         return mInitCheck;
667     }
668 
669     if (property_get_bool("media.stagefright.record-stats", false)) {
670         mCollectStats = true;
671     }
672 
673     mStartTimeUs = 0;
674     mNumInputBuffers = 0;
675     mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
676     mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;
677 
678     if (meta) {
679         int64_t startTimeUs;
680         if (meta->findInt64(kKeyTime, &startTimeUs)) {
681             mStartTimeUs = startTimeUs;
682         }
683 
684         int32_t nBuffers;
685         if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
686             CHECK_GT(nBuffers, 0);
687             mNumInputBuffers = nBuffers;
688         }
689 
690         // apply encoder color format if specified
691         if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
692             ALOGI("Using encoder format: %#x", mEncoderFormat);
693         }
694         if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
695             ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
696             mBufferDataSpace = mEncoderDataSpace;
697         }
698     }
699 
700     status_t err;
701     if ((err = startCameraRecording()) == OK) {
702         mStarted = true;
703     }
704 
705     return err;
706 }
707 
stopCameraRecording()708 void CameraSource::stopCameraRecording() {
709     ALOGV("stopCameraRecording");
710     if (mCameraFlags & FLAGS_HOT_CAMERA) {
711         if (mCameraRecordingProxy != 0) {
712             mCameraRecordingProxy->stopRecording();
713         }
714     } else {
715         if (mCamera != 0) {
716             mCamera->stopRecording();
717         }
718     }
719 }
720 
// Releases all camera resources: unlocks (and for a cold camera, stops
// preview and disconnects) the Camera, unlinks the recording proxy's
// death notifier, and clears the camera flags. The camera calls are
// made outside mLock, on a local reference, to avoid holding the lock
// across binder transactions.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        // Perform the binder calls under our own identity.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
753 
// Stops recording and tears down all per-session state: releases
// queued frames, waits (with a timeout per iteration) for frames
// still out at the encoder, stops the camera stream, shuts down the
// buffer-queue listener thread, drops the queue endpoints, and
// releases the camera. Always returns OK.
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mEos = false;
        mStopSystemTimeUs = -1;
        // Wake any reader blocked waiting for a frame.
        mFrameAvailableCondition.signal();

        // Clear the binder identity for the camera calls below, but
        // only if we still hold a camera reference.
        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        // Wait for buffers the encoder still holds; each wait is bounded
        // by one capture interval plus a 3 s safety timeout.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must have been either encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    // Shut down the listener thread and the buffer queue outside mLock.
    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
810 
// Returns one recording frame to the camera. |frame| is an IMemory slot from
// our own heap whose payload is a VideoNativeMetadata naming a native window
// buffer; the matching BufferItem is released back to the buffer queue and
// the slot is recycled into mMemoryBases for processBufferQueueFrame().
// Frames from a foreign heap or with no mapped buffer item are ignored.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
    if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
        // Frame was not allocated from our heap, so it cannot be one of ours.
        ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                heap->getHeapID(), mMemoryHeapBase->getHeapID());
        return;
    }

    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);

    // Find the corresponding buffer item for the native window buffer.
    ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
    if (index == NAME_NOT_FOUND) {
        ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
        return;
    }

    BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
    mReceivedBufferItemMap.removeItemsAt(index);
    mVideoBufferConsumer->releaseBuffer(buffer);
    // Recycle the metadata slot and wake processBufferQueueFrame() if it is
    // blocked waiting for a free slot.
    mMemoryBases.push_back(frame);
    mMemoryBaseAvailableCond.signal();
}
840 
releaseQueuedFrames()841 void CameraSource::releaseQueuedFrames() {
842     List<sp<IMemory> >::iterator it;
843     while (!mFramesReceived.empty()) {
844         it = mFramesReceived.begin();
845         releaseRecordingFrame(*it);
846         mFramesReceived.erase(it);
847         ++mNumFramesDropped;
848     }
849 }
850 
// Returns the metadata (mMeta) describing the video stream's format.
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
854 
// Releases a single recording frame; the base implementation simply forwards
// to releaseRecordingFrame(). Kept as a separate method presumably so
// subclasses can intercept the release — NOTE(review): confirm overriders.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
858 
signalBufferReturned(MediaBufferBase * buffer)859 void CameraSource::signalBufferReturned(MediaBufferBase *buffer) {
860     ALOGV("signalBufferReturned: %p", buffer->data());
861     Mutex::Autolock autoLock(mLock);
862     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
863          it != mFramesBeingEncoded.end(); ++it) {
864         if ((*it)->unsecurePointer() ==  buffer->data()) {
865             releaseOneRecordingFrame((*it));
866             mFramesBeingEncoded.erase(it);
867             ++mNumFramesEncoded;
868             buffer->setObserver(0);
869             buffer->release();
870             mFrameCompleteCondition.signal();
871             return;
872         }
873     }
874     CHECK(!"signalBufferReturned: bogus buffer");
875 }
876 
// MediaSource::read(): blocks until a captured frame is available (or the
// source stops / reaches EOS), then wraps the frame in a MediaBuffer that
// aliases the frame's memory. The frame is moved to mFramesBeingEncoded and
// stays there until the encoder returns it via signalBufferReturned().
//
// Returns OK with *buffer set on success (or OK with *buffer == NULL if the
// source was stopped), ERROR_END_OF_STREAM on EOS or a dead camera proxy,
// and ERROR_UNSUPPORTED for seek requests (a live source cannot seek).
status_t CameraSource::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Wait for a frame; on each timeout, check whether the camera
        // recording proxy died before logging and waiting again.
        while (mStarted && !mEos && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        if (mFramesReceived.empty()) {
            return ERROR_END_OF_STREAM;
        }
        // Pop the oldest frame and its timestamp (the two lists run in
        // lockstep) and track it as being encoded.
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        // TODO: Using unsecurePointer() has some associated security pitfalls
        //       (see declaration for details).
        //       Either document why it is safe in this case or address the
        //       issue (e.g. by copying).
        *buffer = new MediaBuffer(frame->unsecurePointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
        // Propagate a dataspace change from the camera to the encoder once,
        // via buffer metadata, the first time it differs.
        if (mBufferDataSpace != mEncoderDataSpace) {
            ALOGD("Data space updated to %x", mBufferDataSpace);
            (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
            mEncoderDataSpace = mBufferDataSpace;
        }
    }
    return OK;
}
935 
setStopTimeUs(int64_t stopTimeUs)936 status_t CameraSource::setStopTimeUs(int64_t stopTimeUs) {
937     Mutex::Autolock autoLock(mLock);
938     ALOGV("Set stoptime: %lld us", (long long)stopTimeUs);
939 
940     if (stopTimeUs < -1) {
941         ALOGE("Invalid stop time %lld us", (long long)stopTimeUs);
942         return BAD_VALUE;
943     } else if (stopTimeUs == -1) {
944         ALOGI("reset stopTime to be -1");
945     }
946 
947     mStopSystemTimeUs = stopTimeUs;
948     return OK;
949 }
950 
// Decides (caller holds mLock) whether the frame captured at |timestampUs|
// must be dropped, and maintains per-frame bookkeeping as a side effect:
// EOS detection against mStopSystemTimeUs, glitch counting, updating
// mLastFrameTimestampUs, and rebasing mStartTimeUs to the initial delay on
// the first accepted frame. Returns true if the frame should be skipped.
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Not started yet, or frame predates the requested start time.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // Past the stop time requested via setStopTimeUs(): flag EOS and wake
    // the reader blocked in read().
    if (mStopSystemTimeUs != -1 && timestampUs >= mStopSystemTimeUs) {
        ALOGV("Drop Camera frame at %lld  stop time: %lld us",
                (long long)timestampUs, (long long)mStopSystemTimeUs);
        mEos = true;
        mFrameAvailableCondition.signal();
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing; count long inter-frame
        // gaps as glitches for the stats reported at reset().
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // From here on mStartTimeUs holds the initial delay (capture
            // time minus requested start time), used when rebasing frame
            // times in processBufferQueueFrame().
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
998 
BufferQueueListener(const sp<BufferItemConsumer> & consumer,const sp<CameraSource> & cameraSource)999 CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
1000         const sp<CameraSource>& cameraSource) {
1001     mConsumer = consumer;
1002     mConsumer->setFrameAvailableListener(this);
1003     mCameraSource = cameraSource;
1004 }
1005 
onFrameAvailable(const BufferItem &)1006 void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
1007     ALOGV("%s: onFrameAvailable", __FUNCTION__);
1008 
1009     Mutex::Autolock l(mLock);
1010 
1011     if (!mFrameAvailable) {
1012         mFrameAvailable = true;
1013         mFrameAvailableSignal.signal();
1014     }
1015 }
1016 
threadLoop()1017 bool CameraSource::BufferQueueListener::threadLoop() {
1018     if (mConsumer == nullptr || mCameraSource == nullptr) {
1019         return false;
1020     }
1021 
1022     {
1023         Mutex::Autolock l(mLock);
1024         while (!mFrameAvailable) {
1025             if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1026                 return true;
1027             }
1028         }
1029         mFrameAvailable = false;
1030     }
1031 
1032     BufferItem buffer;
1033     while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1034         mCameraSource->processBufferQueueFrame(buffer);
1035     }
1036 
1037     return true;
1038 }
1039 
// Handles one BufferItem acquired from the video buffer queue. Drops it if
// shouldSkipFrameLocked() says so; otherwise packs a VideoNativeMetadata
// (taken from the preallocated mMemoryBases pool) pointing at the item's
// native window buffer, records the payload->BufferItem mapping for later
// release, and queues it with its rebased timestamp for read(). Blocks with
// a bounded wait when no pool slot is free, dropping the frame on timeout.
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; the rest of this class uses us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    // Wait for releaseRecordingFrame() to recycle a metadata slot; give up
    // (and drop the frame) if none becomes available within the timeout.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());
    // Remember the buffer's dataspace; read() forwards changes to the encoder.
    mBufferDataSpace = buffer.mDataSpace;

    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // mStartTimeUs holds the initial delay computed in shouldSkipFrameLocked;
    // frame times are rebased relative to the first frame plus that delay.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    // Wake a reader blocked in read().
    mFrameAvailableCondition.signal();
}
1086 
// Reports that buffers handed to the encoder carry
// kMetadataBufferTypeANWBuffer metadata (a VideoNativeMetadata referencing a
// native window buffer) rather than raw frame data.
MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
    ALOGV("metaDataStoredInVideoBuffers");

    return kMetadataBufferTypeANWBuffer;
}
1092 
// Binder death notification for the camera recording proxy. Only logs here;
// read() independently detects the dead proxy binder on its wait timeout and
// returns ERROR_END_OF_STREAM.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1096 
1097 }  // namespace android
1098