1 /*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <inttypes.h>
18
19 //#define LOG_NDEBUG 0
20 #define LOG_TAG "CameraSource"
21 #include <utils/Log.h>
22
23 #include <OMX_Component.h>
24 #include <binder/IPCThreadState.h>
25 #include <binder/MemoryBase.h>
26 #include <binder/MemoryHeapBase.h>
27 #include <media/hardware/HardwareAPI.h>
28 #include <media/stagefright/foundation/ADebug.h>
29 #include <media/stagefright/CameraSource.h>
30 #include <media/stagefright/MediaDefs.h>
31 #include <media/stagefright/MediaErrors.h>
32 #include <media/stagefright/MetaData.h>
33 #include <camera/Camera.h>
34 #include <camera/CameraParameters.h>
35 #include <gui/Surface.h>
36 #include <utils/String8.h>
37 #include <cutils/properties.h>
38
39 #if LOG_NDEBUG
40 #define UNUSED_UNLESS_VERBOSE(x) (void)(x)
41 #else
42 #define UNUSED_UNLESS_VERBOSE(x)
43 #endif
44
45 namespace android {
46
47 static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
48
getColorFormat(const char * colorFormat)49 static int32_t getColorFormat(const char* colorFormat) {
50 if (!colorFormat) {
51 ALOGE("Invalid color format");
52 return -1;
53 }
54
55 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
56 return OMX_COLOR_FormatYUV420Planar;
57 }
58
59 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
60 return OMX_COLOR_FormatYUV422SemiPlanar;
61 }
62
63 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
64 return OMX_COLOR_FormatYUV420SemiPlanar;
65 }
66
67 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
68 return OMX_COLOR_FormatYCbYCr;
69 }
70
71 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
72 return OMX_COLOR_Format16bitRGB565;
73 }
74
75 if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
76 return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
77 }
78
79 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
80 return OMX_COLOR_FormatAndroidOpaque;
81 }
82
83 ALOGE("Uknown color format (%s), please add it to "
84 "CameraSource::getColorFormat", colorFormat);
85
86 CHECK(!"Unknown color format");
87 return -1;
88 }
89
90 // static
CreateFromCamera(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate,const sp<IGraphicBufferProducer> & surface)91 CameraSource *CameraSource::CreateFromCamera(
92 const sp<hardware::ICamera>& camera,
93 const sp<ICameraRecordingProxy>& proxy,
94 int32_t cameraId,
95 const String16& clientName,
96 uid_t clientUid,
97 pid_t clientPid,
98 Size videoSize,
99 int32_t frameRate,
100 const sp<IGraphicBufferProducer>& surface) {
101
102 CameraSource *source = new CameraSource(camera, proxy, cameraId,
103 clientName, clientUid, clientPid, videoSize, frameRate, surface);
104 return source;
105 }
106
// Constructor: initializes all members to their idle defaults and then
// performs camera setup via init(). The result of init() is stored in
// mInitCheck (queried through initCheck()); on failure the camera is
// released immediately so no lock is left held.
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mEos(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mStopSystemTimeUs(-1),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 means "not yet configured"; filled in by checkVideoSize().
    mVideoSize.width = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                clientName, clientUid, clientPid,
                videoSize, frameRate);
    if (mInitCheck != OK) releaseCamera();
}
142
// Returns the status recorded by init() during construction: OK on
// success, otherwise the error that prevented camera initialization.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
146
// Establish access to a camera and take its lock.
// If no ICamera is supplied, connect to cameraId ourselves ("cold"
// camera, FLAGS_HOT_CAMERA cleared). Otherwise wrap the app-supplied
// ICamera and keep its recording proxy ("hot" camera, flag set).
// Returns -EBUSY if the camera cannot be obtained, OK otherwise.
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
                /*forceSlowJpegMode*/false);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        // Watch the app's proxy binder so we can detect its death later.
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Take exclusive access; released in releaseCamera() / reset().
    mCamera->lock();

    return OK;
}
175
176
177 /*
178 * Check to see whether the requested video width and height is one
179 * of the supported sizes.
180 * @param width the video frame width in pixels
181 * @param height the video frame height in pixels
182 * @param suppportedSizes the vector of sizes that we check against
183 * @return true if the dimension (width and height) is supported.
184 */
isVideoSizeSupported(int32_t width,int32_t height,const Vector<Size> & supportedSizes)185 static bool isVideoSizeSupported(
186 int32_t width, int32_t height,
187 const Vector<Size>& supportedSizes) {
188
189 ALOGV("isVideoSizeSupported");
190 for (size_t i = 0; i < supportedSizes.size(); ++i) {
191 if (width == supportedSizes[i].width &&
192 height == supportedSizes[i].height) {
193 return true;
194 }
195 }
196 return false;
197 }
198
199 /*
200 * If the preview and video output is separate, we only set the
201 * the video size, and applications should set the preview size
202 * to some proper value, and the recording framework will not
203 * change the preview size; otherwise, if the video and preview
204 * output is the same, we need to set the preview to be the same
205 * as the requested video size.
206 *
207 */
208 /*
209 * Query the camera to retrieve the supported video frame sizes
210 * and also to see whether CameraParameters::setVideoSize()
211 * is supported or not.
212 * @param params CameraParameters to retrieve the information
213 * @@param isSetVideoSizeSupported retunrs whether method
214 * CameraParameters::setVideoSize() is supported or not.
215 * @param sizes returns the vector of Size objects for the
216 * supported video frame sizes advertised by the camera.
217 */
getSupportedVideoSizes(const CameraParameters & params,bool * isSetVideoSizeSupported,Vector<Size> & sizes)218 static void getSupportedVideoSizes(
219 const CameraParameters& params,
220 bool *isSetVideoSizeSupported,
221 Vector<Size>& sizes) {
222
223 *isSetVideoSizeSupported = true;
224 params.getSupportedVideoSizes(sizes);
225 if (sizes.size() == 0) {
226 ALOGD("Camera does not support setVideoSize()");
227 params.getSupportedPreviewSizes(sizes);
228 *isSetVideoSizeSupported = false;
229 }
230 }
231
232 /*
233 * Check whether the camera has the supported color format
234 * @param params CameraParameters to retrieve the information
235 * @return OK if no error.
236 */
isCameraColorFormatSupported(const CameraParameters & params)237 status_t CameraSource::isCameraColorFormatSupported(
238 const CameraParameters& params) {
239 mColorFormat = getColorFormat(params.get(
240 CameraParameters::KEY_VIDEO_FRAME_FORMAT));
241 if (mColorFormat == -1) {
242 return BAD_VALUE;
243 }
244 return OK;
245 }
246
247 /*
248 * Configure the camera to use the requested video size
249 * (width and height) and/or frame rate. If both width and
250 * height are -1, configuration on the video size is skipped.
251 * if frameRate is -1, configuration on the frame rate
252 * is skipped. Skipping the configuration allows one to
253 * use the current camera setting without the need to
254 * actually know the specific values (see Create() method).
255 *
256 * @param params the CameraParameters to be configured
257 * @param width the target video frame width in pixels
258 * @param height the target video frame height in pixels
259 * @param frameRate the target frame rate in frames per second.
260 * @return OK if no error.
261 */
configureCamera(CameraParameters * params,int32_t width,int32_t height,int32_t frameRate)262 status_t CameraSource::configureCamera(
263 CameraParameters* params,
264 int32_t width, int32_t height,
265 int32_t frameRate) {
266 ALOGV("configureCamera");
267 Vector<Size> sizes;
268 bool isSetVideoSizeSupportedByCamera = true;
269 getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
270 bool isCameraParamChanged = false;
271 if (width != -1 && height != -1) {
272 if (!isVideoSizeSupported(width, height, sizes)) {
273 ALOGE("Video dimension (%dx%d) is unsupported", width, height);
274 return BAD_VALUE;
275 }
276 if (isSetVideoSizeSupportedByCamera) {
277 params->setVideoSize(width, height);
278 } else {
279 params->setPreviewSize(width, height);
280 }
281 isCameraParamChanged = true;
282 } else if ((width == -1 && height != -1) ||
283 (width != -1 && height == -1)) {
284 // If one and only one of the width and height is -1
285 // we reject such a request.
286 ALOGE("Requested video size (%dx%d) is not supported", width, height);
287 return BAD_VALUE;
288 } else { // width == -1 && height == -1
289 // Do not configure the camera.
290 // Use the current width and height value setting from the camera.
291 }
292
293 if (frameRate != -1) {
294 CHECK(frameRate > 0 && frameRate <= 120);
295 const char* supportedFrameRates =
296 params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
297 CHECK(supportedFrameRates != NULL);
298 ALOGV("Supported frame rates: %s", supportedFrameRates);
299 char buf[4];
300 snprintf(buf, 4, "%d", frameRate);
301 if (strstr(supportedFrameRates, buf) == NULL) {
302 ALOGE("Requested frame rate (%d) is not supported: %s",
303 frameRate, supportedFrameRates);
304 return BAD_VALUE;
305 }
306
307 // The frame rate is supported, set the camera to the requested value.
308 params->setPreviewFrameRate(frameRate);
309 isCameraParamChanged = true;
310 } else { // frameRate == -1
311 // Do not configure the camera.
312 // Use the current frame rate value setting from the camera
313 }
314
315 if (isCameraParamChanged) {
316 // Either frame rate or frame size needs to be changed.
317 String8 s = params->flatten();
318 if (OK != mCamera->setParameters(s)) {
319 ALOGE("Could not change settings."
320 " Someone else is using camera %p?", mCamera.get());
321 return -EBUSY;
322 }
323 }
324 return OK;
325 }
326
327 /*
328 * Check whether the requested video frame size
329 * has been successfully configured or not. If both width and height
330 * are -1, check on the current width and height value setting
331 * is performed.
332 *
333 * @param params CameraParameters to retrieve the information
334 * @param the target video frame width in pixels to check against
335 * @param the target video frame height in pixels to check against
336 * @return OK if no error
337 */
checkVideoSize(const CameraParameters & params,int32_t width,int32_t height)338 status_t CameraSource::checkVideoSize(
339 const CameraParameters& params,
340 int32_t width, int32_t height) {
341
342 ALOGV("checkVideoSize");
343 // The actual video size is the same as the preview size
344 // if the camera hal does not support separate video and
345 // preview output. In this case, we retrieve the video
346 // size from preview.
347 int32_t frameWidthActual = -1;
348 int32_t frameHeightActual = -1;
349 Vector<Size> sizes;
350 params.getSupportedVideoSizes(sizes);
351 if (sizes.size() == 0) {
352 // video size is the same as preview size
353 params.getPreviewSize(&frameWidthActual, &frameHeightActual);
354 } else {
355 // video size may not be the same as preview
356 params.getVideoSize(&frameWidthActual, &frameHeightActual);
357 }
358 if (frameWidthActual < 0 || frameHeightActual < 0) {
359 ALOGE("Failed to retrieve video frame size (%dx%d)",
360 frameWidthActual, frameHeightActual);
361 return UNKNOWN_ERROR;
362 }
363
364 // Check the actual video frame size against the target/requested
365 // video frame size.
366 if (width != -1 && height != -1) {
367 if (frameWidthActual != width || frameHeightActual != height) {
368 ALOGE("Failed to set video frame size to %dx%d. "
369 "The actual video size is %dx%d ", width, height,
370 frameWidthActual, frameHeightActual);
371 return UNKNOWN_ERROR;
372 }
373 }
374
375 // Good now.
376 mVideoSize.width = frameWidthActual;
377 mVideoSize.height = frameHeightActual;
378 return OK;
379 }
380
381 /*
382 * Check the requested frame rate has been successfully configured or not.
383 * If the target frameRate is -1, check on the current frame rate value
384 * setting is performed.
385 *
386 * @param params CameraParameters to retrieve the information
387 * @param the target video frame rate to check against
388 * @return OK if no error.
389 */
checkFrameRate(const CameraParameters & params,int32_t frameRate)390 status_t CameraSource::checkFrameRate(
391 const CameraParameters& params,
392 int32_t frameRate) {
393
394 ALOGV("checkFrameRate");
395 int32_t frameRateActual = params.getPreviewFrameRate();
396 if (frameRateActual < 0) {
397 ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
398 return UNKNOWN_ERROR;
399 }
400
401 // Check the actual video frame rate against the target/requested
402 // video frame rate.
403 if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
404 ALOGE("Failed to set preview frame rate to %d fps. The actual "
405 "frame rate is %d", frameRate, frameRateActual);
406 return UNKNOWN_ERROR;
407 }
408
409 // Good now.
410 mVideoFrameRate = frameRateActual;
411 return OK;
412 }
413
414 /*
415 * Initialize the CameraSource to so that it becomes
416 * ready for providing the video input streams as requested.
417 * @param camera the camera object used for the video source
418 * @param cameraId if camera == 0, use camera with this id
419 * as the video source
420 * @param videoSize the target video frame size. If both
421 * width and height in videoSize is -1, use the current
422 * width and heigth settings by the camera
423 * @param frameRate the target frame rate in frames per second.
424 * if it is -1, use the current camera frame rate setting.
425 * @param storeMetaDataInVideoBuffers request to store meta
426 * data or real YUV data in video buffers. Request to
427 * store meta data in video buffers may not be honored
428 * if the source does not support this feature.
429 *
430 * @return OK if no error.
431 */
init(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate)432 status_t CameraSource::init(
433 const sp<hardware::ICamera>& camera,
434 const sp<ICameraRecordingProxy>& proxy,
435 int32_t cameraId,
436 const String16& clientName,
437 uid_t clientUid,
438 pid_t clientPid,
439 Size videoSize,
440 int32_t frameRate) {
441
442 ALOGV("init");
443 status_t err = OK;
444 int64_t token = IPCThreadState::self()->clearCallingIdentity();
445 err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
446 videoSize, frameRate);
447 IPCThreadState::self()->restoreCallingIdentity(token);
448 return err;
449 }
450
createVideoBufferMemoryHeap(size_t size,uint32_t bufferCount)451 void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
452 mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
453 "StageFright-CameraSource-BufferHeap");
454 for (uint32_t i = 0; i < bufferCount; i++) {
455 mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
456 }
457 }
458
// Create the BufferQueue through which the camera delivers video
// buffers: sets up a BufferItemConsumer with the requested size,
// format and dataspace, points the camera's video output at the
// producer, allocates the VideoNativeMetadata heap, and starts the
// listener thread that receives frames.
// @return OK, ALREADY_EXISTS if called twice, or the first error from
//         consumer/camera configuration.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // Implementation-defined format means the HW encoder consumes the
    // buffers directly; otherwise the CPU reads them.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    // Reserve extra buffers for the consumer side on top of the
    // encoder's input buffers.
    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);

    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
524
initWithCameraAccess(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate)525 status_t CameraSource::initWithCameraAccess(
526 const sp<hardware::ICamera>& camera,
527 const sp<ICameraRecordingProxy>& proxy,
528 int32_t cameraId,
529 const String16& clientName,
530 uid_t clientUid,
531 pid_t clientPid,
532 Size videoSize,
533 int32_t frameRate) {
534 ALOGV("initWithCameraAccess");
535 status_t err = OK;
536
537 if ((err = isCameraAvailable(camera, proxy, cameraId,
538 clientName, clientUid, clientPid)) != OK) {
539 ALOGE("Camera connection could not be established.");
540 return err;
541 }
542 CameraParameters params(mCamera->getParameters());
543 if ((err = isCameraColorFormatSupported(params)) != OK) {
544 return err;
545 }
546
547 // Set the camera to use the requested video frame size
548 // and/or frame rate.
549 if ((err = configureCamera(¶ms,
550 videoSize.width, videoSize.height,
551 frameRate))) {
552 return err;
553 }
554
555 // Check on video frame size and frame rate.
556 CameraParameters newCameraParams(mCamera->getParameters());
557 if ((err = checkVideoSize(newCameraParams,
558 videoSize.width, videoSize.height)) != OK) {
559 return err;
560 }
561 if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
562 return err;
563 }
564
565 // Set the preview display. Skip this if mSurface is null because
566 // applications may already set a surface to the camera.
567 if (mSurface != NULL) {
568 // Surface may be set incorrectly or could already be used even if we just
569 // passed the lock/unlock check earlier by calling mCamera->setParameters().
570 if ((err = mCamera->setPreviewTarget(mSurface)) != OK) {
571 return err;
572 }
573 }
574
575 // Use buffer queue to receive video buffers from camera
576 err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
577 if (err != OK) {
578 ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
579 "%s (err=%d)", __FUNCTION__, strerror(-err), err);
580 return err;
581 }
582
583 int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
584 if (glitchDurationUs > mGlitchDurationThresholdUs) {
585 mGlitchDurationThresholdUs = glitchDurationUs;
586 }
587
588 // XXX: query camera for the stride and slice height
589 // when the capability becomes available.
590 mMeta = new MetaData;
591 mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
592 mMeta->setInt32(kKeyColorFormat, mColorFormat);
593 mMeta->setInt32(kKeyWidth, mVideoSize.width);
594 mMeta->setInt32(kKeyHeight, mVideoSize.height);
595 mMeta->setInt32(kKeyStride, mVideoSize.width);
596 mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
597 mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
598 return OK;
599 }
600
// Destructor: if recording was started, reset() performs the full
// teardown; otherwise, if init succeeded, we still hold the camera
// lock taken in isCameraAvailable() and must release it.
CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released(). This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}
611
startCameraRecording()612 status_t CameraSource::startCameraRecording() {
613 ALOGV("startCameraRecording");
614 // Reset the identity to the current thread because media server owns the
615 // camera and recording is started by the applications. The applications
616 // will connect to the camera in ICameraRecordingProxy::startRecording.
617 int64_t token = IPCThreadState::self()->clearCallingIdentity();
618 status_t err;
619
620 // Initialize buffer queue.
621 err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
622 (android_dataspace_t)mEncoderDataSpace,
623 mNumInputBuffers > 0 ? mNumInputBuffers : 1);
624 if (err != OK) {
625 ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
626 strerror(-err), err);
627 return err;
628 }
629
630 // Start data flow
631 err = OK;
632 if (mCameraFlags & FLAGS_HOT_CAMERA) {
633 mCamera->unlock();
634 mCamera.clear();
635 if ((err = mCameraRecordingProxy->startRecording()) != OK) {
636 ALOGE("Failed to start recording, received error: %s (%d)",
637 strerror(-err), err);
638 }
639 } else {
640 mCamera->startRecording();
641 if (!mCamera->recordingEnabled()) {
642 err = -EINVAL;
643 ALOGE("Failed to start recording");
644 }
645 }
646 IPCThreadState::self()->restoreCallingIdentity(token);
647 return err;
648 }
649
start(MetaData * meta)650 status_t CameraSource::start(MetaData *meta) {
651 ALOGV("start");
652 CHECK(!mStarted);
653 if (mInitCheck != OK) {
654 ALOGE("CameraSource is not initialized yet");
655 return mInitCheck;
656 }
657
658 if (property_get_bool("media.stagefright.record-stats", false)) {
659 mCollectStats = true;
660 }
661
662 mStartTimeUs = 0;
663 mNumInputBuffers = 0;
664 mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
665 mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;
666
667 if (meta) {
668 int64_t startTimeUs;
669 if (meta->findInt64(kKeyTime, &startTimeUs)) {
670 mStartTimeUs = startTimeUs;
671 }
672
673 int32_t nBuffers;
674 if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
675 CHECK_GT(nBuffers, 0);
676 mNumInputBuffers = nBuffers;
677 }
678
679 // apply encoder color format if specified
680 if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
681 ALOGI("Using encoder format: %#x", mEncoderFormat);
682 }
683 if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
684 ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
685 mBufferDataSpace = mEncoderDataSpace;
686 }
687 }
688
689 status_t err;
690 if ((err = startCameraRecording()) == OK) {
691 mStarted = true;
692 }
693
694 return err;
695 }
696
stopCameraRecording()697 void CameraSource::stopCameraRecording() {
698 ALOGV("stopCameraRecording");
699 if (mCameraFlags & FLAGS_HOT_CAMERA) {
700 if (mCameraRecordingProxy != 0) {
701 mCameraRecordingProxy->stopRecording();
702 }
703 } else {
704 if (mCamera != 0) {
705 mCamera->stopRecording();
706 }
707 }
708 }
709
// Release our camera resources: unlock (and, for a camera we connected
// ourselves, stop preview and disconnect), unlink the death notifier
// from the recording proxy, and clear all related state. The camera
// handle is detached under mLock but the binder calls are made outside
// it to avoid holding the lock across IPC.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        // Binder calls below run with our own identity, not the caller's.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            // Undo the linkToDeath() done in isCameraAvailable().
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
742
// Full teardown of a started source: mark stopped, drain queued and
// in-flight frames, stop camera recording, shut down the buffer-queue
// listener thread, and release the camera.
// @return OK (always).
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mEos = false;
        mStopSystemTimeUs = -1;
        // Wake any reader blocked in read() so it can observe !mStarted.
        mFrameAvailableCondition.signal();

        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        // Drop frames not yet handed to the encoder.
        releaseQueuedFrames();
        // Wait (with timeout) for the encoder to return outstanding frames
        // via signalBufferReturned().
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must be accounted for as encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    // Stop the frame-delivery thread outside mLock.
    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
799
// Return one recording frame to the buffer queue
// (VIDEO_BUFFER_MODE_BUFFER_QUEUE mode): validate that the IMemory
// belongs to our heap, look up the BufferItem recorded when the frame
// arrived, release it to the consumer, and recycle the IMemory slot.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
    if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
        // Frame was not allocated from our heap — drop the release.
        ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                heap->getHeapID(), mMemoryHeapBase->getHeapID());
        return;
    }

    // The IMemory payload is a VideoNativeMetadata naming the native
    // window buffer (see createVideoBufferMemoryHeap()).
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);

    // Find the corresponding buffer item for the native window buffer.
    ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
    if (index == NAME_NOT_FOUND) {
        ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
        return;
    }

    BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
    mReceivedBufferItemMap.removeItemsAt(index);
    mVideoBufferConsumer->releaseBuffer(buffer);
    // Make the IMemory slot reusable and wake anyone waiting for one.
    mMemoryBases.push_back(frame);
    mMemoryBaseAvailableCond.signal();
}
829
releaseQueuedFrames()830 void CameraSource::releaseQueuedFrames() {
831 List<sp<IMemory> >::iterator it;
832 while (!mFramesReceived.empty()) {
833 it = mFramesReceived.begin();
834 releaseRecordingFrame(*it);
835 mFramesReceived.erase(it);
836 ++mNumFramesDropped;
837 }
838 }
839
// Returns the stream format metadata built in initWithCameraAccess()
// (mime, color format, dimensions, stride, slice height, frame rate).
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
843
// Thin wrapper around releaseRecordingFrame() for a single frame.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
847
// MediaBufferObserver callback: the encoder is done with `buffer`.
// Find the matching in-flight frame by data pointer, release it back
// to the camera, count it as encoded, and wake reset() which may be
// waiting for in-flight frames to drain. Aborts if the buffer does not
// match any frame we handed out.
void CameraSource::signalBufferReturned(MediaBufferBase *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
        it != mFramesBeingEncoded.end(); ++it) {
        // Frames are matched by the raw data pointer wrapped in read().
        if ((*it)->unsecurePointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}
865
// MediaSource::read(): block until a frame is available (or EOS/stop),
// then hand the oldest received frame to the caller wrapped in a
// MediaBuffer. The frame moves to mFramesBeingEncoded until the caller
// returns it via signalBufferReturned(). Seeking is not supported.
// @return OK with *buffer set, OK with *buffer NULL after stop, or
//         ERROR_END_OF_STREAM.
status_t CameraSource::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Wait for a frame; on timeout, detect a dead recording proxy
        // (hot-camera app went away) and report end of stream.
        while (mStarted && !mEos && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            // Stopped while waiting: return OK with no buffer.
            return OK;
        }
        if (mFramesReceived.empty()) {
            // Woken by EOS with nothing queued.
            return ERROR_END_OF_STREAM;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        // TODO: Using unsecurePointer() has some associated security pitfalls
        // (see declaration for details).
        // Either document why it is safe in this case or address the
        // issue (e.g. by copying).
        *buffer = new MediaBuffer(frame->unsecurePointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
        // Propagate a mid-stream data space change to the encoder.
        if (mBufferDataSpace != mEncoderDataSpace) {
            ALOGD("Data space updated to %x", mBufferDataSpace);
            (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
            mEncoderDataSpace = mBufferDataSpace;
        }
    }
    return OK;
}
924
setStopTimeUs(int64_t stopTimeUs)925 status_t CameraSource::setStopTimeUs(int64_t stopTimeUs) {
926 Mutex::Autolock autoLock(mLock);
927 ALOGV("Set stoptime: %lld us", (long long)stopTimeUs);
928
929 if (stopTimeUs < -1) {
930 ALOGE("Invalid stop time %lld us", (long long)stopTimeUs);
931 return BAD_VALUE;
932 } else if (stopTimeUs == -1) {
933 ALOGI("reset stopTime to be -1");
934 }
935
936 mStopSystemTimeUs = stopTimeUs;
937 return OK;
938 }
939
// Decides, with mLock held, whether an incoming frame captured at
// timestampUs should be dropped instead of being queued for encoding.
// Also updates per-frame bookkeeping: mLastFrameTimestampUs, mNumGlitches,
// mFirstFrameTimeUs, and (on the first accepted frame) repurposes
// mStartTimeUs to hold the initial delay.
//
// @param timestampUs Capture timestamp of the frame, in microseconds.
// @return true if the frame should be dropped, false if it should be kept.
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Drop frames that arrive before start() or before the requested
    // start time (only relevant until the first frame is accepted).
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // Past the stop time set via setStopTimeUs(): mark end of stream and
    // wake up read(), which is possibly blocked waiting for frames.
    if (mStopSystemTimeUs != -1 && timestampUs >= mStopSystemTimeUs) {
        ALOGV("Drop Camera frame at %lld stop time: %lld us",
            (long long)timestampUs, (long long)mStopSystemTimeUs);
        mEos = true;
        mFrameAvailableCondition.signal();
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing; drop out-of-order frames.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // A large gap between consecutive frames is counted as a glitch.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // From here on, mStartTimeUs holds the initial delay
            // (capture time of first frame minus requested start time).
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
987
BufferQueueListener(const sp<BufferItemConsumer> & consumer,const sp<CameraSource> & cameraSource)988 CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
989 const sp<CameraSource>& cameraSource) {
990 mConsumer = consumer;
991 mConsumer->setFrameAvailableListener(this);
992 mCameraSource = cameraSource;
993 }
994
onFrameAvailable(const BufferItem &)995 void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
996 ALOGV("%s: onFrameAvailable", __FUNCTION__);
997
998 Mutex::Autolock l(mLock);
999
1000 if (!mFrameAvailable) {
1001 mFrameAvailable = true;
1002 mFrameAvailableSignal.signal();
1003 }
1004 }
1005
threadLoop()1006 bool CameraSource::BufferQueueListener::threadLoop() {
1007 if (mConsumer == nullptr || mCameraSource == nullptr) {
1008 return false;
1009 }
1010
1011 {
1012 Mutex::Autolock l(mLock);
1013 while (!mFrameAvailable) {
1014 if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1015 return true;
1016 }
1017 }
1018 mFrameAvailable = false;
1019 }
1020
1021 BufferItem buffer;
1022 while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1023 mCameraSource->processBufferQueueFrame(buffer);
1024 }
1025
1026 return true;
1027 }
1028
// Processes one buffer acquired from the video buffer queue: drops it when
// shouldSkipFrameLocked() says so, otherwise wraps the buffer's native
// window buffer into an available VideoNativeMetadata memory slot and
// queues it (with an adjusted timestamp) for consumption by read().
//
// @param buffer The acquired BufferItem; released back to the consumer on
//               every drop path. On success its graphic buffer is tracked
//               in mReceivedBufferItemMap until the encoder returns it.
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; internal bookkeeping uses us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    // Wait (bounded) for a free metadata slot; give up and drop the frame
    // if none is returned within the timeout.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());
    mBufferDataSpace = buffer.mDataSpace;

    // Fill the slot with a VideoNativeMetadata pointing at the buffer's
    // ANativeWindowBuffer; nFenceFd = -1 means no acquire fence to wait on.
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // Shift the capture timestamp by the initial delay stored in mStartTimeUs
    // (set by shouldSkipFrameLocked() on the first accepted frame).
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    // Wake up read(), which may be blocked waiting for a frame.
    mFrameAvailableCondition.signal();
}
1075
// Reports that buffers produced by this source carry VideoNativeMetadata
// (ANativeWindowBuffer pointers) rather than raw pixel data, matching the
// payloads filled in by processBufferQueueFrame().
MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
    ALOGV("metaDataStoredInVideoBuffers");

    return kMetadataBufferTypeANWBuffer;
}
1081
// Invoked when the camera recording proxy's binder dies. Only logs the
// event here; read() independently detects the dead proxy via
// isBinderAlive() on its wait timeout and returns ERROR_END_OF_STREAM.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1085
1086 } // namespace android
1087