1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2Client"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <inttypes.h>
22 #include <utils/Log.h>
23 #include <utils/Trace.h>
24
25 #include <cutils/properties.h>
26 #include <gui/Surface.h>
27 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
28
29 #include "api1/Camera2Client.h"
30
31 #include "api1/client2/StreamingProcessor.h"
32 #include "api1/client2/JpegProcessor.h"
33 #include "api1/client2/CaptureSequencer.h"
34 #include "api1/client2/CallbackProcessor.h"
35 #include "api1/client2/ZslProcessor.h"
36 #include "utils/CameraThreadState.h"
37
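// Debug logging helpers gated on the shared gLogLevel verbosity setting; ALOG1/ALOG2
// only emit output when the log level has been raised to 1 or 2 respectively.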
38 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
39 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
40
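// Fallback definition so deliberate switch-case fallthroughs (used in stopPreviewL)
// map to the C++17 [[fallthrough]] attribute when the macro is not already provided.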
41 #ifndef FALLTHROUGH_INTENDED
42 #define FALLTHROUGH_INTENDED [[fallthrough]]
43 #endif
44
45 namespace android {
46 using namespace camera2;
47
48 // Interface used by CameraService
49
50 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
51 const sp<hardware::ICameraClient>& cameraClient,
52 const String16& clientPackageName,
53 const String8& cameraDeviceId,
54 int api1CameraId,
55 int cameraFacing,
56 int clientPid,
57 uid_t clientUid,
58 int servicePid):
59 Camera2ClientBase(cameraService, cameraClient, clientPackageName,
60 cameraDeviceId, api1CameraId, cameraFacing,
61 clientPid, clientUid, servicePid),
62 mParameters(api1CameraId, cameraFacing)
63 {
64 ATRACE_CALL();
65
66 SharedParameters::Lock l(mParameters);
67 l.mParameters.state = Parameters::DISCONNECTED;
68 }
69
70 status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
71 return initializeImpl(manager, monitorTags);
72 }
73
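// Inspects the HAL's still-capture default request template to determine whether
// ZSL is enabled by default for this device.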
74 bool Camera2Client::isZslEnabledInStillTemplate() {
75 bool zslEnabled = false;
76 CameraMetadata stillTemplate;
77 status_t res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_STILL_CAPTURE, &stillTemplate);
78 if (res == OK) {
79 camera_metadata_entry_t enableZsl = stillTemplate.find(ANDROID_CONTROL_ENABLE_ZSL);
80 if (enableZsl.count == 1) {
81 zslEnabled = (enableZsl.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE);
82 }
83 }
84
85 return zslEnabled;
86 }
87
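// Shared initialization path: builds the default API1 parameters from the device's
// static info and starts the per-camera processor threads (frame, capture sequencer,
// JPEG, ZSL, and preview callback).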
88 template<typename TProviderPtr>
89 status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags)
90 {
91 ATRACE_CALL();
92 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
93 status_t res;
94
95 res = Camera2ClientBase::initialize(providerPtr, monitorTags);
96 if (res != OK) {
97 return res;
98 }
99
100 {
101 SharedParameters::Lock l(mParameters);
102
103 res = l.mParameters.initialize(mDevice.get(), mDeviceVersion);
104 if (res != OK) {
105 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
106 __FUNCTION__, mCameraId, strerror(-res), res);
107 return NO_INIT;
108 }
109
110 l.mParameters.isDeviceZslSupported = isZslEnabledInStillTemplate();
111 }
112
113 String8 threadName;
114
115 mStreamingProcessor = new StreamingProcessor(this);
116 threadName = String8::format("C2-%d-StreamProc",
117 mCameraId);
118
119 mFrameProcessor = new FrameProcessor(mDevice, this);
120 threadName = String8::format("C2-%d-FrameProc",
121 mCameraId);
122 mFrameProcessor->run(threadName.string());
123
124 mCaptureSequencer = new CaptureSequencer(this);
125 threadName = String8::format("C2-%d-CaptureSeq",
126 mCameraId);
127 mCaptureSequencer->run(threadName.string());
128
129 mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
130 threadName = String8::format("C2-%d-JpegProc",
131 mCameraId);
132 mJpegProcessor->run(threadName.string());
133
134 mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
135
136 threadName = String8::format("C2-%d-ZslProc",
137 mCameraId);
138 mZslProcessor->run(threadName.string());
139
140 mCallbackProcessor = new CallbackProcessor(this);
141 threadName = String8::format("C2-%d-CallbkProc",
142 mCameraId);
143 mCallbackProcessor->run(threadName.string());
144
145 if (gLogLevel >= 1) {
146 SharedParameters::Lock l(mParameters);
147 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
148 mCameraId);
149 ALOGD("%s", l.mParameters.paramsFlattened.string());
150 }
151
152 return OK;
153 }
154
155 Camera2Client::~Camera2Client() {
156 ATRACE_CALL();
157 ALOGV("~Camera2Client");
158
159 mDestructionStarted = true;
160
161 disconnect();
162
163 ALOGI("Camera %d: Closed", mCameraId);
164 }
165
166 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
167 return BasicClient::dump(fd, args);
168 }
169
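// Dumps client identity, the current API1 parameter state, active stream IDs, and
// device quirks, then delegates to the underlying device dump.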
170 status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
171 String8 result;
172 result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
173 (getRemoteCallback() != NULL ?
174 (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
175 mClientPid);
176 result.append(" State: ");
177 #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;
178
179 const Parameters& p = mParameters.unsafeAccess();
180
181 result.append(Parameters::getStateName(p.state));
182
183 result.append("\n Current parameters:\n");
184 result.appendFormat(" Preview size: %d x %d\n",
185 p.previewWidth, p.previewHeight);
186 result.appendFormat(" Preview FPS range: %d - %d\n",
187 p.previewFpsRange[0], p.previewFpsRange[1]);
188 result.appendFormat(" Preview HAL pixel format: 0x%x\n",
189 p.previewFormat);
190 result.appendFormat(" Preview transform: %x\n",
191 p.previewTransform);
192 result.appendFormat(" Picture size: %d x %d\n",
193 p.pictureWidth, p.pictureHeight);
194 result.appendFormat(" Jpeg thumbnail size: %d x %d\n",
195 p.jpegThumbSize[0], p.jpegThumbSize[1]);
196 result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n",
197 p.jpegQuality, p.jpegThumbQuality);
198 result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation);
199 result.appendFormat(" GPS tags %s\n",
200 p.gpsEnabled ? "enabled" : "disabled");
201 if (p.gpsEnabled) {
202 result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n",
203 p.gpsCoordinates[0], p.gpsCoordinates[1],
204 p.gpsCoordinates[2]);
205 result.appendFormat(" GPS timestamp: %" PRId64 "\n",
206 p.gpsTimestamp);
207 result.appendFormat(" GPS processing method: %s\n",
208 p.gpsProcessingMethod.string());
209 }
210
211 result.append(" White balance mode: ");
212 switch (p.wbMode) {
213 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
214 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
215 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
216 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
217 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
218 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
219 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
220 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
221 default: result.append("UNKNOWN\n");
222 }
223
224 result.append(" Effect mode: ");
225 switch (p.effectMode) {
226 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
227 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
228 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
229 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
230 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
231 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
232 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
233 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
234 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
235 default: result.append("UNKNOWN\n");
236 }
237
238 result.append(" Antibanding mode: ");
239 switch (p.antibandingMode) {
240 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
241 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
242 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
243 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
244 default: result.append("UNKNOWN\n");
245 }
246
247 result.append(" Scene mode: ");
248 switch (p.sceneMode) {
249 case ANDROID_CONTROL_SCENE_MODE_DISABLED:
250 result.append("AUTO\n"); break;
251 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
252 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
253 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
254 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)
255 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT)
256 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT)
257 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE)
258 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH)
259 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW)
260 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET)
261 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO)
262 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS)
263 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS)
264 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
265 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
266 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
267 default: result.append("UNKNOWN\n");
268 }
269
270 result.append(" Flash mode: ");
271 switch (p.flashMode) {
272 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
273 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
274 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON)
275 CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
276 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
277 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
278 default: result.append("UNKNOWN\n");
279 }
280
281 result.append(" Focus mode: ");
282 switch (p.focusMode) {
283 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
284 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
285 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO)
286 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE)
287 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF)
288 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
289 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
290 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
291 default: result.append("UNKNOWN\n");
292 }
293
294 result.append(" Focus state: ");
295 switch (p.focusState) {
296 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
297 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
298 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
299 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
300 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
301 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
302 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
303 default: result.append("UNKNOWN\n");
304 }
305
306 result.append(" Focusing areas:\n");
307 for (size_t i = 0; i < p.focusingAreas.size(); i++) {
308 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
309 p.focusingAreas[i].left,
310 p.focusingAreas[i].top,
311 p.focusingAreas[i].right,
312 p.focusingAreas[i].bottom,
313 p.focusingAreas[i].weight);
314 }
315
316 result.appendFormat(" Exposure compensation index: %d\n",
317 p.exposureCompensation);
318
319 result.appendFormat(" AE lock %s, AWB lock %s\n",
320 p.autoExposureLock ? "enabled" : "disabled",
321 p.autoWhiteBalanceLock ? "enabled" : "disabled" );
322
323 result.appendFormat(" Metering areas:\n");
324 for (size_t i = 0; i < p.meteringAreas.size(); i++) {
325 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
326 p.meteringAreas[i].left,
327 p.meteringAreas[i].top,
328 p.meteringAreas[i].right,
329 p.meteringAreas[i].bottom,
330 p.meteringAreas[i].weight);
331 }
332
333 result.appendFormat(" Zoom index: %d\n", p.zoom);
334 result.appendFormat(" Video size: %d x %d\n", p.videoWidth,
335 p.videoHeight);
336
337 result.appendFormat(" Recording hint is %s\n",
338 p.recordingHint ? "set" : "not set");
339
340 result.appendFormat(" Video stabilization is %s\n",
341 p.videoStabilization ? "enabled" : "disabled");
342
343 result.appendFormat(" Selected still capture FPS range: %d - %d\n",
344 p.fastInfo.bestStillCaptureFpsRange[0],
345 p.fastInfo.bestStillCaptureFpsRange[1]);
346
347 result.appendFormat(" Use zero shutter lag: %s\n",
348 p.useZeroShutterLag() ? "yes" : "no");
349
350 result.append(" Current streams:\n");
351 result.appendFormat(" Preview stream ID: %d\n",
352 getPreviewStreamId());
353 result.appendFormat(" Capture stream ID: %d\n",
354 getCaptureStreamId());
355 result.appendFormat(" Recording stream ID: %d\n",
356 getRecordingStreamId());
357
358 result.append(" Quirks for this camera:\n");
359 bool haveQuirk = false;
360 if (p.quirks.triggerAfWithAuto) {
361 result.appendFormat(" triggerAfWithAuto\n");
362 haveQuirk = true;
363 }
364 if (p.quirks.useZslFormat) {
365 result.appendFormat(" useZslFormat\n");
366 haveQuirk = true;
367 }
368 if (p.quirks.meteringCropRegion) {
369 result.appendFormat(" meteringCropRegion\n");
370 haveQuirk = true;
371 }
372 if (p.quirks.partialResults) {
373 result.appendFormat(" usePartialResult\n");
374 haveQuirk = true;
375 }
376 if (!haveQuirk) {
377 result.appendFormat(" none\n");
378 }
379
380 write(fd, result.string(), result.size());
381
382 mStreamingProcessor->dump(fd, args);
383
384 mCaptureSequencer->dump(fd, args);
385
386 mFrameProcessor->dump(fd, args);
387
388 mZslProcessor->dump(fd, args);
389
390 return dumpDevice(fd, args);
391 #undef CASE_APPEND_ENUM
392 }
393
394 // ICamera interface
395
396 binder::Status Camera2Client::disconnect() {
397 ATRACE_CALL();
398 Mutex::Autolock icl(mBinderSerializationLock);
399
400 binder::Status res = binder::Status::ok();
401 // Allow both client and the cameraserver to disconnect at all times
402 int callingPid = CameraThreadState::getCallingPid();
403 if (callingPid != mClientPid && callingPid != mServicePid) return res;
404
405 if (mDevice == 0) return res;
406
407 ALOGV("Camera %d: Shutting down", mCameraId);
408
409 /**
410 * disconnect() cannot call any methods that might need to promote a
411 * wp<Camera2Client>, since disconnect can be called from the destructor, at
412 * which point all such promotions will fail.
413 */
414
415 stopPreviewL();
416
417 {
418 SharedParameters::Lock l(mParameters);
419 if (l.mParameters.state == Parameters::DISCONNECTED) return res;
420 l.mParameters.state = Parameters::DISCONNECTED;
421 }
422
423 mFrameProcessor->requestExit();
424 mCaptureSequencer->requestExit();
425 mJpegProcessor->requestExit();
426 mZslProcessor->requestExit();
427 mCallbackProcessor->requestExit();
428
429 ALOGV("Camera %d: Waiting for threads", mCameraId);
430
431 {
432 // Don't wait with lock held, in case the other threads need to
433 // complete callbacks that re-enter Camera2Client
434 mBinderSerializationLock.unlock();
435
436 mFrameProcessor->join();
437 mCaptureSequencer->join();
438 mJpegProcessor->join();
439 mZslProcessor->join();
440 mCallbackProcessor->join();
441
442 mBinderSerializationLock.lock();
443 }
444
445 ALOGV("Camera %d: Deleting streams", mCameraId);
446
447 mStreamingProcessor->deletePreviewStream();
448 mStreamingProcessor->deleteRecordingStream();
449 mJpegProcessor->deleteStream();
450 mCallbackProcessor->deleteStream();
451 mZslProcessor->deleteStream();
452
453 ALOGV("Camera %d: Disconnecting device", mCameraId);
454
455 mDevice->disconnect();
456
457 CameraService::Client::disconnect();
458
459 return res;
460 }
461
462 status_t Camera2Client::connect(const sp<hardware::ICameraClient>& client) {
463 ATRACE_CALL();
464 ALOGV("%s: E", __FUNCTION__);
465 Mutex::Autolock icl(mBinderSerializationLock);
466
467 if (mClientPid != 0 && CameraThreadState::getCallingPid() != mClientPid) {
468 ALOGE("%s: Camera %d: Connection attempt from pid %d; "
469 "current locked to pid %d", __FUNCTION__,
470 mCameraId, CameraThreadState::getCallingPid(), mClientPid);
471 return BAD_VALUE;
472 }
473
474 mClientPid = CameraThreadState::getCallingPid();
475
476 mRemoteCallback = client;
477 mSharedCameraCallbacks = client;
478
479 return OK;
480 }
481
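// API1 lock()/unlock(): grants exclusive use of this client to a single process.
// The first caller claims the lock; other processes get EBUSY until it is released.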
482 status_t Camera2Client::lock() {
483 ATRACE_CALL();
484 ALOGV("%s: E", __FUNCTION__);
485 Mutex::Autolock icl(mBinderSerializationLock);
486 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
487 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
488
489 if (mClientPid == 0) {
490 mClientPid = CameraThreadState::getCallingPid();
491 return OK;
492 }
493
494 if (mClientPid != CameraThreadState::getCallingPid()) {
495 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
496 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
497 return EBUSY;
498 }
499
500 return OK;
501 }
502
503 status_t Camera2Client::unlock() {
504 ATRACE_CALL();
505 ALOGV("%s: E", __FUNCTION__);
506 Mutex::Autolock icl(mBinderSerializationLock);
507 ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
508 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
509
510 if (mClientPid == CameraThreadState::getCallingPid()) {
511 SharedParameters::Lock l(mParameters);
512 if (l.mParameters.state == Parameters::RECORD ||
513 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
514 ALOGD("Not allowed to unlock camera during recording.");
515 return INVALID_OPERATION;
516 }
517 mClientPid = 0;
518 mRemoteCallback.clear();
519 mSharedCameraCallbacks.clear();
520 return OK;
521 }
522
523 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
524 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
525 return EBUSY;
526 }
527
528 status_t Camera2Client::setPreviewTarget(
529 const sp<IGraphicBufferProducer>& bufferProducer) {
530 ATRACE_CALL();
531 ALOGV("%s: E", __FUNCTION__);
532 Mutex::Autolock icl(mBinderSerializationLock);
533 status_t res;
534 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
535
536 sp<IBinder> binder;
537 sp<Surface> window;
538 if (bufferProducer != 0) {
539 binder = IInterface::asBinder(bufferProducer);
540 // Using controlledByApp flag to ensure that the buffer queue remains in
541 // async mode for the old camera API, where many applications depend
542 // on that behavior.
543 window = new Surface(bufferProducer, /*controlledByApp*/ true);
544 }
545 return setPreviewWindowL(binder, window);
546 }
547
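// Swaps the preview output window. An active preview is stopped and restarted on the
// new surface; a preview that was waiting for a window is started immediately.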
548 status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
549 const sp<Surface>& window) {
550 ATRACE_CALL();
551 status_t res;
552
553 if (binder == mPreviewSurface) {
554 ALOGV("%s: Camera %d: New window is same as old window",
555 __FUNCTION__, mCameraId);
556 return NO_ERROR;
557 }
558
559 Parameters::State state;
560 {
561 SharedParameters::Lock l(mParameters);
562 state = l.mParameters.state;
563 }
564 switch (state) {
565 case Parameters::DISCONNECTED:
566 case Parameters::RECORD:
567 case Parameters::STILL_CAPTURE:
568 case Parameters::VIDEO_SNAPSHOT:
569 ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
570 __FUNCTION__, mCameraId,
571 Parameters::getStateName(state));
572 return INVALID_OPERATION;
573 case Parameters::STOPPED:
574 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
575 // OK
576 break;
577 case Parameters::PREVIEW:
578 // Already running preview - need to stop and create a new stream
579 res = stopStream();
580 if (res != OK) {
581 ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
582 __FUNCTION__, strerror(-res), res);
583 return res;
584 }
585 state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
586 break;
587 }
588
589 mPreviewSurface = binder;
590 res = mStreamingProcessor->setPreviewWindow(window);
591 if (res != OK) {
592 ALOGE("%s: Unable to set new preview window: %s (%d)",
593 __FUNCTION__, strerror(-res), res);
594 return res;
595 }
596
597 if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) {
598 SharedParameters::Lock l(mParameters);
599 l.mParameters.state = state;
600 return startPreviewL(l.mParameters, false);
601 }
602
603 return OK;
604 }
605
606 void Camera2Client::setPreviewCallbackFlag(int flag) {
607 ATRACE_CALL();
608 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
609 Mutex::Autolock icl(mBinderSerializationLock);
610
611 if ( checkPid(__FUNCTION__) != OK) return;
612
613 SharedParameters::Lock l(mParameters);
614 setPreviewCallbackFlagL(l.mParameters, flag);
615 }
616
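// Applies API1 preview-callback flag changes: clears any dedicated callback surface
// when flags are enabled, and refreshes the streaming request if preview is running.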
617 void Camera2Client::setPreviewCallbackFlagL(Parameters &params, int flag) {
618 status_t res = OK;
619
620 switch(params.state) {
621 case Parameters::STOPPED:
622 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
623 case Parameters::PREVIEW:
624 case Parameters::STILL_CAPTURE:
625 // OK
626 break;
627 default:
628 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
629 ALOGE("%s: Camera %d: Can't use preview callbacks "
630 "in state %d", __FUNCTION__, mCameraId, params.state);
631 return;
632 }
633 }
634
635 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
636 ALOGV("%s: setting oneshot", __FUNCTION__);
637 params.previewCallbackOneShot = true;
638 }
639 if (params.previewCallbackFlags != (uint32_t)flag) {
640
641 if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
642 // Disable any existing preview callback window when enabling
643 // preview callback flags
644 res = mCallbackProcessor->setCallbackWindow(NULL);
645 if (res != OK) {
646 ALOGE("%s: Camera %d: Unable to clear preview callback surface:"
647 " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
648 return;
649 }
650 params.previewCallbackSurface = false;
651 }
652
653 params.previewCallbackFlags = flag;
654
655 if (params.state == Parameters::PREVIEW) {
656 res = startPreviewL(params, true);
657 if (res != OK) {
658 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
659 __FUNCTION__, mCameraId,
660 Parameters::getStateName(params.state));
661 }
662 }
663 }
664 }
665
666 status_t Camera2Client::setPreviewCallbackTarget(
667 const sp<IGraphicBufferProducer>& callbackProducer) {
668 ATRACE_CALL();
669 ALOGV("%s: E", __FUNCTION__);
670 Mutex::Autolock icl(mBinderSerializationLock);
671 status_t res;
672 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
673
674 sp<Surface> window;
675 if (callbackProducer != 0) {
676 window = new Surface(callbackProducer);
677 }
678
679 res = mCallbackProcessor->setCallbackWindow(window);
680 if (res != OK) {
681 ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
682 __FUNCTION__, mCameraId, strerror(-res), res);
683 return res;
684 }
685
686 SharedParameters::Lock l(mParameters);
687
688 if (window != NULL) {
689 // Disable traditional callbacks when a valid callback target is given
690 l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
691 l.mParameters.previewCallbackOneShot = false;
692 l.mParameters.previewCallbackSurface = true;
693 } else {
694 // Disable callback target if given a NULL interface.
695 l.mParameters.previewCallbackSurface = false;
696 }
697
698 switch(l.mParameters.state) {
699 case Parameters::PREVIEW:
700 res = startPreviewL(l.mParameters, true);
701 break;
702 case Parameters::RECORD:
703 case Parameters::VIDEO_SNAPSHOT:
704 res = startRecordingL(l.mParameters, true);
705 break;
706 default:
707 break;
708 }
709 if (res != OK) {
710 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
711 __FUNCTION__, mCameraId,
712 Parameters::getStateName(l.mParameters.state));
713 }
714
715 return OK;
716 }
717
718
719 status_t Camera2Client::startPreview() {
720 ATRACE_CALL();
721 ALOGV("%s: E", __FUNCTION__);
722 Mutex::Autolock icl(mBinderSerializationLock);
723 status_t res;
724 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
725 SharedParameters::Lock l(mParameters);
726 return startPreviewL(l.mParameters, false);
727 }
728
729 status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
730 ATRACE_CALL();
731 status_t res;
732
733 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
734
735 if ( (params.state == Parameters::PREVIEW ||
736 params.state == Parameters::RECORD ||
737 params.state == Parameters::VIDEO_SNAPSHOT)
738 && !restart) {
739 // Silently succeed an attempt to re-enter a streaming state
740 ALOGI("%s: Camera %d: Preview already active, ignoring restart",
741 __FUNCTION__, mCameraId);
742 return OK;
743 }
744 if (params.state > Parameters::PREVIEW && !restart) {
745 ALOGE("%s: Can't start preview in state %s",
746 __FUNCTION__,
747 Parameters::getStateName(params.state));
748 return INVALID_OPERATION;
749 }
750
751 if (!mStreamingProcessor->haveValidPreviewWindow()) {
752 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
753 return OK;
754 }
755 params.state = Parameters::STOPPED;
756 int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();
757
758 res = mStreamingProcessor->updatePreviewStream(params);
759 if (res != OK) {
760 ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)",
761 __FUNCTION__, mCameraId, strerror(-res), res);
762 return res;
763 }
764
765 bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;
766
767 // We could wait to create the JPEG output stream until first actual use
768 // (first takePicture call). However, this would substantially increase the
769 // first capture latency on HAL3 devices.
770 // So create it unconditionally at preview start. As a drawback,
771 // this increases gralloc memory consumption for applications that don't
772 // ever take a picture. Do not create it here when the JPEG stream would slow
773 // down preview.
774 // TODO: Find a better compromise, though this likely would involve HAL
775 // changes.
776 int lastJpegStreamId = mJpegProcessor->getStreamId();
777 // If jpeg stream will slow down preview, make sure we remove it before starting preview
778 if (params.slowJpegMode) {
779 if (lastJpegStreamId != NO_STREAM) {
780 // Pause preview if we are streaming
781 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
782 if (activeRequestId != 0) {
783 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
784 if (res != OK) {
785 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
786 __FUNCTION__, mCameraId, strerror(-res), res);
787 }
788 res = mDevice->waitUntilDrained();
789 if (res != OK) {
790 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
791 __FUNCTION__, mCameraId, strerror(-res), res);
792 }
793 }
794
795 res = mJpegProcessor->deleteStream();
796
797 if (res != OK) {
798 ALOGE("%s: Camera %d: delete Jpeg stream failed: %s (%d)",
799 __FUNCTION__, mCameraId, strerror(-res), res);
800 }
801
802 if (activeRequestId != 0) {
803 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
804 if (res != OK) {
805 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
806 __FUNCTION__, mCameraId, strerror(-res), res);
807 }
808 }
809 }
810 } else {
811 res = updateProcessorStream(mJpegProcessor, params);
812 if (res != OK) {
813 ALOGE("%s: Camera %d: Can't pre-configure still image "
814 "stream: %s (%d)",
815 __FUNCTION__, mCameraId, strerror(-res), res);
816 return res;
817 }
818 }
819 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
820
821 Vector<int32_t> outputStreams;
822 bool callbacksEnabled = (params.previewCallbackFlags &
823 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
824 params.previewCallbackSurface;
825
826 if (callbacksEnabled) {
827 // Can't have recording stream hanging around when enabling callbacks,
828 // since it exceeds the max stream count on some devices.
829 if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
830 ALOGV("%s: Camera %d: Clearing out recording stream before "
831 "creating callback stream", __FUNCTION__, mCameraId);
832 res = mStreamingProcessor->stopStream();
833 if (res != OK) {
834 ALOGE("%s: Camera %d: Can't stop streaming to delete "
835 "recording stream", __FUNCTION__, mCameraId);
836 return res;
837 }
838 res = mStreamingProcessor->deleteRecordingStream();
839 if (res != OK) {
840 ALOGE("%s: Camera %d: Unable to delete recording stream before "
841 "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId,
842 strerror(-res), res);
843 return res;
844 }
845 }
846
847 res = mCallbackProcessor->updateStream(params);
848 if (res != OK) {
849 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
850 __FUNCTION__, mCameraId, strerror(-res), res);
851 return res;
852 }
853 outputStreams.push(getCallbackStreamId());
854 } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
855 /**
856 * Delete the unused callback stream when the preview stream has changed and
857 * callbacks are not enabled. No need to stop the preview stream, as preview
858 * is in the STOPPED state at this point.
859 */
860 ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId);
861 res = mCallbackProcessor->deleteStream();
862 if (res != OK) {
863 ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
864 __FUNCTION__, mCameraId, strerror(-res), res);
865 return res;
866 }
867 }
868
869 if (params.useZeroShutterLag() &&
870 getRecordingStreamId() == NO_STREAM) {
871 res = updateProcessorStream(mZslProcessor, params);
872 if (res != OK) {
873 ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
874 __FUNCTION__, mCameraId, strerror(-res), res);
875 return res;
876 }
877
878 if (jpegStreamChanged) {
879 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
880 __FUNCTION__, mCameraId);
881 mZslProcessor->clearZslQueue();
882 }
883 outputStreams.push(getZslStreamId());
884 } else {
885 mZslProcessor->deleteStream();
886 }
887
888 outputStreams.push(getPreviewStreamId());
889
890 if (params.isDeviceZslSupported) {
891 // If device ZSL is supported, resume preview buffers that may be paused
892 // during last takePicture().
893 mDevice->dropStreamBuffers(false, getPreviewStreamId());
894 }
895
896 if (!params.recordingHint) {
897 if (!restart) {
898 res = mStreamingProcessor->updatePreviewRequest(params);
899 if (res != OK) {
900 ALOGE("%s: Camera %d: Can't set up preview request: "
901 "%s (%d)", __FUNCTION__, mCameraId,
902 strerror(-res), res);
903 return res;
904 }
905 }
906 res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW,
907 outputStreams);
908 } else {
909 if (!restart) {
910 res = mStreamingProcessor->updateRecordingRequest(params);
911 if (res != OK) {
912 ALOGE("%s: Camera %d: Can't set up preview request with "
913 "record hint: %s (%d)", __FUNCTION__, mCameraId,
914 strerror(-res), res);
915 return res;
916 }
917 }
918 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
919 outputStreams);
920 }
921 if (res != OK) {
922 ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)",
923 __FUNCTION__, mCameraId, strerror(-res), res);
924 return res;
925 }
926
927 params.state = Parameters::PREVIEW;
928 return OK;
929 }
930
931 void Camera2Client::stopPreview() {
932 ATRACE_CALL();
933 ALOGV("%s: E", __FUNCTION__);
934 Mutex::Autolock icl(mBinderSerializationLock);
935 status_t res;
936 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
937 stopPreviewL();
938 }
939
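// Internal preview teardown: stops streaming, flushes and drains the device, and
// deletes any recording stream before marking the state as STOPPED.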
940 void Camera2Client::stopPreviewL() {
941 ATRACE_CALL();
942 status_t res;
943 const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
944 Parameters::State state;
945 {
946 SharedParameters::Lock l(mParameters);
947 state = l.mParameters.state;
948 }
949
950 switch (state) {
951 case Parameters::DISCONNECTED:
952 // Nothing to do.
953 break;
954 case Parameters::STOPPED:
955 case Parameters::VIDEO_SNAPSHOT:
956 case Parameters::STILL_CAPTURE:
957 mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout);
958 FALLTHROUGH_INTENDED;
959 case Parameters::RECORD:
960 case Parameters::PREVIEW:
961 syncWithDevice();
962 res = stopStream();
963 if (res != OK) {
964 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
965 __FUNCTION__, mCameraId, strerror(-res), res);
966 }
967
968 // Flush all in-process captures and buffer in order to stop
969 // preview faster.
970 res = mDevice->flush();
971 if (res != OK) {
972 ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
973 __FUNCTION__, mCameraId, strerror(-res), res);
974 }
975
976 res = mDevice->waitUntilDrained();
977 if (res != OK) {
978 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
979 __FUNCTION__, mCameraId, strerror(-res), res);
980 }
981 // Clean up recording stream
982 res = mStreamingProcessor->deleteRecordingStream();
983 if (res != OK) {
984 ALOGE("%s: Camera %d: Unable to delete recording stream before "
985 "stop preview: %s (%d)",
986 __FUNCTION__, mCameraId, strerror(-res), res);
987 }
988 FALLTHROUGH_INTENDED;
989 case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
990 SharedParameters::Lock l(mParameters);
991 l.mParameters.state = Parameters::STOPPED;
992 commandStopFaceDetectionL(l.mParameters);
993 break;
994 }
995 default:
996 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId,
997 state);
998 }
999 }
1000
1001 bool Camera2Client::previewEnabled() {
1002 ATRACE_CALL();
1003 Mutex::Autolock icl(mBinderSerializationLock);
1004 status_t res;
1005 if ( (res = checkPid(__FUNCTION__) ) != OK) return false;
1006
1007 SharedParameters::Lock l(mParameters);
1008 return l.mParameters.state == Parameters::PREVIEW;
1009 }
1010
1011 status_t Camera2Client::setVideoBufferMode(int32_t videoBufferMode) {
1012 ATRACE_CALL();
1013 Mutex::Autolock icl(mBinderSerializationLock);
1014 status_t res;
1015 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1016
1017 SharedParameters::Lock l(mParameters);
1018 switch (l.mParameters.state) {
1019 case Parameters::RECORD:
1020 case Parameters::VIDEO_SNAPSHOT:
1021 ALOGE("%s: Camera %d: Can't be called in state %s",
1022 __FUNCTION__, mCameraId,
1023 Parameters::getStateName(l.mParameters.state));
1024 return INVALID_OPERATION;
1025 default:
1026 // OK
1027 break;
1028 }
1029
1030 if (videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1031 ALOGE("%s: %d: Only video buffer queue is supported", __FUNCTION__, __LINE__);
1032 return BAD_VALUE;
1033 }
1034
1035 l.mParameters.videoBufferMode = videoBufferMode;
1036
1037 return OK;
1038 }
1039
1040 status_t Camera2Client::startRecording() {
1041 ATRACE_CALL();
1042 ALOGV("%s: E", __FUNCTION__);
1043 Mutex::Autolock icl(mBinderSerializationLock);
1044 status_t res;
1045 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1046 SharedParameters::Lock l(mParameters);
1047
1048 return startRecordingL(l.mParameters, false);
1049 }
1050
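// Transition from preview to recording. Callback and ZSL streams are torn down
// first, since not all devices can keep them configured alongside a recording
// stream, and the recording stream itself may need to be reconfigured.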
1051 status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
1052 status_t res = OK;
1053
1054 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
1055
1056 switch (params.state) {
1057 case Parameters::STOPPED:
1058 res = startPreviewL(params, false);
1059 if (res != OK) return res;
1060 // Make sure the first preview request is submitted to the HAL device to avoid
1061 // two consecutive configure_streams calls into the HAL.
1062 // TODO: Refactor this to avoid initial preview configuration.
1063 syncWithDevice();
1064 break;
1065 case Parameters::PREVIEW:
1066 // Ready to go
1067 break;
1068 case Parameters::RECORD:
1069 case Parameters::VIDEO_SNAPSHOT:
1070 // OK to call this when recording is already on, just skip unless
1071 // we're looking to restart
1072 if (!restart) return OK;
1073 break;
1074 default:
1075 ALOGE("%s: Camera %d: Can't start recording in state %s",
1076 __FUNCTION__, mCameraId,
1077 Parameters::getStateName(params.state));
1078 return INVALID_OPERATION;
1079 };
1080
1081 if (params.videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1082 ALOGE("%s: Camera %d: Recording only supports buffer queue mode, but "
1083 "mode %d was requested!", __FUNCTION__, mCameraId, params.videoBufferMode);
1084 return INVALID_OPERATION;
1085 }
1086
1087 if (!mStreamingProcessor->haveValidRecordingWindow()) {
1088 ALOGE("%s: No valid recording window", __FUNCTION__);
1089 return INVALID_OPERATION;
1090 }
1091
1092 if (!restart) {
1093 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1094 res = mStreamingProcessor->updateRecordingRequest(params);
1095 if (res != OK) {
1096 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
1097 __FUNCTION__, mCameraId, strerror(-res), res);
1098 return res;
1099 }
1100 }
1101
1102 // Not all devices can support a preview callback stream and a recording
1103 // stream at the same time, so assume none of them can.
1104 if (mCallbackProcessor->getStreamId() != NO_STREAM) {
1105 ALOGV("%s: Camera %d: Clearing out callback stream before "
1106 "creating recording stream", __FUNCTION__, mCameraId);
1107 res = mStreamingProcessor->stopStream();
1108 if (res != OK) {
1109 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1110 __FUNCTION__, mCameraId);
1111 return res;
1112 }
1113 res = mCallbackProcessor->deleteStream();
1114 if (res != OK) {
1115 ALOGE("%s: Camera %d: Unable to delete callback stream before "
1116 "record: %s (%d)", __FUNCTION__, mCameraId,
1117 strerror(-res), res);
1118 return res;
1119 }
1120 }
1121
1122 // Clean up ZSL before transitioning into recording
1123 if (mZslProcessor->getStreamId() != NO_STREAM) {
1124 ALOGV("%s: Camera %d: Clearing out zsl stream before "
1125 "creating recording stream", __FUNCTION__, mCameraId);
1126 res = mStreamingProcessor->stopStream();
1127 if (res != OK) {
1128 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1129 __FUNCTION__, mCameraId);
1130 return res;
1131 }
1132 res = mDevice->waitUntilDrained();
1133 if (res != OK) {
1134 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1135 __FUNCTION__, mCameraId, strerror(-res), res);
1136 }
1137 res = mZslProcessor->clearZslQueue();
1138 if (res != OK) {
1139 ALOGE("%s: Camera %d: Can't clear zsl queue",
1140 __FUNCTION__, mCameraId);
1141 return res;
1142 }
1143 res = mZslProcessor->deleteStream();
1144 if (res != OK) {
1145 ALOGE("%s: Camera %d: Unable to delete zsl stream before "
1146 "record: %s (%d)", __FUNCTION__, mCameraId,
1147 strerror(-res), res);
1148 return res;
1149 }
1150 }
1151
1152 // Disable callbacks if they're enabled; can't record and use callbacks,
1153 // and we can't fail record start without stagefright asserting.
1154 params.previewCallbackFlags = 0;
1155
1156 // May need to reconfigure video snapshot JPEG sizes
1157 // during recording startup, so need a more complex sequence here to
1158 // ensure an early stream reconfiguration doesn't happen
1159 bool recordingStreamNeedsUpdate;
1160 res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
1161 if (res != OK) {
1162 ALOGE("%s: Camera %d: Can't query recording stream",
1163 __FUNCTION__, mCameraId);
1164 return res;
1165 }
1166
1167 if (recordingStreamNeedsUpdate) {
1168 // Need to stop stream here so updateProcessorStream won't trigger configureStream
1169 // Right now camera device cannot handle configureStream failure gracefully
1170 // when device is streaming
1171 res = mStreamingProcessor->stopStream();
1172 if (res != OK) {
1173 ALOGE("%s: Camera %d: Can't stop streaming to update record "
1174 "stream", __FUNCTION__, mCameraId);
1175 return res;
1176 }
1177 res = mDevice->waitUntilDrained();
1178 if (res != OK) {
1179 ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
1180 "%s (%d)", __FUNCTION__, mCameraId,
1181 strerror(-res), res);
1182 }
1183
1184 res = updateProcessorStream<
1185 StreamingProcessor,
1186 &StreamingProcessor::updateRecordingStream>(
1187 mStreamingProcessor,
1188 params);
1189 if (res != OK) {
1190 ALOGE("%s: Camera %d: Unable to update recording stream: "
1191 "%s (%d)", __FUNCTION__, mCameraId,
1192 strerror(-res), res);
1193 return res;
1194 }
1195 }
1196
1197 Vector<int32_t> outputStreams;
1198 outputStreams.push(getPreviewStreamId());
1199 outputStreams.push(getRecordingStreamId());
1200
1201 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1202 outputStreams);
1203
1204 // startStream might trigger a configureStream call, and the device might fail
1205 // configureStream when the JPEG size is larger than the video size. Try again
1206 // with the JPEG size overridden to the video size.
1207 if (res == BAD_VALUE) {
1208 overrideVideoSnapshotSize(params);
1209 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1210 outputStreams);
1211 }
1212
1213 if (res != OK) {
1214 ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)",
1215 __FUNCTION__, mCameraId, strerror(-res), res);
1216 return res;
1217 }
1218
1219 if (params.state < Parameters::RECORD) {
1220 params.state = Parameters::RECORD;
1221 }
1222
1223 return OK;
1224 }
1225
1226 void Camera2Client::stopRecording() {
1227 ATRACE_CALL();
1228 ALOGV("%s: E", __FUNCTION__);
1229 Mutex::Autolock icl(mBinderSerializationLock);
1230 SharedParameters::Lock l(mParameters);
1231
1232 status_t res;
1233 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
1234
1235 switch (l.mParameters.state) {
1236 case Parameters::RECORD:
1237 // OK to stop
1238 break;
1239 case Parameters::STOPPED:
1240 case Parameters::PREVIEW:
1241 case Parameters::STILL_CAPTURE:
1242 case Parameters::VIDEO_SNAPSHOT:
1243 default:
1244 ALOGE("%s: Camera %d: Can't stop recording in state %s",
1245 __FUNCTION__, mCameraId,
1246 Parameters::getStateName(l.mParameters.state));
1247 return;
1248 };
1249
1250 sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
1251
1252 // Remove recording stream because the video target may be abandoned soon.
1253 res = stopStream();
1254 if (res != OK) {
1255 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1256 __FUNCTION__, mCameraId, strerror(-res), res);
1257 }
1258
1259 res = mDevice->waitUntilDrained();
1260 if (res != OK) {
1261 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1262 __FUNCTION__, mCameraId, strerror(-res), res);
1263 }
1264 // Clean up recording stream
1265 res = mStreamingProcessor->deleteRecordingStream();
1266 if (res != OK) {
1267 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1268 "stop preview: %s (%d)",
1269 __FUNCTION__, mCameraId, strerror(-res), res);
1270 }
1271 l.mParameters.recoverOverriddenJpegSize();
1272
1273 // Restart preview
1274 res = startPreviewL(l.mParameters, true);
1275 if (res != OK) {
1276 ALOGE("%s: Camera %d: Unable to return to preview",
1277 __FUNCTION__, mCameraId);
1278 }
1279 }
1280
1281 bool Camera2Client::recordingEnabled() {
1282 ATRACE_CALL();
1283 Mutex::Autolock icl(mBinderSerializationLock);
1284
1285 if ( checkPid(__FUNCTION__) != OK) return false;
1286
1287 return recordingEnabledL();
1288 }
1289
1290 bool Camera2Client::recordingEnabledL() {
1291 ATRACE_CALL();
1292 SharedParameters::Lock l(mParameters);
1293
1294 return (l.mParameters.state == Parameters::RECORD
1295 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
1296 }
1297
1298 void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
1299 (void)mem;
1300 ATRACE_CALL();
1301 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1302 }
1303
1304 void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
1305 (void)handle;
1306 ATRACE_CALL();
1307 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1308 }
1309
1310 void Camera2Client::releaseRecordingFrameHandleBatch(
1311 const std::vector<native_handle_t*>& handles) {
1312 (void)handles;
1313 ATRACE_CALL();
1314 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1315 }
1316
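// Triggers a single autofocus scan. For fixed/infinity focus modes, or when
// continuous AF has already locked, the success callback is sent back immediately
// without bothering the HAL.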
1317 status_t Camera2Client::autoFocus() {
1318 ATRACE_CALL();
1319 Mutex::Autolock icl(mBinderSerializationLock);
1320 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1321 status_t res;
1322 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1323
1324 int triggerId;
1325 bool notifyImmediately = false;
1326 bool notifySuccess = false;
1327 {
1328 SharedParameters::Lock l(mParameters);
1329 if (l.mParameters.state < Parameters::PREVIEW) {
1330 ALOGE("%s: Camera %d: autoFocus called while preview is inactive (state = %d).",
1331 __FUNCTION__, mCameraId, l.mParameters.state);
1332 return INVALID_OPERATION;
1333 }
1334
1335 /**
1336 * If the camera does not support auto-focus, it is a no-op and
1337 * onAutoFocus(boolean, Camera) callback will be called immediately
1338 * with a fake value of success set to true.
1339 *
1340 * Similarly, if focus mode is set to INFINITY, there's no reason to
1341 * bother the HAL.
1342 */
1343 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1344 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1345 notifyImmediately = true;
1346 notifySuccess = true;
1347 }
1348 /**
1349 * If we're in CAF mode, and AF has already been locked, just fire back
1350 * the callback right away; the HAL would not send a notification since
1351 * no state change would happen on a AF trigger.
1352 */
1353 if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE ||
1354 l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) &&
1355 l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) {
1356 notifyImmediately = true;
1357 notifySuccess = true;
1358 }
1359 /**
1360 * Send immediate notification back to client
1361 */
1362 if (notifyImmediately) {
1363 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1364 if (l.mRemoteCallback != 0) {
1365 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1366 notifySuccess ? 1 : 0, 0);
1367 }
1368 return OK;
1369 }
1370 /**
1371 * Handle quirk mode for AF in scene modes
1372 */
1373 if (l.mParameters.quirks.triggerAfWithAuto &&
1374 l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
1375 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
1376 !l.mParameters.focusingAreas[0].isEmpty()) {
1377 ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
1378 __FUNCTION__, l.mParameters.focusMode);
1379 l.mParameters.shadowFocusMode = l.mParameters.focusMode;
1380 l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO;
1381 updateRequests(l.mParameters);
1382 }
1383
1384 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
1385 triggerId = l.mParameters.currentAfTriggerId;
1386 }
1387 ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);
1388
1389 syncWithDevice();
1390
1391 mDevice->triggerAutofocus(triggerId);
1392
1393 return OK;
1394 }
1395
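// Cancels a pending AF trigger and, when the triggerAfWithAuto quirk temporarily
// switched to AUTO focus, restores the focus mode the application originally set.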
1396 status_t Camera2Client::cancelAutoFocus() {
1397 ATRACE_CALL();
1398 Mutex::Autolock icl(mBinderSerializationLock);
1399 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1400 status_t res;
1401 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1402
1403 int triggerId;
1404 {
1405 SharedParameters::Lock l(mParameters);
1406 // Canceling does nothing in FIXED or INFINITY modes
1407 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1408 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1409 return OK;
1410 }
1411
1412 // An active AF trigger is canceled
1413 if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
1414 ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
1415 }
1416
1417 triggerId = ++l.mParameters.afTriggerCounter;
1418
1419 // When using triggerAfWithAuto quirk, may need to reset focus mode to
1420 // the real state at this point. No need to cancel explicitly if
1421 // changing the AF mode.
1422 if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) {
1423 ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__,
1424 l.mParameters.shadowFocusMode);
1425 l.mParameters.focusMode = l.mParameters.shadowFocusMode;
1426 l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID;
1427 updateRequests(l.mParameters);
1428
1429 return OK;
1430 }
1431 if (l.mParameters.allowZslMode) {
1432 mZslProcessor->clearZslQueue();
1433 }
1434 }
1435 syncWithDevice();
1436
1437 mDevice->triggerCancelAutofocus(triggerId);
1438
1439 return OK;
1440 }
1441
1442 status_t Camera2Client::takePicture(int /*msgType*/) {
1443 ATRACE_CALL();
1444 Mutex::Autolock icl(mBinderSerializationLock);
1445 status_t res;
1446 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1447
1448 int takePictureCounter;
1449 bool shouldSyncWithDevice = true;
1450 {
1451 SharedParameters::Lock l(mParameters);
1452 switch (l.mParameters.state) {
1453 case Parameters::DISCONNECTED:
1454 case Parameters::STOPPED:
1455 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1456 ALOGE("%s: Camera %d: Cannot take picture without preview enabled",
1457 __FUNCTION__, mCameraId);
1458 return INVALID_OPERATION;
1459 case Parameters::PREVIEW:
1460 // Good to go for takePicture
1461 res = commandStopFaceDetectionL(l.mParameters);
1462 if (res != OK) {
1463 ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
1464 __FUNCTION__, mCameraId);
1465 return res;
1466 }
1467 l.mParameters.state = Parameters::STILL_CAPTURE;
1468
1469 // Remove recording stream to prevent video snapshot jpeg logic kicking in
1470 if (l.mParameters.isJpegSizeOverridden() &&
1471 mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
1472 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1473 if (res != OK) {
1474 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1475 __FUNCTION__, mCameraId, strerror(-res), res);
1476 }
1477 res = mDevice->waitUntilDrained();
1478 if (res != OK) {
1479 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1480 __FUNCTION__, mCameraId, strerror(-res), res);
1481 }
1482 // Clean up recording stream
1483 res = mStreamingProcessor->deleteRecordingStream();
1484 if (res != OK) {
1485 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1486 "stop preview: %s (%d)",
1487 __FUNCTION__, mCameraId, strerror(-res), res);
1488 }
1489 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
1490 if (res != OK) {
1491 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
1492 __FUNCTION__, mCameraId, strerror(-res), res);
1493 }
1494 l.mParameters.recoverOverriddenJpegSize();
1495 }
1496 break;
1497 case Parameters::RECORD:
1498 // Good to go for video snapshot
1499 l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
1500 break;
1501 case Parameters::STILL_CAPTURE:
1502 case Parameters::VIDEO_SNAPSHOT:
1503 ALOGE("%s: Camera %d: Already taking a picture",
1504 __FUNCTION__, mCameraId);
1505 return INVALID_OPERATION;
1506 }
1507
1508 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
1509 int lastJpegStreamId = mJpegProcessor->getStreamId();
1510 // slowJpegMode will create jpeg stream in CaptureSequencer before capturing
1511 if (!l.mParameters.slowJpegMode) {
1512 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1513 }
1514
1515 // If the video snapshot fails to configure the stream, retry with the video
1516 // snapshot size overridden to the video size.
1517 if (res == BAD_VALUE && l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
1518 overrideVideoSnapshotSize(l.mParameters);
1519 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1520 }
1521 if (res != OK) {
1522 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
1523 __FUNCTION__, mCameraId, strerror(-res), res);
1524 return res;
1525 }
1526 takePictureCounter = ++l.mParameters.takePictureCounter;
1527
1528 // Clear ZSL buffer queue when Jpeg size is changed.
1529 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
1530 if (l.mParameters.allowZslMode && jpegStreamChanged) {
1531 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
1532 __FUNCTION__, mCameraId);
1533 mZslProcessor->clearZslQueue();
1534 }
1535
1536 // We should always sync with the device if flash is turned on,
1537 // if the camera device suggests that flash is needed (AE state FLASH_REQUIRED),
1538 // or if we are in some AE state other than CONVERGED that may need a
1539 // precapture trigger.
1540 if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON &&
1541 (l.mParameters.aeState == ANDROID_CONTROL_AE_STATE_CONVERGED)) {
1542 shouldSyncWithDevice = false;
1543 }
1544 }
1545
1546 ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);
1547
1548 // Make sure HAL has correct settings in case precapture trigger is needed.
1549 if (shouldSyncWithDevice) {
1550 syncWithDevice();
1551 }
1552
1553 res = mCaptureSequencer->startCapture();
1554 if (res != OK) {
1555 ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
1556 __FUNCTION__, mCameraId, strerror(-res), res);
1557 }
1558
1559 return res;
1560 }
1561
1562 status_t Camera2Client::setParameters(const String8& params) {
1563 ATRACE_CALL();
1564 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1565 Mutex::Autolock icl(mBinderSerializationLock);
1566 status_t res;
1567 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1568
1569 SharedParameters::Lock l(mParameters);
1570
1571 Parameters::focusMode_t focusModeBefore = l.mParameters.focusMode;
1572 res = l.mParameters.set(params);
1573 if (res != OK) return res;
1574 Parameters::focusMode_t focusModeAfter = l.mParameters.focusMode;
1575
1576 if (l.mParameters.allowZslMode && focusModeAfter != focusModeBefore) {
1577 mZslProcessor->clearZslQueue();
1578 }
1579
1580 res = updateRequests(l.mParameters);
1581
1582 return res;
1583 }
1584
1585 String8 Camera2Client::getParameters() const {
1586 ATRACE_CALL();
1587 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1588 Mutex::Autolock icl(mBinderSerializationLock);
1589 // The camera service can unconditionally get the parameters at all times
1590 if (CameraThreadState::getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
1591
1592 SharedParameters::ReadLock l(mParameters);
1593
1594 return l.mParameters.get();
1595 }
1596
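// Dispatcher for legacy API1 CAMERA_CMD_* commands received via sendCommand().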
1597 status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
1598 ATRACE_CALL();
1599 Mutex::Autolock icl(mBinderSerializationLock);
1600 status_t res;
1601 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1602
1603 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId,
1604 cmd, arg1, arg2);
1605
1606 switch (cmd) {
1607 case CAMERA_CMD_START_SMOOTH_ZOOM:
1608 return commandStartSmoothZoomL();
1609 case CAMERA_CMD_STOP_SMOOTH_ZOOM:
1610 return commandStopSmoothZoomL();
1611 case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
1612 return commandSetDisplayOrientationL(arg1);
1613 case CAMERA_CMD_ENABLE_SHUTTER_SOUND:
1614 return commandEnableShutterSoundL(arg1 == 1);
1615 case CAMERA_CMD_PLAY_RECORDING_SOUND:
1616 return commandPlayRecordingSoundL();
1617 case CAMERA_CMD_START_FACE_DETECTION:
1618 return commandStartFaceDetectionL(arg1);
1619 case CAMERA_CMD_STOP_FACE_DETECTION: {
1620 SharedParameters::Lock l(mParameters);
1621 return commandStopFaceDetectionL(l.mParameters);
1622 }
1623 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
1624 return commandEnableFocusMoveMsgL(arg1 == 1);
1625 case CAMERA_CMD_PING:
1626 return commandPingL();
1627 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
1628 case CAMERA_CMD_SET_VIDEO_FORMAT:
1629 ALOGE("%s: command %d (arguments %d, %d) is not supported.",
1630 __FUNCTION__, cmd, arg1, arg2);
1631 return BAD_VALUE;
1632 default:
1633 ALOGE("%s: Unknown command %d (arguments %d, %d)",
1634 __FUNCTION__, cmd, arg1, arg2);
1635 return BAD_VALUE;
1636 }
1637 }
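// Illustrative call path (assumed, not defined in this file): an API1 app call
// such as android.hardware.Camera#setDisplayOrientation(90) is routed by the
// framework through ICamera::sendCommand(CAMERA_CMD_SET_DISPLAY_ORIENTATION,
// 90, 0) and lands in the switch above; commands this shim does not support
// fall through to the BAD_VALUE branches.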
1638
1639 status_t Camera2Client::commandStartSmoothZoomL() {
1640 ALOGE("%s: Unimplemented!", __FUNCTION__);
1641 return OK;
1642 }
1643
1644 status_t Camera2Client::commandStopSmoothZoomL() {
1645 ALOGE("%s: Unimplemented!", __FUNCTION__);
1646 return OK;
1647 }
1648
1649 status_t Camera2Client::commandSetDisplayOrientationL(int degrees) {
1650 int transform = Parameters::degToTransform(degrees,
1651 mCameraFacing == CAMERA_FACING_FRONT);
1652 if (transform == -1) {
1653 ALOGE("%s: Camera %d: Error setting %d as display orientation value",
1654 __FUNCTION__, mCameraId, degrees);
1655 return BAD_VALUE;
1656 }
1657 SharedParameters::Lock l(mParameters);
1658 if (transform != l.mParameters.previewTransform &&
1659 getPreviewStreamId() != NO_STREAM) {
1660 mDevice->setStreamTransform(getPreviewStreamId(), transform);
1661 }
1662 l.mParameters.previewTransform = transform;
1663 return OK;
1664 }
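// Note (behavior inferred, not verified here): only the four API1 orientations
// (0, 90, 180, 270) map to a valid transform; Parameters::degToTransform()
// reports anything else as -1, which is rejected above. The front-facing flag
// presumably selects a mirrored transform so the preview matches what the
// user sees in a mirror.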
1665
1666 status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
1667 SharedParameters::Lock l(mParameters);
1668 if (enable) {
1669 l.mParameters.playShutterSound = true;
1670 return OK;
1671 }
1672
1673 l.mParameters.playShutterSound = false;
1674 return OK;
1675 }
1676
1677 status_t Camera2Client::commandPlayRecordingSoundL() {
1678 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1679 return OK;
1680 }
1681
1682 status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
1683 ALOGV("%s: Camera %d: Starting face detection",
1684 __FUNCTION__, mCameraId);
1685 status_t res;
1686 SharedParameters::Lock l(mParameters);
1687 switch (l.mParameters.state) {
1688 case Parameters::DISCONNECTED:
1689 case Parameters::STOPPED:
1690 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1691 case Parameters::STILL_CAPTURE:
1692 ALOGE("%s: Camera %d: Cannot start face detection without preview active",
1693 __FUNCTION__, mCameraId);
1694 return INVALID_OPERATION;
1695 case Parameters::PREVIEW:
1696 case Parameters::RECORD:
1697 case Parameters::VIDEO_SNAPSHOT:
1698 // Good to go for starting face detect
1699 break;
1700 }
1701 // Ignoring type
1702 if (l.mParameters.fastInfo.bestFaceDetectMode ==
1703 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
1704 ALOGE("%s: Camera %d: Face detection not supported",
1705 __FUNCTION__, mCameraId);
1706 return BAD_VALUE;
1707 }
1708 if (l.mParameters.enableFaceDetect) return OK;
1709
1710 l.mParameters.enableFaceDetect = true;
1711
1712 res = updateRequests(l.mParameters);
1713
1714 return res;
1715 }
1716
1717 status_t Camera2Client::commandStopFaceDetectionL(Parameters &params) {
1718 status_t res = OK;
1719 ALOGV("%s: Camera %d: Stopping face detection",
1720 __FUNCTION__, mCameraId);
1721
1722 if (!params.enableFaceDetect) return OK;
1723
1724 params.enableFaceDetect = false;
1725
1726 if (params.state == Parameters::PREVIEW
1727 || params.state == Parameters::RECORD
1728 || params.state == Parameters::VIDEO_SNAPSHOT) {
1729 res = updateRequests(params);
1730 }
1731
1732 return res;
1733 }
1734
1735 status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) {
1736 SharedParameters::Lock l(mParameters);
1737 l.mParameters.enableFocusMoveMessages = enable;
1738
1739 return OK;
1740 }
1741
1742 status_t Camera2Client::commandPingL() {
1743 // Always ping back if access is proper and device is alive
1744 SharedParameters::Lock l(mParameters);
1745 if (l.mParameters.state != Parameters::DISCONNECTED) {
1746 return OK;
1747 } else {
1748 return NO_INIT;
1749 }
1750 }
1751
1752 void Camera2Client::notifyError(int32_t errorCode,
1753 const CaptureResultExtras& resultExtras) {
1754 int32_t err = CAMERA_ERROR_UNKNOWN;
1755 switch(errorCode) {
1756 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
1757 err = CAMERA_ERROR_RELEASED;
1758 break;
1759 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
1760 err = CAMERA_ERROR_UNKNOWN;
1761 break;
1762 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
1763 err = CAMERA_ERROR_SERVER_DIED;
1764 break;
1765 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
1766 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
1767 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1768 ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
1769 __FUNCTION__, errorCode, resultExtras.requestId);
1770 mCaptureSequencer->notifyError(errorCode, resultExtras);
1771 return;
1772 default:
1773 err = CAMERA_ERROR_UNKNOWN;
1774 break;
1775 }
1776
1777 ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
1778 resultExtras.requestId);
1779
1780 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1781 if (l.mRemoteCallback != nullptr) {
1782 l.mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, err, 0);
1783 }
1784 }
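// Summary of the mapping above: fatal conditions are forwarded to the API1
// client as CAMERA_MSG_ERROR carrying CAMERA_ERROR_RELEASED,
// CAMERA_ERROR_UNKNOWN, or CAMERA_ERROR_SERVER_DIED, while per-request,
// per-result, and per-buffer errors are treated as recoverable and only
// handed to the capture sequencer.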
1785
1786
1787 /** Device-related methods */
1788 void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
1789 ALOGV("%s: Autofocus state now %d, last trigger %d",
1790 __FUNCTION__, newState, triggerId);
1791 bool sendCompletedMessage = false;
1792 bool sendMovingMessage = false;
1793
1794 bool success = false;
1795 bool afInMotion = false;
1796 {
1797 SharedParameters::Lock l(mParameters);
1798 // Trace end of AF state
1799 char tmp[32];
1800 if (l.mParameters.afStateCounter > 0) {
1801 camera_metadata_enum_snprint(
1802 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1803 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
1804 }
1805
1806 // Update state
1807 l.mParameters.focusState = newState;
1808 l.mParameters.afStateCounter++;
1809
1810 // Trace start of AF state
1811
1812 camera_metadata_enum_snprint(
1813 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1814 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
1815
1816 switch (l.mParameters.focusMode) {
1817 case Parameters::FOCUS_MODE_AUTO:
1818 case Parameters::FOCUS_MODE_MACRO:
1819 // Don't send notifications upstream if they're not for the current AF
1820 // trigger. For example, if cancel was called in between, or if we
1821 // already sent a notification about this AF call.
1822 if (triggerId != l.mParameters.currentAfTriggerId) break;
1823 switch (newState) {
1824 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1825 success = true;
1826 FALLTHROUGH_INTENDED;
1827 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1828 sendCompletedMessage = true;
1829 l.mParameters.currentAfTriggerId = -1;
1830 break;
1831 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1832 // Just starting focusing, ignore
1833 break;
1834 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1835 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1836 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1837 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1838 default:
1839 // Unexpected in AUTO/MACRO mode
1840 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
1841 __FUNCTION__, newState);
1842 break;
1843 }
1844 break;
1845 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
1846 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
1847 switch (newState) {
1848 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1849 success = true;
1850 FALLTHROUGH_INTENDED;
1851 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1852 // Don't send notifications upstream if they're not for
1853 // the current AF trigger. For example, if cancel was
1854 // called in between, or if we already sent a
1855 // notification about this AF call.
1856 // Send both an 'AF done' callback and an 'AF move' callback
1857 if (triggerId != l.mParameters.currentAfTriggerId) break;
1858 sendCompletedMessage = true;
1859 afInMotion = false;
1860 if (l.mParameters.enableFocusMoveMessages &&
1861 l.mParameters.afInMotion) {
1862 sendMovingMessage = true;
1863 }
1864 l.mParameters.currentAfTriggerId = -1;
1865 break;
1866 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1867 // Cancel was called, or we switched state; if the lens was
1868 // moving, send a final focus-move update
1869 afInMotion = false;
1870 if (l.mParameters.enableFocusMoveMessages &&
1871 l.mParameters.afInMotion) {
1872 sendMovingMessage = true;
1873 }
1874 break;
1875 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1876 // Start passive scan, inform upstream
1877 afInMotion = true;
1878 FALLTHROUGH_INTENDED;
1879 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1880 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1881 // Stop passive scan, inform upstream
1882 if (l.mParameters.enableFocusMoveMessages) {
1883 sendMovingMessage = true;
1884 }
1885 break;
1886 }
1887 l.mParameters.afInMotion = afInMotion;
1888 break;
1889 case Parameters::FOCUS_MODE_EDOF:
1890 case Parameters::FOCUS_MODE_INFINITY:
1891 case Parameters::FOCUS_MODE_FIXED:
1892 default:
1893 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
1894 ALOGE("%s: Unexpected AF state change %d "
1895 "(ID %d) in focus mode %d",
1896 __FUNCTION__, newState, triggerId,
1897 l.mParameters.focusMode);
1898 }
1899 }
1900 }
1901 if (sendMovingMessage) {
1902 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1903 if (l.mRemoteCallback != 0) {
1904 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
1905 afInMotion ? 1 : 0, 0);
1906 }
1907 }
1908 if (sendCompletedMessage) {
1909 ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
1910 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1911 if (l.mRemoteCallback != 0) {
1912 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1913 success ? 1 : 0, 0);
1914 }
1915 }
1916 }
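// Rough summary of the AF notification rules implemented above:
//   AUTO / MACRO:        FOCUSED_LOCKED or NOT_FOCUSED_LOCKED for the current
//                        trigger sends one CAMERA_MSG_FOCUS (success only for
//                        FOCUSED_LOCKED); other transitions are ignored.
//   CONTINUOUS_*:        PASSIVE_SCAN start/stop sends CAMERA_MSG_FOCUS_MOVE
//                        (when focus-move messages are enabled); a lock for the
//                        current trigger additionally sends CAMERA_MSG_FOCUS.
//   EDOF/INFINITY/FIXED: no transitions are expected other than INACTIVE.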
1917
1918 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
1919 ALOGV("%s: Autoexposure state now %d, last trigger %d",
1920 __FUNCTION__, newState, triggerId);
1921 {
1922 SharedParameters::Lock l(mParameters);
1923 // Update state
1924 l.mParameters.aeState = newState;
1925 }
1926 mCaptureSequencer->notifyAutoExposure(newState, triggerId);
1927 }
1928
1929 void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
1930 nsecs_t timestamp) {
1931 ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
1932 __FUNCTION__, resultExtras.requestId, timestamp);
1933 mCaptureSequencer->notifyShutter(resultExtras, timestamp);
1934
1935 Camera2ClientBase::notifyShutter(resultExtras, timestamp);
1936 }
1937
1938 camera2::SharedParameters& Camera2Client::getParameters() {
1939 return mParameters;
1940 }
1941
1942 int Camera2Client::getPreviewStreamId() const {
1943 return mStreamingProcessor->getPreviewStreamId();
1944 }
1945
1946 int Camera2Client::getCaptureStreamId() const {
1947 return mJpegProcessor->getStreamId();
1948 }
1949
1950 int Camera2Client::getCallbackStreamId() const {
1951 return mCallbackProcessor->getStreamId();
1952 }
1953
1954 int Camera2Client::getRecordingStreamId() const {
1955 return mStreamingProcessor->getRecordingStreamId();
1956 }
1957
1958 int Camera2Client::getZslStreamId() const {
1959 return mZslProcessor->getStreamId();
1960 }
1961
1962 status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId,
1963 const wp<camera2::FrameProcessor::FilteredListener>& listener, bool sendPartials) {
1964 return mFrameProcessor->registerListener(minId, maxId, listener, sendPartials);
1965 }
1966
1967 status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId,
1968 const wp<camera2::FrameProcessor::FilteredListener>& listener) {
1969 return mFrameProcessor->removeListener(minId, maxId, listener);
1970 }
1971
1972 status_t Camera2Client::stopStream() {
1973 return mStreamingProcessor->stopStream();
1974 }
1975
1976 status_t Camera2Client::createJpegStreamL(Parameters &params) {
1977 status_t res = OK;
1978 int lastJpegStreamId = mJpegProcessor->getStreamId();
1979 if (lastJpegStreamId != NO_STREAM) {
1980 return INVALID_OPERATION;
1981 }
1982
1983 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1984 if (res != OK) {
1985 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1986 __FUNCTION__, mCameraId, strerror(-res), res);
1987 return res;
1988 }
1989
1990 res = mDevice->flush();
1991 if (res != OK) {
1992 ALOGE("%s: Camera %d: Unable flush device: %s (%d)",
1993 __FUNCTION__, mCameraId, strerror(-res), res);
1994 return res;
1995 }
1996
1997 // Ideally we don't need this, but current camera device
1998 // status tracking mechanism demands it.
1999 res = mDevice->waitUntilDrained();
2000 if (res != OK) {
2001 ALOGE("%s: Camera %d: Waiting device drain failed: %s (%d)",
2002 __FUNCTION__, mCameraId, strerror(-res), res);
2003 }
2004
2005 res = updateProcessorStream(mJpegProcessor, params);
2006 return res;
2007 }
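// The sequence above (pause streaming -> flush -> waitUntilDrained ->
// updateProcessorStream) creates the JPEG stream against an idle device, which
// the current device status tracking requires, as noted in the comment above.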
2008
2009 const int32_t Camera2Client::kPreviewRequestIdStart;
2010 const int32_t Camera2Client::kPreviewRequestIdEnd;
2011 const int32_t Camera2Client::kRecordingRequestIdStart;
2012 const int32_t Camera2Client::kRecordingRequestIdEnd;
2013 const int32_t Camera2Client::kCaptureRequestIdStart;
2014 const int32_t Camera2Client::kCaptureRequestIdEnd;
2015
2016 /** Utility methods */
2017
2018 status_t Camera2Client::updateRequests(Parameters &params) {
2019 status_t res;
2020
2021 ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state);
2022
2023 res = mStreamingProcessor->incrementStreamingIds();
2024 if (res != OK) {
2025 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)",
2026 __FUNCTION__, mCameraId, strerror(-res), res);
2027 return res;
2028 }
2029
2030 res = mStreamingProcessor->updatePreviewRequest(params);
2031 if (res != OK) {
2032 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)",
2033 __FUNCTION__, mCameraId, strerror(-res), res);
2034 return res;
2035 }
2036 res = mStreamingProcessor->updateRecordingRequest(params);
2037 if (res != OK) {
2038 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
2039 __FUNCTION__, mCameraId, strerror(-res), res);
2040 return res;
2041 }
2042
2043 if (params.state == Parameters::PREVIEW) {
2044 res = startPreviewL(params, true);
2045 if (res != OK) {
2046 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)",
2047 __FUNCTION__, mCameraId, strerror(-res), res);
2048 return res;
2049 }
2050 } else if (params.state == Parameters::RECORD ||
2051 params.state == Parameters::VIDEO_SNAPSHOT) {
2052 res = startRecordingL(params, true);
2053 if (res != OK) {
2054 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
2055 __FUNCTION__, mCameraId, strerror(-res), res);
2056 return res;
2057 }
2058 }
2059 return res;
2060 }
2061
2062
2063 size_t Camera2Client::calculateBufferSize(int width, int height,
2064 int format, int stride) {
2065 switch (format) {
2066 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16
2067 return width * height * 2;
2068 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21
2069 return width * height * 3 / 2;
2070 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2
2071 return width * height * 2;
2072 case HAL_PIXEL_FORMAT_YV12: { // YV12
2073 size_t ySize = stride * height;
2074 size_t uvStride = (stride / 2 + 0xF) & ~0xF;
2075 size_t uvSize = uvStride * height / 2;
2076 return ySize + uvSize * 2;
2077 }
2078 case HAL_PIXEL_FORMAT_RGB_565:
2079 return width * height * 2;
2080 case HAL_PIXEL_FORMAT_RGBA_8888:
2081 return width * height * 4;
2082 case HAL_PIXEL_FORMAT_RAW16:
2083 return width * height * 2;
2084 default:
2085 ALOGE("%s: Unknown preview format: %x",
2086 __FUNCTION__, format);
2087 return 0;
2088 }
2089 }
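// Worked example (illustrative): a 640x480 YV12 buffer with a 640-byte stride:
//   ySize    = 640 * 480          = 307200
//   uvStride = (640/2 + 15) & ~15 = 320    (already 16-byte aligned)
//   uvSize   = 320 * (480/2)      = 76800
//   total    = 307200 + 2 * 76800 = 460800 bytes (== 640*480*3/2)
// For a size whose half-stride is not 16-aligned, e.g. 176x144 with stride 176,
// uvStride rounds up from 88 to 96, so the buffer exceeds width*height*3/2.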
2090
2091 status_t Camera2Client::syncWithDevice() {
2092 ATRACE_CALL();
2093 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms
2094 status_t res;
2095
2096 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2097 if (activeRequestId == 0) return OK;
2098
2099 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout);
2100 if (res == TIMED_OUT) {
2101 ALOGE("%s: Camera %d: Timed out waiting sync with HAL",
2102 __FUNCTION__, mCameraId);
2103 } else if (res != OK) {
2104 ALOGE("%s: Camera %d: Error while waiting to sync with HAL",
2105 __FUNCTION__, mCameraId);
2106 }
2107 return res;
2108 }
2109
2110 template <typename ProcessorT>
2111 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2112 camera2::Parameters params) {
2113 // No default template arguments until C++11, so we need this overload
2114 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>(
2115 processor, params);
2116 }
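// Usage sketch (call sites assumed, for illustration only): most callers rely
// on the defaulted &ProcessorT::updateStream member, e.g.
//     updateProcessorStream(mJpegProcessor, params);
// while the explicit form spells out both template arguments:
//     updateProcessorStream<CallbackProcessor, &CallbackProcessor::updateStream>(
//             mCallbackProcessor, params);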
2117
2118 template <typename ProcessorT,
2119 status_t (ProcessorT::*updateStreamF)(const Parameters &)>
2120 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2121 Parameters params) {
2122 status_t res;
2123
2124 // Get raw pointer since sp<T> doesn't have operator->*
2125 ProcessorT *processorPtr = processor.get();
2126 res = (processorPtr->*updateStreamF)(params);
2127
2128 /**
2129 * Can't update the stream if it's busy?
2130 *
2131 * Then we need to stop the device (by temporarily clearing the request
2132 * queue) and then try again. Resume streaming once we're done.
2133 */
2134 if (res == -EBUSY) {
2135 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__,
2136 mCameraId);
2137 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2138 if (res != OK) {
2139 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2140 __FUNCTION__, mCameraId, strerror(-res), res);
2141 }
2142
2143 res = mDevice->waitUntilDrained();
2144 if (res != OK) {
2145 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
2146 __FUNCTION__, mCameraId, strerror(-res), res);
2147 }
2148
2149 res = (processorPtr->*updateStreamF)(params);
2150 if (res != OK) {
2151 ALOGE("%s: Camera %d: Failed to update processing stream "
2152 " despite having halted streaming first: %s (%d)",
2153 __FUNCTION__, mCameraId, strerror(-res), res);
2154 }
2155
2156 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
2157 if (res != OK) {
2158 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
2159 __FUNCTION__, mCameraId, strerror(-res), res);
2160 }
2161 }
2162
2163 return res;
2164 }
2165
2166 status_t Camera2Client::overrideVideoSnapshotSize(Parameters &params) {
2167 ALOGV("%s: Camera %d: configure still size to video size before recording"
2168 , __FUNCTION__, mCameraId);
2169 params.overrideJpegSizeByVideoSize();
2170 status_t res = updateProcessorStream(mJpegProcessor, params);
2171 if (res != OK) {
2172 ALOGE("%s: Camera %d: Can't override video snapshot size to video size: %s (%d)",
2173 __FUNCTION__, mCameraId, strerror(-res), res);
2174 }
2175 return res;
2176 }
2177
2178 status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
2179 ATRACE_CALL();
2180 ALOGV("%s: E", __FUNCTION__);
2181 Mutex::Autolock icl(mBinderSerializationLock);
2182 status_t res;
2183 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
2184
2185 sp<IBinder> binder = IInterface::asBinder(bufferProducer);
2186 if (binder == mVideoSurface) {
2187 ALOGV("%s: Camera %d: New video window is same as old video window",
2188 __FUNCTION__, mCameraId);
2189 return NO_ERROR;
2190 }
2191
2192 sp<Surface> window;
2193 int format;
2194 android_dataspace dataSpace;
2195
2196 if (bufferProducer != nullptr) {
2197 // Using controlledByApp flag to ensure that the buffer queue remains in
2198 // async mode for the old camera API, where many applications depend
2199 // on that behavior.
2200 window = new Surface(bufferProducer, /*controlledByApp*/ true);
2201
2202 ANativeWindow *anw = window.get();
2203
2204 if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
2205 ALOGE("%s: Failed to query Surface format", __FUNCTION__);
2206 return res;
2207 }
2208
2209 if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
2210 reinterpret_cast<int*>(&dataSpace))) != OK) {
2211 ALOGE("%s: Failed to query Surface dataSpace", __FUNCTION__);
2212 return res;
2213 }
2214 }
2215
2216 Parameters::State state;
2217 {
2218 SharedParameters::Lock l(mParameters);
2219 state = l.mParameters.state;
2220 }
2221
2222 switch (state) {
2223 case Parameters::STOPPED:
2224 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
2225 case Parameters::PREVIEW:
2226 // OK
2227 break;
2228 case Parameters::DISCONNECTED:
2229 case Parameters::RECORD:
2230 case Parameters::STILL_CAPTURE:
2231 case Parameters::VIDEO_SNAPSHOT:
2232 default:
2233 ALOGE("%s: Camera %d: Cannot set video target while in state %s",
2234 __FUNCTION__, mCameraId,
2235 Parameters::getStateName(state));
2236 return INVALID_OPERATION;
2237 }
2238
2239 mVideoSurface = binder;
2240 res = mStreamingProcessor->setRecordingWindow(window);
2241 if (res != OK) {
2242 ALOGE("%s: Unable to set new recording window: %s (%d)",
2243 __FUNCTION__, strerror(-res), res);
2244 return res;
2245 }
2246
2247 {
2248 SharedParameters::Lock l(mParameters);
2249 l.mParameters.videoFormat = format;
2250 l.mParameters.videoDataSpace = dataSpace;
2251 }
2252
2253 return OK;
2254 }
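// Typical use (assumed, outside this file): a recording client passes the
// IGraphicBufferProducer of its encoder input surface to setVideoTarget()
// before recording starts; the state check above rejects retargeting once a
// recording or still capture is already in progress.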
2255
2256 const char* Camera2Client::kAutofocusLabel = "autofocus";
2257 const char* Camera2Client::kTakepictureLabel = "take_picture";
2258
2259 } // namespace android
2260