1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2Client"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <utils/Log.h>
22 #include <utils/Trace.h>
23
24 #include <cutils/properties.h>
25 #include <gui/Surface.h>
26
27 #include "api1/Camera2Client.h"
28
29 #include "api1/client2/StreamingProcessor.h"
30 #include "api1/client2/JpegProcessor.h"
31 #include "api1/client2/CaptureSequencer.h"
32 #include "api1/client2/CallbackProcessor.h"
33 #include "api1/client2/ZslProcessor.h"
34 #include "api1/client2/ZslProcessor3.h"
35
36 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
37 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
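// Extra-verbose logging helpers; the messages are only emitted when the
// global gLogLevel is raised at runtime.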
38
39 namespace android {
40 using namespace camera2;
41
42 static int getCallingPid() {
43 return IPCThreadState::self()->getCallingPid();
44 }
45
46 // Interface used by CameraService
47
48 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
49 const sp<ICameraClient>& cameraClient,
50 const String16& clientPackageName,
51 int cameraId,
52 int cameraFacing,
53 int clientPid,
54 uid_t clientUid,
55 int servicePid,
56 int deviceVersion):
57 Camera2ClientBase(cameraService, cameraClient, clientPackageName,
58 cameraId, cameraFacing, clientPid, clientUid, servicePid),
59 mParameters(cameraId, cameraFacing),
60 mDeviceVersion(deviceVersion)
61 {
62 ATRACE_CALL();
63
64 SharedParameters::Lock l(mParameters);
65 l.mParameters.state = Parameters::DISCONNECTED;
66 }
67
68 status_t Camera2Client::initialize(camera_module_t *module)
69 {
70 ATRACE_CALL();
71 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
72 status_t res;
73
74 res = Camera2ClientBase::initialize(module);
75 if (res != OK) {
76 return res;
77 }
78
79 {
80 SharedParameters::Lock l(mParameters);
81
82 res = l.mParameters.initialize(&(mDevice->info()));
83 if (res != OK) {
84 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
85 __FUNCTION__, mCameraId, strerror(-res), res);
86 return NO_INIT;
87 }
88 }
89
90 String8 threadName;
91
92 mStreamingProcessor = new StreamingProcessor(this);
93 threadName = String8::format("C2-%d-StreamProc",
94 mCameraId);
95 mStreamingProcessor->run(threadName.string());
96
97 mFrameProcessor = new FrameProcessor(mDevice, this);
98 threadName = String8::format("C2-%d-FrameProc",
99 mCameraId);
100 mFrameProcessor->run(threadName.string());
101
102 mCaptureSequencer = new CaptureSequencer(this);
103 threadName = String8::format("C2-%d-CaptureSeq",
104 mCameraId);
105 mCaptureSequencer->run(threadName.string());
106
107 mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
108 threadName = String8::format("C2-%d-JpegProc",
109 mCameraId);
110 mJpegProcessor->run(threadName.string());
111
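// Pick the ZSL processor implementation that matches the HAL device version;
// both variants are driven through the shared mZslProcessor and
// mZslProcessorThread handles.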
112 switch (mDeviceVersion) {
113 case CAMERA_DEVICE_API_VERSION_2_0: {
114 sp<ZslProcessor> zslProc =
115 new ZslProcessor(this, mCaptureSequencer);
116 mZslProcessor = zslProc;
117 mZslProcessorThread = zslProc;
118 break;
119 }
120 case CAMERA_DEVICE_API_VERSION_3_0:{
121 sp<ZslProcessor3> zslProc =
122 new ZslProcessor3(this, mCaptureSequencer);
123 mZslProcessor = zslProc;
124 mZslProcessorThread = zslProc;
125 break;
126 }
127 default:
128 break;
129 }
130 threadName = String8::format("C2-%d-ZslProc",
131 mCameraId);
132 mZslProcessorThread->run(threadName.string());
133
134 mCallbackProcessor = new CallbackProcessor(this);
135 threadName = String8::format("C2-%d-CallbkProc",
136 mCameraId);
137 mCallbackProcessor->run(threadName.string());
138
139 if (gLogLevel >= 1) {
140 SharedParameters::Lock l(mParameters);
141 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
142 mCameraId);
143 ALOGD("%s", l.mParameters.paramsFlattened.string());
144 }
145
146 return OK;
147 }
148
149 Camera2Client::~Camera2Client() {
150 ATRACE_CALL();
151 ALOGV("~Camera2Client");
152
153 mDestructionStarted = true;
154
155 disconnect();
156
157 ALOGI("Camera %d: Closed", mCameraId);
158 }
159
160 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
161 String8 result;
162 result.appendFormat("Client2[%d] (%p) Client: %s PID: %d, dump:\n",
163 mCameraId,
164 getRemoteCallback()->asBinder().get(),
165 String8(mClientPackageName).string(),
166 mClientPid);
167 result.append(" State: ");
168 #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;
169
170 const Parameters& p = mParameters.unsafeAccess();
171
172 result.append(Parameters::getStateName(p.state));
173
174 result.append("\n Current parameters:\n");
175 result.appendFormat(" Preview size: %d x %d\n",
176 p.previewWidth, p.previewHeight);
177 result.appendFormat(" Preview FPS range: %d - %d\n",
178 p.previewFpsRange[0], p.previewFpsRange[1]);
179 result.appendFormat(" Preview HAL pixel format: 0x%x\n",
180 p.previewFormat);
181 result.appendFormat(" Preview transform: %x\n",
182 p.previewTransform);
183 result.appendFormat(" Picture size: %d x %d\n",
184 p.pictureWidth, p.pictureHeight);
185 result.appendFormat(" Jpeg thumbnail size: %d x %d\n",
186 p.jpegThumbSize[0], p.jpegThumbSize[1]);
187 result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n",
188 p.jpegQuality, p.jpegThumbQuality);
189 result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation);
190 result.appendFormat(" GPS tags %s\n",
191 p.gpsEnabled ? "enabled" : "disabled");
192 if (p.gpsEnabled) {
193 result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n",
194 p.gpsCoordinates[0], p.gpsCoordinates[1],
195 p.gpsCoordinates[2]);
196 result.appendFormat(" GPS timestamp: %lld\n",
197 p.gpsTimestamp);
198 result.appendFormat(" GPS processing method: %s\n",
199 p.gpsProcessingMethod.string());
200 }
201
202 result.append(" White balance mode: ");
203 switch (p.wbMode) {
204 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
205 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
206 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
207 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
208 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
209 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
210 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
211 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
212 default: result.append("UNKNOWN\n");
213 }
214
215 result.append(" Effect mode: ");
216 switch (p.effectMode) {
217 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
218 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
219 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
220 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
221 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
222 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
223 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
224 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
225 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
226 default: result.append("UNKNOWN\n");
227 }
228
229 result.append(" Antibanding mode: ");
230 switch (p.antibandingMode) {
231 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
232 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
233 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
234 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
235 default: result.append("UNKNOWN\n");
236 }
237
238 result.append(" Scene mode: ");
239 switch (p.sceneMode) {
240 case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED:
241 result.append("AUTO\n"); break;
242 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
243 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
244 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)
245 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT)
246 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT)
247 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE)
248 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH)
249 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW)
250 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET)
251 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO)
252 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS)
253 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS)
254 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
255 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
256 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
257 default: result.append("UNKNOWN\n");
258 }
259
260 result.append(" Flash mode: ");
261 switch (p.flashMode) {
262 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
263 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
264 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON)
265 CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
266 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
267 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
268 default: result.append("UNKNOWN\n");
269 }
270
271 result.append(" Focus mode: ");
272 switch (p.focusMode) {
273 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
274 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
275 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO)
276 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE)
277 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF)
278 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
279 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
280 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
281 default: result.append("UNKNOWN\n");
282 }
283
284 result.append(" Focus state: ");
285 switch (p.focusState) {
286 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
287 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
288 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
289 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
290 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
291 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
292 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
293 default: result.append("UNKNOWN\n");
294 }
295
296 result.append(" Focusing areas:\n");
297 for (size_t i = 0; i < p.focusingAreas.size(); i++) {
298 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
299 p.focusingAreas[i].left,
300 p.focusingAreas[i].top,
301 p.focusingAreas[i].right,
302 p.focusingAreas[i].bottom,
303 p.focusingAreas[i].weight);
304 }
305
306 result.appendFormat(" Exposure compensation index: %d\n",
307 p.exposureCompensation);
308
309 result.appendFormat(" AE lock %s, AWB lock %s\n",
310 p.autoExposureLock ? "enabled" : "disabled",
311 p.autoWhiteBalanceLock ? "enabled" : "disabled" );
312
313 result.appendFormat(" Metering areas:\n");
314 for (size_t i = 0; i < p.meteringAreas.size(); i++) {
315 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
316 p.meteringAreas[i].left,
317 p.meteringAreas[i].top,
318 p.meteringAreas[i].right,
319 p.meteringAreas[i].bottom,
320 p.meteringAreas[i].weight);
321 }
322
323 result.appendFormat(" Zoom index: %d\n", p.zoom);
324 result.appendFormat(" Video size: %d x %d\n", p.videoWidth,
325 p.videoHeight);
326
327 result.appendFormat(" Recording hint is %s\n",
328 p.recordingHint ? "set" : "not set");
329
330 result.appendFormat(" Video stabilization is %s\n",
331 p.videoStabilization ? "enabled" : "disabled");
332
333 result.appendFormat(" Selected still capture FPS range: %d - %d\n",
334 p.fastInfo.bestStillCaptureFpsRange[0],
335 p.fastInfo.bestStillCaptureFpsRange[1]);
336
337 result.append(" Current streams:\n");
338 result.appendFormat(" Preview stream ID: %d\n",
339 getPreviewStreamId());
340 result.appendFormat(" Capture stream ID: %d\n",
341 getCaptureStreamId());
342 result.appendFormat(" Recording stream ID: %d\n",
343 getRecordingStreamId());
344
345 result.append(" Quirks for this camera:\n");
346 bool haveQuirk = false;
347 if (p.quirks.triggerAfWithAuto) {
348 result.appendFormat(" triggerAfWithAuto\n");
349 haveQuirk = true;
350 }
351 if (p.quirks.useZslFormat) {
352 result.appendFormat(" useZslFormat\n");
353 haveQuirk = true;
354 }
355 if (p.quirks.meteringCropRegion) {
356 result.appendFormat(" meteringCropRegion\n");
357 haveQuirk = true;
358 }
359 if (p.quirks.partialResults) {
360 result.appendFormat(" usePartialResult\n");
361 haveQuirk = true;
362 }
363 if (!haveQuirk) {
364 result.appendFormat(" none\n");
365 }
366
367 write(fd, result.string(), result.size());
368
369 mStreamingProcessor->dump(fd, args);
370
371 mCaptureSequencer->dump(fd, args);
372
373 mFrameProcessor->dump(fd, args);
374
375 mZslProcessor->dump(fd, args);
376
377 return dumpDevice(fd, args);
378 #undef CASE_APPEND_ENUM
379 }
380
381 // ICamera interface
382
383 void Camera2Client::disconnect() {
384 ATRACE_CALL();
385 Mutex::Autolock icl(mBinderSerializationLock);
386
387 // Allow both the client and the media server to disconnect at all times
388 int callingPid = getCallingPid();
389 if (callingPid != mClientPid && callingPid != mServicePid) return;
390
391 if (mDevice == 0) return;
392
393 ALOGV("Camera %d: Shutting down", mCameraId);
394
395 /**
396 * disconnect() cannot call any methods that might need to promote a
397 * wp<Camera2Client>, since disconnect can be called from the destructor, at
398 * which point all such promotions will fail.
399 */
400
401 stopPreviewL();
402
403 {
404 SharedParameters::Lock l(mParameters);
405 if (l.mParameters.state == Parameters::DISCONNECTED) return;
406 l.mParameters.state = Parameters::DISCONNECTED;
407 }
408
409 mStreamingProcessor->requestExit();
410 mFrameProcessor->requestExit();
411 mCaptureSequencer->requestExit();
412 mJpegProcessor->requestExit();
413 mZslProcessorThread->requestExit();
414 mCallbackProcessor->requestExit();
415
416 ALOGV("Camera %d: Waiting for threads", mCameraId);
417
418 mStreamingProcessor->join();
419 mFrameProcessor->join();
420 mCaptureSequencer->join();
421 mJpegProcessor->join();
422 mZslProcessorThread->join();
423 mCallbackProcessor->join();
424
425 ALOGV("Camera %d: Deleting streams", mCameraId);
426
427 mStreamingProcessor->deletePreviewStream();
428 mStreamingProcessor->deleteRecordingStream();
429 mJpegProcessor->deleteStream();
430 mCallbackProcessor->deleteStream();
431 mZslProcessor->deleteStream();
432
433 ALOGV("Camera %d: Disconnecting device", mCameraId);
434
435 mDevice->disconnect();
436
437 mDevice.clear();
438
439 CameraService::Client::disconnect();
440 }
441
442 status_t Camera2Client::connect(const sp<ICameraClient>& client) {
443 ATRACE_CALL();
444 ALOGV("%s: E", __FUNCTION__);
445 Mutex::Autolock icl(mBinderSerializationLock);
446
447 if (mClientPid != 0 && getCallingPid() != mClientPid) {
448 ALOGE("%s: Camera %d: Connection attempt from pid %d; "
449 "current locked to pid %d", __FUNCTION__,
450 mCameraId, getCallingPid(), mClientPid);
451 return BAD_VALUE;
452 }
453
454 mClientPid = getCallingPid();
455
456 mRemoteCallback = client;
457 mSharedCameraCallbacks = client;
458
459 return OK;
460 }
461
462 status_t Camera2Client::lock() {
463 ATRACE_CALL();
464 ALOGV("%s: E", __FUNCTION__);
465 Mutex::Autolock icl(mBinderSerializationLock);
466 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
467 __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
468
469 if (mClientPid == 0) {
470 mClientPid = getCallingPid();
471 return OK;
472 }
473
474 if (mClientPid != getCallingPid()) {
475 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
476 __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
477 return EBUSY;
478 }
479
480 return OK;
481 }
482
483 status_t Camera2Client::unlock() {
484 ATRACE_CALL();
485 ALOGV("%s: E", __FUNCTION__);
486 Mutex::Autolock icl(mBinderSerializationLock);
487 ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
488 __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
489
490 if (mClientPid == getCallingPid()) {
491 SharedParameters::Lock l(mParameters);
492 if (l.mParameters.state == Parameters::RECORD ||
493 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
494 ALOGD("Not allowed to unlock camera during recording.");
495 return INVALID_OPERATION;
496 }
497 mClientPid = 0;
498 mRemoteCallback.clear();
499 mSharedCameraCallbacks.clear();
500 return OK;
501 }
502
503 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
504 __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
505 return EBUSY;
506 }
507
508 status_t Camera2Client::setPreviewTarget(
509 const sp<IGraphicBufferProducer>& bufferProducer) {
510 ATRACE_CALL();
511 ALOGV("%s: E", __FUNCTION__);
512 Mutex::Autolock icl(mBinderSerializationLock);
513 status_t res;
514 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
515
516 sp<IBinder> binder;
517 sp<ANativeWindow> window;
518 if (bufferProducer != 0) {
519 binder = bufferProducer->asBinder();
520 // Using controlledByApp flag to ensure that the buffer queue remains in
521 // async mode for the old camera API, where many applications depend
522 // on that behavior.
523 window = new Surface(bufferProducer, /*controlledByApp*/ true);
524 }
525 return setPreviewWindowL(binder, window);
526 }
527
528 status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
529 sp<ANativeWindow> window) {
530 ATRACE_CALL();
531 status_t res;
532
533 if (binder == mPreviewSurface) {
534 ALOGV("%s: Camera %d: New window is same as old window",
535 __FUNCTION__, mCameraId);
536 return NO_ERROR;
537 }
538
539 Parameters::State state;
540 {
541 SharedParameters::Lock l(mParameters);
542 state = l.mParameters.state;
543 }
544 switch (state) {
545 case Parameters::DISCONNECTED:
546 case Parameters::RECORD:
547 case Parameters::STILL_CAPTURE:
548 case Parameters::VIDEO_SNAPSHOT:
549 ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
550 __FUNCTION__, mCameraId,
551 Parameters::getStateName(state));
552 return INVALID_OPERATION;
553 case Parameters::STOPPED:
554 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
555 // OK
556 break;
557 case Parameters::PREVIEW:
558 // Already running preview - need to stop and create a new stream
559 res = stopStream();
560 if (res != OK) {
561 ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
562 __FUNCTION__, strerror(-res), res);
563 return res;
564 }
565 state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
566 break;
567 }
568
569 mPreviewSurface = binder;
570 res = mStreamingProcessor->setPreviewWindow(window);
571 if (res != OK) {
572 ALOGE("%s: Unable to set new preview window: %s (%d)",
573 __FUNCTION__, strerror(-res), res);
574 return res;
575 }
576
577 if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) {
578 SharedParameters::Lock l(mParameters);
579 l.mParameters.state = state;
580 return startPreviewL(l.mParameters, false);
581 }
582
583 return OK;
584 }
585
586 void Camera2Client::setPreviewCallbackFlag(int flag) {
587 ATRACE_CALL();
588 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
589 Mutex::Autolock icl(mBinderSerializationLock);
590
591 if ( checkPid(__FUNCTION__) != OK) return;
592
593 SharedParameters::Lock l(mParameters);
594 setPreviewCallbackFlagL(l.mParameters, flag);
595 }
596
597 void Camera2Client::setPreviewCallbackFlagL(Parameters &params, int flag) {
598 status_t res = OK;
599
600 switch(params.state) {
601 case Parameters::STOPPED:
602 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
603 case Parameters::PREVIEW:
604 case Parameters::STILL_CAPTURE:
605 // OK
606 break;
607 default:
608 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
609 ALOGE("%s: Camera %d: Can't use preview callbacks "
610 "in state %d", __FUNCTION__, mCameraId, params.state);
611 return;
612 }
613 }
614
615 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
616 ALOGV("%s: setting oneshot", __FUNCTION__);
617 params.previewCallbackOneShot = true;
618 }
619 if (params.previewCallbackFlags != (uint32_t)flag) {
620
621 if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
622 // Disable any existing preview callback window when enabling
623 // preview callback flags
624 res = mCallbackProcessor->setCallbackWindow(NULL);
625 if (res != OK) {
626 ALOGE("%s: Camera %d: Unable to clear preview callback surface:"
627 " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
628 return;
629 }
630 params.previewCallbackSurface = false;
631 }
632
633 params.previewCallbackFlags = flag;
634
635 if (params.state == Parameters::PREVIEW) {
636 res = startPreviewL(params, true);
637 if (res != OK) {
638 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
639 __FUNCTION__, mCameraId,
640 Parameters::getStateName(params.state));
641 }
642 }
643 }
644 }
645
646 status_t Camera2Client::setPreviewCallbackTarget(
647 const sp<IGraphicBufferProducer>& callbackProducer) {
648 ATRACE_CALL();
649 ALOGV("%s: E", __FUNCTION__);
650 Mutex::Autolock icl(mBinderSerializationLock);
651 status_t res;
652 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
653
654 sp<ANativeWindow> window;
655 if (callbackProducer != 0) {
656 window = new Surface(callbackProducer);
657 }
658
659 res = mCallbackProcessor->setCallbackWindow(window);
660 if (res != OK) {
661 ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
662 __FUNCTION__, mCameraId, strerror(-res), res);
663 return res;
664 }
665
666 SharedParameters::Lock l(mParameters);
667
668 if (window != NULL) {
669 // Disable traditional callbacks when a valid callback target is given
670 l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
671 l.mParameters.previewCallbackOneShot = false;
672 l.mParameters.previewCallbackSurface = true;
673 } else {
674 // Disable callback target if given a NULL interface.
675 l.mParameters.previewCallbackSurface = false;
676 }
677
678 switch(l.mParameters.state) {
679 case Parameters::PREVIEW:
680 res = startPreviewL(l.mParameters, true);
681 break;
682 case Parameters::RECORD:
683 case Parameters::VIDEO_SNAPSHOT:
684 res = startRecordingL(l.mParameters, true);
685 break;
686 default:
687 break;
688 }
689 if (res != OK) {
690 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
691 __FUNCTION__, mCameraId,
692 Parameters::getStateName(l.mParameters.state));
693 }
694
695 return OK;
696 }
697
698
699 status_t Camera2Client::startPreview() {
700 ATRACE_CALL();
701 ALOGV("%s: E", __FUNCTION__);
702 Mutex::Autolock icl(mBinderSerializationLock);
703 status_t res;
704 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
705 SharedParameters::Lock l(mParameters);
706 return startPreviewL(l.mParameters, false);
707 }
708
709 status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
710 ATRACE_CALL();
711 status_t res;
712
713 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
714
715 if ( (params.state == Parameters::PREVIEW ||
716 params.state == Parameters::RECORD ||
717 params.state == Parameters::VIDEO_SNAPSHOT)
718 && !restart) {
719 // Succeed on an attempt to re-enter a streaming state
720 ALOGI("%s: Camera %d: Preview already active, ignoring restart",
721 __FUNCTION__, mCameraId);
722 return OK;
723 }
724 if (params.state > Parameters::PREVIEW && !restart) {
725 ALOGE("%s: Can't start preview in state %s",
726 __FUNCTION__,
727 Parameters::getStateName(params.state));
728 return INVALID_OPERATION;
729 }
730
731 if (!mStreamingProcessor->haveValidPreviewWindow()) {
732 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
733 return OK;
734 }
735 params.state = Parameters::STOPPED;
736 int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();
737
738 res = mStreamingProcessor->updatePreviewStream(params);
739 if (res != OK) {
740 ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)",
741 __FUNCTION__, mCameraId, strerror(-res), res);
742 return res;
743 }
744
745 bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;
746
747 // We could wait to create the JPEG output stream until first actual use
748 // (first takePicture call). However, this would substantially increase the
749 // first capture latency on HAL3 devices, and potentially on some HAL2
750 // devices. So create it unconditionally at preview start. As a drawback,
751 // this increases gralloc memory consumption for applications that don't
752 // ever take a picture.
753 // TODO: Find a better compromise, though this likely would involve HAL
754 // changes.
755 res = updateProcessorStream(mJpegProcessor, params);
756 if (res != OK) {
757 ALOGE("%s: Camera %d: Can't pre-configure still image "
758 "stream: %s (%d)",
759 __FUNCTION__, mCameraId, strerror(-res), res);
760 return res;
761 }
762
763 Vector<int32_t> outputStreams;
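// Preview callbacks may be requested either through the classic flag-based
// path or through a dedicated callback surface.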
764 bool callbacksEnabled = (params.previewCallbackFlags &
765 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
766 params.previewCallbackSurface;
767
768 if (callbacksEnabled) {
769 // Can't have recording stream hanging around when enabling callbacks,
770 // since it exceeds the max stream count on some devices.
771 if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
772 ALOGV("%s: Camera %d: Clearing out recording stream before "
773 "creating callback stream", __FUNCTION__, mCameraId);
774 res = mStreamingProcessor->stopStream();
775 if (res != OK) {
776 ALOGE("%s: Camera %d: Can't stop streaming to delete "
777 "recording stream", __FUNCTION__, mCameraId);
778 return res;
779 }
780 res = mStreamingProcessor->deleteRecordingStream();
781 if (res != OK) {
782 ALOGE("%s: Camera %d: Unable to delete recording stream before "
783 "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId,
784 strerror(-res), res);
785 return res;
786 }
787 }
788
789 res = mCallbackProcessor->updateStream(params);
790 if (res != OK) {
791 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
792 __FUNCTION__, mCameraId, strerror(-res), res);
793 return res;
794 }
795 outputStreams.push(getCallbackStreamId());
796 } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
797 /**
798 * Delete the unused callback stream when the preview stream has changed
799 * and callbacks are not enabled. No need to stop the preview stream, as
800 * preview is in the STOPPED state at this point.
801 */
802 ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId);
803 res = mCallbackProcessor->deleteStream();
804 if (res != OK) {
805 ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
806 __FUNCTION__, mCameraId, strerror(-res), res);
807 return res;
808 }
809 }
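// Only set up the ZSL capture stream when ZSL is enabled and the app has not
// hinted that it intends to record video.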
810 if (params.zslMode && !params.recordingHint) {
811 res = updateProcessorStream(mZslProcessor, params);
812 if (res != OK) {
813 ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
814 __FUNCTION__, mCameraId, strerror(-res), res);
815 return res;
816 }
817 outputStreams.push(getZslStreamId());
818 }
819
820 outputStreams.push(getPreviewStreamId());
821
822 if (!params.recordingHint) {
823 if (!restart) {
824 res = mStreamingProcessor->updatePreviewRequest(params);
825 if (res != OK) {
826 ALOGE("%s: Camera %d: Can't set up preview request: "
827 "%s (%d)", __FUNCTION__, mCameraId,
828 strerror(-res), res);
829 return res;
830 }
831 }
832 res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW,
833 outputStreams);
834 } else {
835 if (!restart) {
836 res = mStreamingProcessor->updateRecordingRequest(params);
837 if (res != OK) {
838 ALOGE("%s: Camera %d: Can't set up preview request with "
839 "record hint: %s (%d)", __FUNCTION__, mCameraId,
840 strerror(-res), res);
841 return res;
842 }
843 }
844 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
845 outputStreams);
846 }
847 if (res != OK) {
848 ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)",
849 __FUNCTION__, mCameraId, strerror(-res), res);
850 return res;
851 }
852
853 params.state = Parameters::PREVIEW;
854 return OK;
855 }
856
857 void Camera2Client::stopPreview() {
858 ATRACE_CALL();
859 ALOGV("%s: E", __FUNCTION__);
860 Mutex::Autolock icl(mBinderSerializationLock);
861 status_t res;
862 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
863 stopPreviewL();
864 }
865
866 void Camera2Client::stopPreviewL() {
867 ATRACE_CALL();
868 status_t res;
869 const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
870 Parameters::State state;
871 {
872 SharedParameters::Lock l(mParameters);
873 state = l.mParameters.state;
874 }
875
876 switch (state) {
877 case Parameters::DISCONNECTED:
878 // Nothing to do.
879 break;
880 case Parameters::STOPPED:
881 case Parameters::VIDEO_SNAPSHOT:
882 case Parameters::STILL_CAPTURE:
883 mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout);
884 // no break
885 case Parameters::RECORD:
886 case Parameters::PREVIEW:
887 syncWithDevice();
888 res = stopStream();
889 if (res != OK) {
890 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
891 __FUNCTION__, mCameraId, strerror(-res), res);
892 }
893 res = mDevice->waitUntilDrained();
894 if (res != OK) {
895 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
896 __FUNCTION__, mCameraId, strerror(-res), res);
897 }
898 // no break
899 case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
900 SharedParameters::Lock l(mParameters);
901 l.mParameters.state = Parameters::STOPPED;
902 commandStopFaceDetectionL(l.mParameters);
903 break;
904 }
905 default:
906 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId,
907 state);
908 }
909 }
910
911 bool Camera2Client::previewEnabled() {
912 ATRACE_CALL();
913 Mutex::Autolock icl(mBinderSerializationLock);
914 status_t res;
915 if ( (res = checkPid(__FUNCTION__) ) != OK) return false;
916
917 SharedParameters::Lock l(mParameters);
918 return l.mParameters.state == Parameters::PREVIEW;
919 }
920
921 status_t Camera2Client::storeMetaDataInBuffers(bool enabled) {
922 ATRACE_CALL();
923 Mutex::Autolock icl(mBinderSerializationLock);
924 status_t res;
925 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
926
927 SharedParameters::Lock l(mParameters);
928 switch (l.mParameters.state) {
929 case Parameters::RECORD:
930 case Parameters::VIDEO_SNAPSHOT:
931 ALOGE("%s: Camera %d: Can't be called in state %s",
932 __FUNCTION__, mCameraId,
933 Parameters::getStateName(l.mParameters.state));
934 return INVALID_OPERATION;
935 default:
936 // OK
937 break;
938 }
939
940 l.mParameters.storeMetadataInBuffers = enabled;
941
942 return OK;
943 }
944
945 status_t Camera2Client::startRecording() {
946 ATRACE_CALL();
947 ALOGV("%s: E", __FUNCTION__);
948 Mutex::Autolock icl(mBinderSerializationLock);
949 status_t res;
950 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
951 SharedParameters::Lock l(mParameters);
952
953 return startRecordingL(l.mParameters, false);
954 }
955
956 status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
957 status_t res;
958
959 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
960
961 switch (params.state) {
962 case Parameters::STOPPED:
963 res = startPreviewL(params, false);
964 if (res != OK) return res;
965 break;
966 case Parameters::PREVIEW:
967 // Ready to go
968 break;
969 case Parameters::RECORD:
970 case Parameters::VIDEO_SNAPSHOT:
971 // OK to call this when recording is already on, just skip unless
972 // we're looking to restart
973 if (!restart) return OK;
974 break;
975 default:
976 ALOGE("%s: Camera %d: Can't start recording in state %s",
977 __FUNCTION__, mCameraId,
978 Parameters::getStateName(params.state));
979 return INVALID_OPERATION;
980 };
981
982 if (!params.storeMetadataInBuffers) {
983 ALOGE("%s: Camera %d: Recording only supported in metadata mode, but "
984 "non-metadata recording mode requested!", __FUNCTION__,
985 mCameraId);
986 return INVALID_OPERATION;
987 }
988
989 if (!restart) {
990 mCameraService->playSound(CameraService::SOUND_RECORDING);
991 res = mStreamingProcessor->updateRecordingRequest(params);
992 if (res != OK) {
993 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
994 __FUNCTION__, mCameraId, strerror(-res), res);
995 return res;
996 }
997 }
998
999 // Not all devices can support a preview callback stream and a recording
1000 // stream at the same time, so assume none of them can.
1001 if (mCallbackProcessor->getStreamId() != NO_STREAM) {
1002 ALOGV("%s: Camera %d: Clearing out callback stream before "
1003 "creating recording stream", __FUNCTION__, mCameraId);
1004 res = mStreamingProcessor->stopStream();
1005 if (res != OK) {
1006 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1007 __FUNCTION__, mCameraId);
1008 return res;
1009 }
1010 res = mCallbackProcessor->deleteStream();
1011 if (res != OK) {
1012 ALOGE("%s: Camera %d: Unable to delete callback stream before "
1013 "record: %s (%d)", __FUNCTION__, mCameraId,
1014 strerror(-res), res);
1015 return res;
1016 }
1017 }
1018 // Disable callbacks if they're enabled; can't record and use callbacks,
1019 // and we can't fail record start without stagefright asserting.
1020 params.previewCallbackFlags = 0;
1021
1022 res = updateProcessorStream<
1023 StreamingProcessor,
1024 &StreamingProcessor::updateRecordingStream>(mStreamingProcessor,
1025 params);
1026 if (res != OK) {
1027 ALOGE("%s: Camera %d: Unable to update recording stream: %s (%d)",
1028 __FUNCTION__, mCameraId, strerror(-res), res);
1029 return res;
1030 }
1031
1032 Vector<int32_t> outputStreams;
1033 outputStreams.push(getPreviewStreamId());
1034 outputStreams.push(getRecordingStreamId());
1035
1036 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1037 outputStreams);
1038 if (res != OK) {
1039 ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)",
1040 __FUNCTION__, mCameraId, strerror(-res), res);
1041 return res;
1042 }
1043
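// Don't downgrade the state if a video snapshot is already in progress.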
1044 if (params.state < Parameters::RECORD) {
1045 params.state = Parameters::RECORD;
1046 }
1047
1048 return OK;
1049 }
1050
1051 void Camera2Client::stopRecording() {
1052 ATRACE_CALL();
1053 ALOGV("%s: E", __FUNCTION__);
1054 Mutex::Autolock icl(mBinderSerializationLock);
1055 SharedParameters::Lock l(mParameters);
1056
1057 status_t res;
1058 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
1059
1060 switch (l.mParameters.state) {
1061 case Parameters::RECORD:
1062 // OK to stop
1063 break;
1064 case Parameters::STOPPED:
1065 case Parameters::PREVIEW:
1066 case Parameters::STILL_CAPTURE:
1067 case Parameters::VIDEO_SNAPSHOT:
1068 default:
1069 ALOGE("%s: Camera %d: Can't stop recording in state %s",
1070 __FUNCTION__, mCameraId,
1071 Parameters::getStateName(l.mParameters.state));
1072 return;
1073 };
1074
1075 mCameraService->playSound(CameraService::SOUND_RECORDING);
1076
1077 res = startPreviewL(l.mParameters, true);
1078 if (res != OK) {
1079 ALOGE("%s: Camera %d: Unable to return to preview",
1080 __FUNCTION__, mCameraId);
1081 }
1082 }
1083
1084 bool Camera2Client::recordingEnabled() {
1085 ATRACE_CALL();
1086 Mutex::Autolock icl(mBinderSerializationLock);
1087
1088 if ( checkPid(__FUNCTION__) != OK) return false;
1089
1090 return recordingEnabledL();
1091 }
1092
1093 bool Camera2Client::recordingEnabledL() {
1094 ATRACE_CALL();
1095 SharedParameters::Lock l(mParameters);
1096
1097 return (l.mParameters.state == Parameters::RECORD
1098 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
1099 }
1100
1101 void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
1102 ATRACE_CALL();
1103 Mutex::Autolock icl(mBinderSerializationLock);
1104 if ( checkPid(__FUNCTION__) != OK) return;
1105
1106 mStreamingProcessor->releaseRecordingFrame(mem);
1107 }
1108
1109 status_t Camera2Client::autoFocus() {
1110 ATRACE_CALL();
1111 Mutex::Autolock icl(mBinderSerializationLock);
1112 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1113 status_t res;
1114 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1115
1116 int triggerId;
1117 bool notifyImmediately = false;
1118 bool notifySuccess = false;
1119 {
1120 SharedParameters::Lock l(mParameters);
1121 if (l.mParameters.state < Parameters::PREVIEW) {
1122 return INVALID_OPERATION;
1123 }
1124
1125 /**
1126 * If the camera does not support auto-focus, it is a no-op and
1127 * onAutoFocus(boolean, Camera) callback will be called immediately
1128 * with a fake value of success set to true.
1129 *
1130 * Similarly, if focus mode is set to INFINITY, there's no reason to
1131 * bother the HAL.
1132 */
1133 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1134 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1135 notifyImmediately = true;
1136 notifySuccess = true;
1137 }
1138 /**
1139 * If we're in CAF mode, and AF has already been locked, just fire back
1140 * the callback right away; the HAL would not send a notification since
1141 * no state change would happen on a AF trigger.
1142 */
1143 if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE ||
1144 l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) &&
1145 l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) {
1146 notifyImmediately = true;
1147 notifySuccess = true;
1148 }
1149 /**
1150 * Send immediate notification back to client
1151 */
1152 if (notifyImmediately) {
1153 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1154 if (l.mRemoteCallback != 0) {
1155 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1156 notifySuccess ? 1 : 0, 0);
1157 }
1158 return OK;
1159 }
1160 /**
1161 * Handle quirk mode for AF in scene modes
1162 */
1163 if (l.mParameters.quirks.triggerAfWithAuto &&
1164 l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED &&
1165 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
1166 !l.mParameters.focusingAreas[0].isEmpty()) {
1167 ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
1168 __FUNCTION__, l.mParameters.focusMode);
1169 l.mParameters.shadowFocusMode = l.mParameters.focusMode;
1170 l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO;
1171 updateRequests(l.mParameters);
1172 }
1173
1174 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
1175 triggerId = l.mParameters.currentAfTriggerId;
1176 }
1177 ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);
1178
1179 syncWithDevice();
1180
1181 mDevice->triggerAutofocus(triggerId);
1182
1183 return OK;
1184 }
1185
1186 status_t Camera2Client::cancelAutoFocus() {
1187 ATRACE_CALL();
1188 Mutex::Autolock icl(mBinderSerializationLock);
1189 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1190 status_t res;
1191 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1192
1193 int triggerId;
1194 {
1195 SharedParameters::Lock l(mParameters);
1196 // Canceling does nothing in FIXED or INFINITY modes
1197 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1198 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1199 return OK;
1200 }
1201
1202 // An active AF trigger is canceled
1203 if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
1204 ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
1205 }
1206
1207 triggerId = ++l.mParameters.afTriggerCounter;
1208
1209 // When using triggerAfWithAuto quirk, may need to reset focus mode to
1210 // the real state at this point. No need to cancel explicitly if
1211 // changing the AF mode.
1212 if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) {
1213 ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__,
1214 l.mParameters.shadowFocusMode);
1215 l.mParameters.focusMode = l.mParameters.shadowFocusMode;
1216 l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID;
1217 updateRequests(l.mParameters);
1218
1219 return OK;
1220 }
1221 }
1222 syncWithDevice();
1223
1224 mDevice->triggerCancelAutofocus(triggerId);
1225
1226 return OK;
1227 }
1228
1229 status_t Camera2Client::takePicture(int msgType) {
1230 ATRACE_CALL();
1231 Mutex::Autolock icl(mBinderSerializationLock);
1232 status_t res;
1233 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1234
1235 int takePictureCounter;
1236 {
1237 SharedParameters::Lock l(mParameters);
1238 switch (l.mParameters.state) {
1239 case Parameters::DISCONNECTED:
1240 case Parameters::STOPPED:
1241 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1242 ALOGE("%s: Camera %d: Cannot take picture without preview enabled",
1243 __FUNCTION__, mCameraId);
1244 return INVALID_OPERATION;
1245 case Parameters::PREVIEW:
1246 // Good to go for takePicture
1247 res = commandStopFaceDetectionL(l.mParameters);
1248 if (res != OK) {
1249 ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
1250 __FUNCTION__, mCameraId);
1251 return res;
1252 }
1253 l.mParameters.state = Parameters::STILL_CAPTURE;
1254 break;
1255 case Parameters::RECORD:
1256 // Good to go for video snapshot
1257 l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
1258 break;
1259 case Parameters::STILL_CAPTURE:
1260 case Parameters::VIDEO_SNAPSHOT:
1261 ALOGE("%s: Camera %d: Already taking a picture",
1262 __FUNCTION__, mCameraId);
1263 return INVALID_OPERATION;
1264 }
1265
1266 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
1267
1268 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1269 if (res != OK) {
1270 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
1271 __FUNCTION__, mCameraId, strerror(-res), res);
1272 return res;
1273 }
1274 takePictureCounter = ++l.mParameters.takePictureCounter;
1275 }
1276
1277 ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);
1278
1279 // Need HAL to have correct settings before (possibly) triggering precapture
1280 syncWithDevice();
1281
1282 res = mCaptureSequencer->startCapture(msgType);
1283 if (res != OK) {
1284 ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
1285 __FUNCTION__, mCameraId, strerror(-res), res);
1286 }
1287
1288 return res;
1289 }
1290
1291 status_t Camera2Client::setParameters(const String8& params) {
1292 ATRACE_CALL();
1293 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1294 Mutex::Autolock icl(mBinderSerializationLock);
1295 status_t res;
1296 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1297
1298 SharedParameters::Lock l(mParameters);
1299
1300 res = l.mParameters.set(params);
1301 if (res != OK) return res;
1302
1303 res = updateRequests(l.mParameters);
1304
1305 return res;
1306 }
1307
1308 String8 Camera2Client::getParameters() const {
1309 ATRACE_CALL();
1310 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1311 Mutex::Autolock icl(mBinderSerializationLock);
1312 if ( checkPid(__FUNCTION__) != OK) return String8();
1313
1314 SharedParameters::ReadLock l(mParameters);
1315
1316 return l.mParameters.get();
1317 }
1318
1319 status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
1320 ATRACE_CALL();
1321 Mutex::Autolock icl(mBinderSerializationLock);
1322 status_t res;
1323 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1324
1325 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId,
1326 cmd, arg1, arg2);
1327
1328 switch (cmd) {
1329 case CAMERA_CMD_START_SMOOTH_ZOOM:
1330 return commandStartSmoothZoomL();
1331 case CAMERA_CMD_STOP_SMOOTH_ZOOM:
1332 return commandStopSmoothZoomL();
1333 case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
1334 return commandSetDisplayOrientationL(arg1);
1335 case CAMERA_CMD_ENABLE_SHUTTER_SOUND:
1336 return commandEnableShutterSoundL(arg1 == 1);
1337 case CAMERA_CMD_PLAY_RECORDING_SOUND:
1338 return commandPlayRecordingSoundL();
1339 case CAMERA_CMD_START_FACE_DETECTION:
1340 return commandStartFaceDetectionL(arg1);
1341 case CAMERA_CMD_STOP_FACE_DETECTION: {
1342 SharedParameters::Lock l(mParameters);
1343 return commandStopFaceDetectionL(l.mParameters);
1344 }
1345 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
1346 return commandEnableFocusMoveMsgL(arg1 == 1);
1347 case CAMERA_CMD_PING:
1348 return commandPingL();
1349 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
1350 return commandSetVideoBufferCountL(arg1);
1351 default:
1352 ALOGE("%s: Unknown command %d (arguments %d, %d)",
1353 __FUNCTION__, cmd, arg1, arg2);
1354 return BAD_VALUE;
1355 }
1356 }
1357
1358 status_t Camera2Client::commandStartSmoothZoomL() {
1359 ALOGE("%s: Unimplemented!", __FUNCTION__);
1360 return OK;
1361 }
1362
1363 status_t Camera2Client::commandStopSmoothZoomL() {
1364 ALOGE("%s: Unimplemented!", __FUNCTION__);
1365 return OK;
1366 }
1367
1368 status_t Camera2Client::commandSetDisplayOrientationL(int degrees) {
1369 int transform = Parameters::degToTransform(degrees,
1370 mCameraFacing == CAMERA_FACING_FRONT);
1371 if (transform == -1) {
1372 ALOGE("%s: Camera %d: Error setting %d as display orientation value",
1373 __FUNCTION__, mCameraId, degrees);
1374 return BAD_VALUE;
1375 }
1376 SharedParameters::Lock l(mParameters);
1377 if (transform != l.mParameters.previewTransform &&
1378 getPreviewStreamId() != NO_STREAM) {
1379 mDevice->setStreamTransform(getPreviewStreamId(), transform);
1380 }
1381 l.mParameters.previewTransform = transform;
1382 return OK;
1383 }
1384
1385 status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
1386 SharedParameters::Lock l(mParameters);
1387 if (enable) {
1388 l.mParameters.playShutterSound = true;
1389 return OK;
1390 }
1391
1392 // Disabling shutter sound may not be allowed. In that case only
1393 // allow the mediaserver process to disable the sound.
1394 char value[PROPERTY_VALUE_MAX];
1395 property_get("ro.camera.sound.forced", value, "0");
1396 if (strncmp(value, "0", 2) != 0) {
1397 // Disabling shutter sound is not allowed. Deny if the current
1398 // process is not mediaserver.
1399 if (getCallingPid() != getpid()) {
1400 ALOGE("Failed to disable shutter sound. Permission denied (pid %d)",
1401 getCallingPid());
1402 return PERMISSION_DENIED;
1403 }
1404 }
1405
1406 l.mParameters.playShutterSound = false;
1407 return OK;
1408 }
1409
1410 status_t Camera2Client::commandPlayRecordingSoundL() {
1411 mCameraService->playSound(CameraService::SOUND_RECORDING);
1412 return OK;
1413 }
1414
1415 status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
1416 ALOGV("%s: Camera %d: Starting face detection",
1417 __FUNCTION__, mCameraId);
1418 status_t res;
1419 SharedParameters::Lock l(mParameters);
1420 switch (l.mParameters.state) {
1421 case Parameters::DISCONNECTED:
1422 case Parameters::STOPPED:
1423 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1424 case Parameters::STILL_CAPTURE:
1425 ALOGE("%s: Camera %d: Cannot start face detection without preview active",
1426 __FUNCTION__, mCameraId);
1427 return INVALID_OPERATION;
1428 case Parameters::PREVIEW:
1429 case Parameters::RECORD:
1430 case Parameters::VIDEO_SNAPSHOT:
1431 // Good to go for starting face detect
1432 break;
1433 }
1434 // Ignoring type
1435 if (l.mParameters.fastInfo.bestFaceDetectMode ==
1436 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
1437 ALOGE("%s: Camera %d: Face detection not supported",
1438 __FUNCTION__, mCameraId);
1439 return BAD_VALUE;
1440 }
1441 if (l.mParameters.enableFaceDetect) return OK;
1442
1443 l.mParameters.enableFaceDetect = true;
1444
1445 res = updateRequests(l.mParameters);
1446
1447 return res;
1448 }
1449
1450 status_t Camera2Client::commandStopFaceDetectionL(Parameters &params) {
1451 status_t res = OK;
1452 ALOGV("%s: Camera %d: Stopping face detection",
1453 __FUNCTION__, mCameraId);
1454
1455 if (!params.enableFaceDetect) return OK;
1456
1457 params.enableFaceDetect = false;
1458
1459 if (params.state == Parameters::PREVIEW
1460 || params.state == Parameters::RECORD
1461 || params.state == Parameters::VIDEO_SNAPSHOT) {
1462 res = updateRequests(params);
1463 }
1464
1465 return res;
1466 }
1467
1468 status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) {
1469 SharedParameters::Lock l(mParameters);
1470 l.mParameters.enableFocusMoveMessages = enable;
1471
1472 return OK;
1473 }
1474
1475 status_t Camera2Client::commandPingL() {
1476 // Always ping back if access is proper and device is alive
1477 SharedParameters::Lock l(mParameters);
1478 if (l.mParameters.state != Parameters::DISCONNECTED) {
1479 return OK;
1480 } else {
1481 return NO_INIT;
1482 }
1483 }
1484
1485 status_t Camera2Client::commandSetVideoBufferCountL(size_t count) {
1486 if (recordingEnabledL()) {
1487 ALOGE("%s: Camera %d: Error setting video buffer count after "
1488 "recording was started", __FUNCTION__, mCameraId);
1489 return INVALID_OPERATION;
1490 }
1491
1492 return mStreamingProcessor->setRecordingBufferCount(count);
1493 }
1494
1495 /** Device-related methods */
1496 void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
1497 ALOGV("%s: Autofocus state now %d, last trigger %d",
1498 __FUNCTION__, newState, triggerId);
1499 bool sendCompletedMessage = false;
1500 bool sendMovingMessage = false;
1501
1502 bool success = false;
1503 bool afInMotion = false;
1504 {
1505 SharedParameters::Lock l(mParameters);
1506 // Trace end of AF state
1507 char tmp[32];
1508 if (l.mParameters.afStateCounter > 0) {
1509 camera_metadata_enum_snprint(
1510 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1511 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
1512 }
1513
1514 // Update state
1515 l.mParameters.focusState = newState;
1516 l.mParameters.afStateCounter++;
1517
1518 // Trace start of AF state
1519
1520 camera_metadata_enum_snprint(
1521 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1522 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
1523
1524 switch (l.mParameters.focusMode) {
1525 case Parameters::FOCUS_MODE_AUTO:
1526 case Parameters::FOCUS_MODE_MACRO:
1527 // Don't send notifications upstream if they're not for the current AF
1528 // trigger. For example, if cancel was called in between, or if we
1529 // already sent a notification about this AF call.
1530 if (triggerId != l.mParameters.currentAfTriggerId) break;
1531 switch (newState) {
1532 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1533 success = true;
1534 // no break
1535 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1536 sendCompletedMessage = true;
1537 l.mParameters.currentAfTriggerId = -1;
1538 break;
1539 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1540 // Just starting focusing, ignore
1541 break;
1542 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1543 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1544 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1545 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1546 default:
1547 // Unexpected in AUTO/MACRO mode
1548 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
1549 __FUNCTION__, newState);
1550 break;
1551 }
1552 break;
1553 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
1554 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
1555 switch (newState) {
1556 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1557 success = true;
1558 // no break
1559 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1560 // Don't send notifications upstream if they're not for
1561 // the current AF trigger. For example, if cancel was
1562 // called in between, or if we already sent a
1563 // notification about this AF call.
1564 // Send both an 'AF done' callback and an 'AF move' callback
1565 if (triggerId != l.mParameters.currentAfTriggerId) break;
1566 sendCompletedMessage = true;
1567 afInMotion = false;
1568 if (l.mParameters.enableFocusMoveMessages &&
1569 l.mParameters.afInMotion) {
1570 sendMovingMessage = true;
1571 }
1572 l.mParameters.currentAfTriggerId = -1;
1573 break;
1574 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1575 // Cancel was called, or we switched state; only notify
1576 // upstream if AF was currently moving
1577 afInMotion = false;
1578 if (l.mParameters.enableFocusMoveMessages &&
1579 l.mParameters.afInMotion) {
1580 sendMovingMessage = true;
1581 }
1582 break;
1583 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1584 // Start passive scan, inform upstream
1585 afInMotion = true;
1586 // no break
1587 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1588 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1589 // Stop passive scan, inform upstream
1590 if (l.mParameters.enableFocusMoveMessages) {
1591 sendMovingMessage = true;
1592 }
1593 break;
1594 }
1595 l.mParameters.afInMotion = afInMotion;
1596 break;
1597 case Parameters::FOCUS_MODE_EDOF:
1598 case Parameters::FOCUS_MODE_INFINITY:
1599 case Parameters::FOCUS_MODE_FIXED:
1600 default:
1601 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
1602 ALOGE("%s: Unexpected AF state change %d "
1603 "(ID %d) in focus mode %d",
1604 __FUNCTION__, newState, triggerId,
1605 l.mParameters.focusMode);
1606 }
1607 }
1608 }
1609 if (sendMovingMessage) {
1610 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1611 if (l.mRemoteCallback != 0) {
1612 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
1613 afInMotion ? 1 : 0, 0);
1614 }
1615 }
1616 if (sendCompletedMessage) {
1617 ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
1618 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1619 if (l.mRemoteCallback != 0) {
1620 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1621 success ? 1 : 0, 0);
1622 }
1623 }
1624 }
1625
1626 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
1627 ALOGV("%s: Autoexposure state now %d, last trigger %d",
1628 __FUNCTION__, newState, triggerId);
1629 mCaptureSequencer->notifyAutoExposure(newState, triggerId);
1630 }
1631
1632 camera2::SharedParameters& Camera2Client::getParameters() {
1633 return mParameters;
1634 }
1635
1636 int Camera2Client::getPreviewStreamId() const {
1637 return mStreamingProcessor->getPreviewStreamId();
1638 }
1639
1640 int Camera2Client::getCaptureStreamId() const {
1641 return mJpegProcessor->getStreamId();
1642 }
1643
1644 int Camera2Client::getCallbackStreamId() const {
1645 return mCallbackProcessor->getStreamId();
1646 }
1647
1648 int Camera2Client::getRecordingStreamId() const {
1649 return mStreamingProcessor->getRecordingStreamId();
1650 }
1651
1652 int Camera2Client::getZslStreamId() const {
1653 return mZslProcessor->getStreamId();
1654 }
1655
1656 status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId,
1657 wp<camera2::FrameProcessor::FilteredListener> listener) {
1658 return mFrameProcessor->registerListener(minId, maxId, listener);
1659 }
1660
1661 status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId,
1662 wp<camera2::FrameProcessor::FilteredListener> listener) {
1663 return mFrameProcessor->removeListener(minId, maxId, listener);
1664 }
1665
1666 status_t Camera2Client::stopStream() {
1667 return mStreamingProcessor->stopStream();
1668 }
1669
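// Out-of-line definitions for the request ID range constants declared in the
// header.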
1670 const int32_t Camera2Client::kPreviewRequestIdStart;
1671 const int32_t Camera2Client::kPreviewRequestIdEnd;
1672 const int32_t Camera2Client::kRecordingRequestIdStart;
1673 const int32_t Camera2Client::kRecordingRequestIdEnd;
1674 const int32_t Camera2Client::kCaptureRequestIdStart;
1675 const int32_t Camera2Client::kCaptureRequestIdEnd;
1676
1677 /** Utility methods */
1678
1679 status_t Camera2Client::updateRequests(Parameters &params) {
1680 status_t res;
1681
1682 ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state);
1683
1684 res = mStreamingProcessor->incrementStreamingIds();
1685 if (res != OK) {
1686 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)",
1687 __FUNCTION__, mCameraId, strerror(-res), res);
1688 return res;
1689 }
1690
1691 res = mStreamingProcessor->updatePreviewRequest(params);
1692 if (res != OK) {
1693 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)",
1694 __FUNCTION__, mCameraId, strerror(-res), res);
1695 return res;
1696 }
1697 res = mStreamingProcessor->updateRecordingRequest(params);
1698 if (res != OK) {
1699 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
1700 __FUNCTION__, mCameraId, strerror(-res), res);
1701 return res;
1702 }
1703
1704 if (params.state == Parameters::PREVIEW) {
1705 res = startPreviewL(params, true);
1706 if (res != OK) {
1707 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)",
1708 __FUNCTION__, mCameraId, strerror(-res), res);
1709 return res;
1710 }
1711 } else if (params.state == Parameters::RECORD ||
1712 params.state == Parameters::VIDEO_SNAPSHOT) {
1713 res = startRecordingL(params, true);
1714 if (res != OK) {
1715 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
1716 __FUNCTION__, mCameraId, strerror(-res), res);
1717 return res;
1718 }
1719 }
1720 return res;
1721 }
1722
1723
1724 size_t Camera2Client::calculateBufferSize(int width, int height,
1725 int format, int stride) {
1726 switch (format) {
1727 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16
1728 return width * height * 2;
1729 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21
1730 return width * height * 3 / 2;
1731 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2
1732 return width * height * 2;
1733 case HAL_PIXEL_FORMAT_YV12: { // YV12
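// YV12: luma plane uses the given stride; each of the two chroma planes uses
// half the luma stride, rounded up to a multiple of 16.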
1734 size_t ySize = stride * height;
1735 size_t uvStride = (stride / 2 + 0xF) & ~0xF;
1736 size_t uvSize = uvStride * height / 2;
1737 return ySize + uvSize * 2;
1738 }
1739 case HAL_PIXEL_FORMAT_RGB_565:
1740 return width * height * 2;
1741 case HAL_PIXEL_FORMAT_RGBA_8888:
1742 return width * height * 4;
1743 case HAL_PIXEL_FORMAT_RAW_SENSOR:
1744 return width * height * 2;
1745 default:
1746 ALOGE("%s: Unknown preview format: %x",
1747 __FUNCTION__, format);
1748 return 0;
1749 }
1750 }
1751
1752 status_t Camera2Client::syncWithDevice() {
1753 ATRACE_CALL();
1754 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms
1755 status_t res;
1756
1757 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
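// An active request ID of 0 indicates that no streaming request has been
// submitted yet, so there is nothing to wait for.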
1758 if (activeRequestId == 0) return OK;
1759
1760 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout);
1761 if (res == TIMED_OUT) {
1762 ALOGE("%s: Camera %d: Timed out waiting sync with HAL",
1763 __FUNCTION__, mCameraId);
1764 } else if (res != OK) {
1765 ALOGE("%s: Camera %d: Error while waiting to sync with HAL",
1766 __FUNCTION__, mCameraId);
1767 }
1768 return res;
1769 }
1770
1771 template <typename ProcessorT>
1772 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
1773 camera2::Parameters params) {
1774 // No default template arguments until C++11, so we need this overload
1775 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>(
1776 processor, params);
1777 }
1778
1779 template <typename ProcessorT,
1780 status_t (ProcessorT::*updateStreamF)(const Parameters &)>
1781 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
1782 Parameters params) {
1783 status_t res;
1784
1785 // Get raw pointer since sp<T> doesn't have operator->*
1786 ProcessorT *processorPtr = processor.get();
1787 res = (processorPtr->*updateStreamF)(params);
1788
1789 /**
1790 * Can't update the stream if it's busy?
1791 *
1792 * Then we need to stop the device (by temporarily clearing the request
1793 * queue) and then try again. Resume streaming once we're done.
1794 */
1795 if (res == -EBUSY) {
1796 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__,
1797 mCameraId);
1798 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1799 if (res != OK) {
1800 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1801 __FUNCTION__, mCameraId, strerror(-res), res);
1802 }
1803
1804 res = mDevice->waitUntilDrained();
1805 if (res != OK) {
1806 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1807 __FUNCTION__, mCameraId, strerror(-res), res);
1808 }
1809
1810 res = (processorPtr->*updateStreamF)(params);
1811 if (res != OK) {
1812 ALOGE("%s: Camera %d: Failed to update processing stream "
1813 " despite having halted streaming first: %s (%d)",
1814 __FUNCTION__, mCameraId, strerror(-res), res);
1815 }
1816
1817 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
1818 if (res != OK) {
1819 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
1820 __FUNCTION__, mCameraId, strerror(-res), res);
1821 }
1822 }
1823
1824 return res;
1825 }
1826
1827 const char* Camera2Client::kAutofocusLabel = "autofocus";
1828 const char* Camera2Client::kTakepictureLabel = "take_picture";
1829
1830 } // namespace android
1831