1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2Client"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <inttypes.h>
22 #include <utils/Log.h>
23 #include <utils/Trace.h>
24
25 #include <cutils/properties.h>
26 #include <gui/Surface.h>
27 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
28
29 #include "api1/Camera2Client.h"
30
31 #include "api1/client2/StreamingProcessor.h"
32 #include "api1/client2/JpegProcessor.h"
33 #include "api1/client2/CaptureSequencer.h"
34 #include "api1/client2/CallbackProcessor.h"
35 #include "api1/client2/ZslProcessor.h"
36 #include "utils/CameraThreadState.h"
37 #include "utils/CameraServiceProxyWrapper.h"
38
39 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
40 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
41
42 #ifndef FALLTHROUGH_INTENDED
43 #define FALLTHROUGH_INTENDED [[fallthrough]]
44 #endif
45
46 namespace android {
47 using namespace camera2;
48
49 // Interface used by CameraService
50
51 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
52 const sp<hardware::ICameraClient>& cameraClient,
53 const String16& clientPackageName,
54 const std::optional<String16>& clientFeatureId,
55 const String8& cameraDeviceId,
56 int api1CameraId,
57 int cameraFacing,
58 int sensorOrientation,
59 int clientPid,
60 uid_t clientUid,
61 int servicePid,
62 bool overrideForPerfClass):
63 Camera2ClientBase(cameraService, cameraClient, clientPackageName, clientFeatureId,
64 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation,
65 clientPid, clientUid, servicePid, overrideForPerfClass),
66 mParameters(api1CameraId, cameraFacing)
67 {
68 ATRACE_CALL();
69
70 SharedParameters::Lock l(mParameters);
71 l.mParameters.state = Parameters::DISCONNECTED;
72 }
73
74 status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
75 return initializeImpl(manager, monitorTags);
76 }
77
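// Queries the device's STILL_CAPTURE default request template and reports
// whether ANDROID_CONTROL_ENABLE_ZSL is set to TRUE in it. Used during
// initialization to record whether the device enables ZSL in the still template.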
78 bool Camera2Client::isZslEnabledInStillTemplate() {
79 bool zslEnabled = false;
80 CameraMetadata stillTemplate;
81 status_t res = mDevice->createDefaultRequest(
82 camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
83 if (res == OK) {
84 camera_metadata_entry_t enableZsl = stillTemplate.find(ANDROID_CONTROL_ENABLE_ZSL);
85 if (enableZsl.count == 1) {
86 zslEnabled = (enableZsl.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE);
87 }
88 }
89
90 return zslEnabled;
91 }
92
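// Shared initialization path: runs the base class setup, builds the default
// API1 parameters from the device's static metadata, and spins up the
// streaming, frame, capture-sequencer, JPEG, ZSL, and callback processors,
// each on its own named thread ("C2-<id>-...").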
93 template<typename TProviderPtr>
94 status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags)
95 {
96 ATRACE_CALL();
97 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
98 status_t res;
99
100 res = Camera2ClientBase::initialize(providerPtr, monitorTags);
101 if (res != OK) {
102 return res;
103 }
104
105 {
106 SharedParameters::Lock l(mParameters);
107
108 res = l.mParameters.initialize(mDevice.get(), mDeviceVersion);
109 if (res != OK) {
110 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
111 __FUNCTION__, mCameraId, strerror(-res), res);
112 return NO_INIT;
113 }
114
115 l.mParameters.isDeviceZslSupported = isZslEnabledInStillTemplate();
116 }
117
118 String8 threadName;
119
120 mStreamingProcessor = new StreamingProcessor(this);
121 threadName = String8::format("C2-%d-StreamProc",
122 mCameraId);
123
124 mFrameProcessor = new FrameProcessor(mDevice, this);
125 threadName = String8::format("C2-%d-FrameProc",
126 mCameraId);
127 mFrameProcessor->run(threadName.string());
128
129 mCaptureSequencer = new CaptureSequencer(this);
130 threadName = String8::format("C2-%d-CaptureSeq",
131 mCameraId);
132 mCaptureSequencer->run(threadName.string());
133
134 mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
135 threadName = String8::format("C2-%d-JpegProc",
136 mCameraId);
137 mJpegProcessor->run(threadName.string());
138
139 mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
140
141 threadName = String8::format("C2-%d-ZslProc",
142 mCameraId);
143 mZslProcessor->run(threadName.string());
144
145 mCallbackProcessor = new CallbackProcessor(this);
146 threadName = String8::format("C2-%d-CallbkProc",
147 mCameraId);
148 mCallbackProcessor->run(threadName.string());
149
150 if (gLogLevel >= 1) {
151 SharedParameters::Lock l(mParameters);
152 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
153 mCameraId);
154 ALOGD("%s", l.mParameters.paramsFlattened.string());
155 }
156
157 return OK;
158 }
159
160 Camera2Client::~Camera2Client() {
161 ATRACE_CALL();
162 ALOGV("~Camera2Client");
163
164 mDestructionStarted = true;
165
166 disconnect();
167
168 ALOGI("Camera %d: Closed", mCameraId);
169 }
170
171 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
172 return BasicClient::dump(fd, args);
173 }
174
175 status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
176 String8 result;
177 result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
178 (getRemoteCallback() != NULL ?
179 (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
180 mClientPid);
181 result.append(" State: ");
182 #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;
183
184 const Parameters& p = mParameters.unsafeAccess();
185
186 result.append(Parameters::getStateName(p.state));
187
188 result.append("\n Current parameters:\n");
189 result.appendFormat(" Preview size: %d x %d\n",
190 p.previewWidth, p.previewHeight);
191 result.appendFormat(" Preview FPS range: %d - %d\n",
192 p.previewFpsRange[0], p.previewFpsRange[1]);
193 result.appendFormat(" Preview HAL pixel format: 0x%x\n",
194 p.previewFormat);
195 result.appendFormat(" Preview transform: %x\n",
196 p.previewTransform);
197 result.appendFormat(" Picture size: %d x %d\n",
198 p.pictureWidth, p.pictureHeight);
199 result.appendFormat(" Jpeg thumbnail size: %d x %d\n",
200 p.jpegThumbSize[0], p.jpegThumbSize[1]);
201 result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n",
202 p.jpegQuality, p.jpegThumbQuality);
203 result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation);
204 result.appendFormat(" GPS tags %s\n",
205 p.gpsEnabled ? "enabled" : "disabled");
206 if (p.gpsEnabled) {
207 result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n",
208 p.gpsCoordinates[0], p.gpsCoordinates[1],
209 p.gpsCoordinates[2]);
210 result.appendFormat(" GPS timestamp: %" PRId64 "\n",
211 p.gpsTimestamp);
212 result.appendFormat(" GPS processing method: %s\n",
213 p.gpsProcessingMethod.string());
214 }
215
216 result.append(" White balance mode: ");
217 switch (p.wbMode) {
218 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
219 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
220 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
221 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
222 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
223 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
224 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
225 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
226 default: result.append("UNKNOWN\n");
227 }
228
229 result.append(" Effect mode: ");
230 switch (p.effectMode) {
231 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
232 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
233 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
234 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
235 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
236 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
237 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
238 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
239 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
240 default: result.append("UNKNOWN\n");
241 }
242
243 result.append(" Antibanding mode: ");
244 switch (p.antibandingMode) {
245 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
246 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
247 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
248 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
249 default: result.append("UNKNOWN\n");
250 }
251
252 result.append(" Scene mode: ");
253 switch (p.sceneMode) {
254 case ANDROID_CONTROL_SCENE_MODE_DISABLED:
255 result.append("AUTO\n"); break;
256 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
257 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
258 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
259 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)
260 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT)
261 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT)
262 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE)
263 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH)
264 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW)
265 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET)
266 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO)
267 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS)
268 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS)
269 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
270 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
271 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
272 default: result.append("UNKNOWN\n");
273 }
274
275 result.append(" Flash mode: ");
276 switch (p.flashMode) {
277 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
278 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
279 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON)
280 CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
281 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
282 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
283 default: result.append("UNKNOWN\n");
284 }
285
286 result.append(" Focus mode: ");
287 switch (p.focusMode) {
288 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
289 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
290 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO)
291 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE)
292 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF)
293 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
294 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
295 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
296 default: result.append("UNKNOWN\n");
297 }
298
299 result.append(" Focus state: ");
300 switch (p.focusState) {
301 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
302 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
303 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
304 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
305 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
306 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
307 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
308 default: result.append("UNKNOWN\n");
309 }
310
311 result.append(" Focusing areas:\n");
312 for (size_t i = 0; i < p.focusingAreas.size(); i++) {
313 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
314 p.focusingAreas[i].left,
315 p.focusingAreas[i].top,
316 p.focusingAreas[i].right,
317 p.focusingAreas[i].bottom,
318 p.focusingAreas[i].weight);
319 }
320
321 result.appendFormat(" Exposure compensation index: %d\n",
322 p.exposureCompensation);
323
324 result.appendFormat(" AE lock %s, AWB lock %s\n",
325 p.autoExposureLock ? "enabled" : "disabled",
326 p.autoWhiteBalanceLock ? "enabled" : "disabled" );
327
328 result.appendFormat(" Metering areas:\n");
329 for (size_t i = 0; i < p.meteringAreas.size(); i++) {
330 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
331 p.meteringAreas[i].left,
332 p.meteringAreas[i].top,
333 p.meteringAreas[i].right,
334 p.meteringAreas[i].bottom,
335 p.meteringAreas[i].weight);
336 }
337
338 result.appendFormat(" Zoom index: %d\n", p.zoom);
339 result.appendFormat(" Video size: %d x %d\n", p.videoWidth,
340 p.videoHeight);
341
342 result.appendFormat(" Recording hint is %s\n",
343 p.recordingHint ? "set" : "not set");
344
345 result.appendFormat(" Video stabilization is %s\n",
346 p.videoStabilization ? "enabled" : "disabled");
347
348 result.appendFormat(" Selected still capture FPS range: %d - %d\n",
349 p.fastInfo.bestStillCaptureFpsRange[0],
350 p.fastInfo.bestStillCaptureFpsRange[1]);
351
352 result.appendFormat(" Use zero shutter lag: %s\n",
353 p.useZeroShutterLag() ? "yes" : "no");
354
355 result.append(" Current streams:\n");
356 result.appendFormat(" Preview stream ID: %d\n",
357 getPreviewStreamId());
358 result.appendFormat(" Capture stream ID: %d\n",
359 getCaptureStreamId());
360 result.appendFormat(" Recording stream ID: %d\n",
361 getRecordingStreamId());
362
363 result.append(" Quirks for this camera:\n");
364 bool haveQuirk = false;
365 if (p.quirks.triggerAfWithAuto) {
366 result.appendFormat(" triggerAfWithAuto\n");
367 haveQuirk = true;
368 }
369 if (p.quirks.useZslFormat) {
370 result.appendFormat(" useZslFormat\n");
371 haveQuirk = true;
372 }
373 if (p.quirks.meteringCropRegion) {
374 result.appendFormat(" meteringCropRegion\n");
375 haveQuirk = true;
376 }
377 if (p.quirks.partialResults) {
378 result.appendFormat(" usePartialResult\n");
379 haveQuirk = true;
380 }
381 if (!haveQuirk) {
382 result.appendFormat(" none\n");
383 }
384
385 write(fd, result.string(), result.size());
386
387 mStreamingProcessor->dump(fd, args);
388
389 mCaptureSequencer->dump(fd, args);
390
391 mFrameProcessor->dump(fd, args);
392
393 mZslProcessor->dump(fd, args);
394
395 return dumpDevice(fd, args);
396 #undef CASE_APPEND_ENUM
397 }
398
399 // ICamera interface
400
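// Tears the client down: stops preview, marks the parameters DISCONNECTED,
// asks each processor thread to exit and joins them (with the binder
// serialization lock dropped so callbacks can re-enter), deletes all streams,
// disconnects the device, and logs the close latency.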
401 binder::Status Camera2Client::disconnect() {
402 ATRACE_CALL();
403 nsecs_t startTime = systemTime();
404 Mutex::Autolock icl(mBinderSerializationLock);
405
406 binder::Status res = binder::Status::ok();
407 // Allow both client and the cameraserver to disconnect at all times
408 int callingPid = CameraThreadState::getCallingPid();
409 if (callingPid != mClientPid && callingPid != mServicePid) return res;
410
411 if (mDevice == 0) return res;
412
413 ALOGV("Camera %d: Shutting down", mCameraId);
414
415 /**
416 * disconnect() cannot call any methods that might need to promote a
417 * wp<Camera2Client>, since disconnect can be called from the destructor, at
418 * which point all such promotions will fail.
419 */
420
421 stopPreviewL();
422
423 {
424 SharedParameters::Lock l(mParameters);
425 if (l.mParameters.state == Parameters::DISCONNECTED) return res;
426 l.mParameters.state = Parameters::DISCONNECTED;
427 }
428
429 mFrameProcessor->requestExit();
430 mCaptureSequencer->requestExit();
431 mJpegProcessor->requestExit();
432 mZslProcessor->requestExit();
433 mCallbackProcessor->requestExit();
434
435 ALOGV("Camera %d: Waiting for threads", mCameraId);
436
437 {
438 // Don't wait with lock held, in case the other threads need to
439 // complete callbacks that re-enter Camera2Client
440 mBinderSerializationLock.unlock();
441
442 mFrameProcessor->join();
443 mCaptureSequencer->join();
444 mJpegProcessor->join();
445 mZslProcessor->join();
446 mCallbackProcessor->join();
447
448 mBinderSerializationLock.lock();
449 }
450
451 ALOGV("Camera %d: Deleting streams", mCameraId);
452
453 mStreamingProcessor->deletePreviewStream();
454 mStreamingProcessor->deleteRecordingStream();
455 mJpegProcessor->deleteStream();
456 mCallbackProcessor->deleteStream();
457 mZslProcessor->deleteStream();
458
459 ALOGV("Camera %d: Disconnecting device", mCameraId);
460
461 mDevice->disconnect();
462
463 CameraService::Client::disconnect();
464
465 int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
466 CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
467
468 return res;
469 }
470
471 status_t Camera2Client::connect(const sp<hardware::ICameraClient>& client) {
472 ATRACE_CALL();
473 ALOGV("%s: E", __FUNCTION__);
474 Mutex::Autolock icl(mBinderSerializationLock);
475
476 if (mClientPid != 0 && CameraThreadState::getCallingPid() != mClientPid) {
477 ALOGE("%s: Camera %d: Connection attempt from pid %d; "
478 "current locked to pid %d", __FUNCTION__,
479 mCameraId, CameraThreadState::getCallingPid(), mClientPid);
480 return BAD_VALUE;
481 }
482
483 mClientPid = CameraThreadState::getCallingPid();
484
485 mRemoteCallback = client;
486 mSharedCameraCallbacks = client;
487
488 return OK;
489 }
490
491 status_t Camera2Client::lock() {
492 ATRACE_CALL();
493 ALOGV("%s: E", __FUNCTION__);
494 Mutex::Autolock icl(mBinderSerializationLock);
495 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
496 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
497
498 if (mClientPid == 0) {
499 mClientPid = CameraThreadState::getCallingPid();
500 return OK;
501 }
502
503 if (mClientPid != CameraThreadState::getCallingPid()) {
504 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
505 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
506 return EBUSY;
507 }
508
509 return OK;
510 }
511
512 status_t Camera2Client::unlock() {
513 ATRACE_CALL();
514 ALOGV("%s: E", __FUNCTION__);
515 Mutex::Autolock icl(mBinderSerializationLock);
516 ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
517 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
518
519 if (mClientPid == CameraThreadState::getCallingPid()) {
520 SharedParameters::Lock l(mParameters);
521 if (l.mParameters.state == Parameters::RECORD ||
522 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
523 ALOGD("Not allowed to unlock camera during recording.");
524 return INVALID_OPERATION;
525 }
526 mClientPid = 0;
527 mRemoteCallback.clear();
528 mSharedCameraCallbacks.clear();
529 return OK;
530 }
531
532 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
533 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
534 return EBUSY;
535 }
536
537 status_t Camera2Client::setPreviewTarget(
538 const sp<IGraphicBufferProducer>& bufferProducer) {
539 ATRACE_CALL();
540 ALOGV("%s: E", __FUNCTION__);
541 Mutex::Autolock icl(mBinderSerializationLock);
542 status_t res;
543 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
544
545 sp<IBinder> binder;
546 sp<Surface> window;
547 if (bufferProducer != 0) {
548 binder = IInterface::asBinder(bufferProducer);
549 // Using controlledByApp flag to ensure that the buffer queue remains in
550 // async mode for the old camera API, where many applications depend
551 // on that behavior.
552 window = new Surface(bufferProducer, /*controlledByApp*/ true);
553 }
554 return setPreviewWindowL(binder, window);
555 }
556
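// Swaps the preview output window. A no-op if the same binder is passed again;
// rejected while disconnected, recording, or capturing. If preview is already
// running, the stream is stopped first and then restarted on the new window.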
557 status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
558 const sp<Surface>& window) {
559 ATRACE_CALL();
560 status_t res;
561
562 if (binder == mPreviewSurface) {
563 ALOGV("%s: Camera %d: New window is same as old window",
564 __FUNCTION__, mCameraId);
565 return NO_ERROR;
566 }
567
568 Parameters::State state;
569 {
570 SharedParameters::Lock l(mParameters);
571 state = l.mParameters.state;
572 }
573 switch (state) {
574 case Parameters::DISCONNECTED:
575 case Parameters::RECORD:
576 case Parameters::STILL_CAPTURE:
577 case Parameters::VIDEO_SNAPSHOT:
578 ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
579 __FUNCTION__, mCameraId,
580 Parameters::getStateName(state));
581 return INVALID_OPERATION;
582 case Parameters::STOPPED:
583 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
584 // OK
585 break;
586 case Parameters::PREVIEW:
587 // Already running preview - need to stop and create a new stream
588 res = stopStream();
589 if (res != OK) {
590 ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
591 __FUNCTION__, strerror(-res), res);
592 return res;
593 }
594 state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
595 break;
596 }
597
598 mPreviewSurface = binder;
599 res = mStreamingProcessor->setPreviewWindow(window);
600 if (res != OK) {
601 ALOGE("%s: Unable to set new preview window: %s (%d)",
602 __FUNCTION__, strerror(-res), res);
603 return res;
604 }
605
606 if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) {
607 SharedParameters::Lock l(mParameters);
608 l.mParameters.state = state;
609 return startPreviewL(l.mParameters, false);
610 }
611
612 return OK;
613 }
614
615 void Camera2Client::setPreviewCallbackFlag(int flag) {
616 ATRACE_CALL();
617 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
618 Mutex::Autolock icl(mBinderSerializationLock);
619
620 if ( checkPid(__FUNCTION__) != OK) return;
621
622 SharedParameters::Lock l(mParameters);
623 setPreviewCallbackFlagL(l.mParameters, flag);
624 }
625
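// Applies a preview callback flag change: records one-shot requests, clears
// any callback surface when legacy callback flags are enabled, and restarts
// the preview request if preview is currently running so the change takes effect.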
626 void Camera2Client::setPreviewCallbackFlagL(Parameters &params, int flag) {
627 status_t res = OK;
628
629 switch(params.state) {
630 case Parameters::STOPPED:
631 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
632 case Parameters::PREVIEW:
633 case Parameters::STILL_CAPTURE:
634 // OK
635 break;
636 default:
637 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
638 ALOGE("%s: Camera %d: Can't use preview callbacks "
639 "in state %d", __FUNCTION__, mCameraId, params.state);
640 return;
641 }
642 }
643
644 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
645 ALOGV("%s: setting oneshot", __FUNCTION__);
646 params.previewCallbackOneShot = true;
647 }
648 if (params.previewCallbackFlags != (uint32_t)flag) {
649
650 if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
651 // Disable any existing preview callback window when enabling
652 // preview callback flags
653 res = mCallbackProcessor->setCallbackWindow(NULL);
654 if (res != OK) {
655 ALOGE("%s: Camera %d: Unable to clear preview callback surface:"
656 " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
657 return;
658 }
659 params.previewCallbackSurface = false;
660 }
661
662 params.previewCallbackFlags = flag;
663
664 if (params.state == Parameters::PREVIEW) {
665 res = startPreviewL(params, true);
666 if (res != OK) {
667 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
668 __FUNCTION__, mCameraId,
669 Parameters::getStateName(params.state));
670 }
671 }
672 }
673 }
674
675 status_t Camera2Client::setPreviewCallbackTarget(
676 const sp<IGraphicBufferProducer>& callbackProducer) {
677 ATRACE_CALL();
678 ALOGV("%s: E", __FUNCTION__);
679 Mutex::Autolock icl(mBinderSerializationLock);
680 status_t res;
681 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
682
683 sp<Surface> window;
684 if (callbackProducer != 0) {
685 window = new Surface(callbackProducer);
686 }
687
688 res = mCallbackProcessor->setCallbackWindow(window);
689 if (res != OK) {
690 ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
691 __FUNCTION__, mCameraId, strerror(-res), res);
692 return res;
693 }
694
695 SharedParameters::Lock l(mParameters);
696
697 if (window != NULL) {
698 // Disable traditional callbacks when a valid callback target is given
699 l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
700 l.mParameters.previewCallbackOneShot = false;
701 l.mParameters.previewCallbackSurface = true;
702 } else {
703 // Disable callback target if given a NULL interface.
704 l.mParameters.previewCallbackSurface = false;
705 }
706
707 switch(l.mParameters.state) {
708 case Parameters::PREVIEW:
709 res = startPreviewL(l.mParameters, true);
710 break;
711 case Parameters::RECORD:
712 case Parameters::VIDEO_SNAPSHOT:
713 res = startRecordingL(l.mParameters, true);
714 break;
715 default:
716 break;
717 }
718 if (res != OK) {
719 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
720 __FUNCTION__, mCameraId,
721 Parameters::getStateName(l.mParameters.state));
722 }
723
724 return OK;
725 }
726
727
728 status_t Camera2Client::startPreview() {
729 ATRACE_CALL();
730 ALOGV("%s: E", __FUNCTION__);
731 Mutex::Autolock icl(mBinderSerializationLock);
732 status_t res;
733 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
734 SharedParameters::Lock l(mParameters);
735 return startPreviewL(l.mParameters, false);
736 }
737
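// Core preview start/restart path. Rebuilds the preview stream, manages the
// JPEG stream (pre-created unless slowJpegMode), the preview callback stream,
// and the ZSL stream as dictated by the current parameters, then starts
// streaming with either the preview or recording request template depending
// on the recording hint.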
738 status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
739 ATRACE_CALL();
740 status_t res;
741
742 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
743
744 if (params.state == Parameters::DISCONNECTED) {
745 ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
746 return INVALID_OPERATION;
747 }
748 if ( (params.state == Parameters::PREVIEW ||
749 params.state == Parameters::RECORD ||
750 params.state == Parameters::VIDEO_SNAPSHOT)
751 && !restart) {
752 // Succeed attempt to re-enter a streaming state
753 ALOGI("%s: Camera %d: Preview already active, ignoring restart",
754 __FUNCTION__, mCameraId);
755 return OK;
756 }
757 if (params.state > Parameters::PREVIEW && !restart) {
758 ALOGE("%s: Can't start preview in state %s",
759 __FUNCTION__,
760 Parameters::getStateName(params.state));
761 return INVALID_OPERATION;
762 }
763
764 if (!mStreamingProcessor->haveValidPreviewWindow()) {
765 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
766 return OK;
767 }
768 params.state = Parameters::STOPPED;
769 int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();
770
771 res = mStreamingProcessor->updatePreviewStream(params);
772 if (res != OK) {
773 ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)",
774 __FUNCTION__, mCameraId, strerror(-res), res);
775 return res;
776 }
777
778 bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;
779
780 // We could wait to create the JPEG output stream until first actual use
781 // (first takePicture call). However, this would substantially increase the
782 // first capture latency on HAL3 devices.
783 // So create it unconditionally at preview start. As a drawback,
784 // this increases gralloc memory consumption for applications that don't
785 // ever take a picture. Do not enter this mode when jpeg stream will slow
786 // down preview.
787 // TODO: Find a better compromise, though this likely would involve HAL
788 // changes.
789 int lastJpegStreamId = mJpegProcessor->getStreamId();
790 // If jpeg stream will slow down preview, make sure we remove it before starting preview
791 if (params.slowJpegMode) {
792 if (lastJpegStreamId != NO_STREAM) {
793 // Pause preview if we are streaming
794 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
795 if (activeRequestId != 0) {
796 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
797 if (res != OK) {
798 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
799 __FUNCTION__, mCameraId, strerror(-res), res);
800 }
801 res = mDevice->waitUntilDrained();
802 if (res != OK) {
803 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
804 __FUNCTION__, mCameraId, strerror(-res), res);
805 }
806 }
807
808 res = mJpegProcessor->deleteStream();
809
810 if (res != OK) {
811 ALOGE("%s: Camera %d: delete Jpeg stream failed: %s (%d)",
812 __FUNCTION__, mCameraId, strerror(-res), res);
813 }
814
815 if (activeRequestId != 0) {
816 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
817 if (res != OK) {
818 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
819 __FUNCTION__, mCameraId, strerror(-res), res);
820 }
821 }
822 }
823 } else {
824 res = updateProcessorStream(mJpegProcessor, params);
825 if (res != OK) {
826 ALOGE("%s: Camera %d: Can't pre-configure still image "
827 "stream: %s (%d)",
828 __FUNCTION__, mCameraId, strerror(-res), res);
829 return res;
830 }
831 }
832 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
833
834 Vector<int32_t> outputStreams;
835 bool callbacksEnabled = (params.previewCallbackFlags &
836 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
837 params.previewCallbackSurface;
838
839 if (callbacksEnabled) {
840 // Can't have recording stream hanging around when enabling callbacks,
841 // since it exceeds the max stream count on some devices.
842 if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
843 ALOGV("%s: Camera %d: Clearing out recording stream before "
844 "creating callback stream", __FUNCTION__, mCameraId);
845 res = mStreamingProcessor->stopStream();
846 if (res != OK) {
847 ALOGE("%s: Camera %d: Can't stop streaming to delete "
848 "recording stream", __FUNCTION__, mCameraId);
849 return res;
850 }
851 res = mStreamingProcessor->deleteRecordingStream();
852 if (res != OK) {
853 ALOGE("%s: Camera %d: Unable to delete recording stream before "
854 "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId,
855 strerror(-res), res);
856 return res;
857 }
858 }
859
860 res = mCallbackProcessor->updateStream(params);
861 if (res != OK) {
862 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
863 __FUNCTION__, mCameraId, strerror(-res), res);
864 return res;
865 }
866 outputStreams.push(getCallbackStreamId());
867 } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
868 /**
869 * Delete the unused callback stream when the preview stream has changed and
870 * preview callbacks are not enabled. There is no need to stop the preview
871 * stream, as preview is in the STOPPED state now.
872 */
873 ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId);
874 res = mCallbackProcessor->deleteStream();
875 if (res != OK) {
876 ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
877 __FUNCTION__, mCameraId, strerror(-res), res);
878 return res;
879 }
880 }
881
882 if (params.useZeroShutterLag() &&
883 getRecordingStreamId() == NO_STREAM) {
884 res = updateProcessorStream(mZslProcessor, params);
885 if (res != OK) {
886 ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
887 __FUNCTION__, mCameraId, strerror(-res), res);
888 return res;
889 }
890
891 if (jpegStreamChanged) {
892 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
893 __FUNCTION__, mCameraId);
894 mZslProcessor->clearZslQueue();
895 }
896 outputStreams.push(getZslStreamId());
897 } else {
898 mZslProcessor->deleteStream();
899 }
900
901 outputStreams.push(getPreviewStreamId());
902
903 if (params.isDeviceZslSupported) {
904 // If device ZSL is supported, resume preview buffers that may be paused
905 // during last takePicture().
906 mDevice->dropStreamBuffers(false, getPreviewStreamId());
907 }
908
909 if (!params.recordingHint) {
910 if (!restart) {
911 res = mStreamingProcessor->updatePreviewRequest(params);
912 if (res != OK) {
913 ALOGE("%s: Camera %d: Can't set up preview request: "
914 "%s (%d)", __FUNCTION__, mCameraId,
915 strerror(-res), res);
916 return res;
917 }
918 }
919 res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW,
920 outputStreams);
921 } else {
922 if (!restart) {
923 res = mStreamingProcessor->updateRecordingRequest(params);
924 if (res != OK) {
925 ALOGE("%s: Camera %d: Can't set up preview request with "
926 "record hint: %s (%d)", __FUNCTION__, mCameraId,
927 strerror(-res), res);
928 return res;
929 }
930 }
931 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
932 outputStreams);
933 }
934 if (res != OK) {
935 ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)",
936 __FUNCTION__, mCameraId, strerror(-res), res);
937 return res;
938 }
939
940 mCallbackProcessor->unpauseCallback();
941 params.state = Parameters::PREVIEW;
942 return OK;
943 }
944
945 void Camera2Client::stopPreview() {
946 ATRACE_CALL();
947 ALOGV("%s: E", __FUNCTION__);
948 Mutex::Autolock icl(mBinderSerializationLock);
949 status_t res;
950 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
951 stopPreviewL();
952 }
953
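// Stops preview according to the current state: waits for any in-flight still
// capture, pauses callbacks, syncs with the device, stops and flushes the
// streaming request, waits for the device to drain, deletes the recording
// stream, and finally moves the parameters to STOPPED.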
954 void Camera2Client::stopPreviewL() {
955 ATRACE_CALL();
956 status_t res;
957 const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
958 Parameters::State state;
959 {
960 SharedParameters::Lock l(mParameters);
961 state = l.mParameters.state;
962 }
963
964 switch (state) {
965 case Parameters::DISCONNECTED:
966 // Nothing to do.
967 break;
968 case Parameters::STOPPED:
969 case Parameters::VIDEO_SNAPSHOT:
970 case Parameters::STILL_CAPTURE:
971 mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout);
972 FALLTHROUGH_INTENDED;
973 case Parameters::RECORD:
974 case Parameters::PREVIEW:
975 mCallbackProcessor->pauseCallback();
976 syncWithDevice();
977 // Due to the flush, a camera device sync is not a sufficient
978 // guarantee that the current client parameters have been
979 // correctly applied. To resolve this, wait for the current
980 // request id to return in the results.
981 waitUntilCurrentRequestIdLocked();
982 res = stopStream();
983 if (res != OK) {
984 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
985 __FUNCTION__, mCameraId, strerror(-res), res);
986 }
987
988 // Flush all in-process captures and buffer in order to stop
989 // preview faster.
990 res = mDevice->flush();
991 if (res != OK) {
992 ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
993 __FUNCTION__, mCameraId, strerror(-res), res);
994 }
995
996 res = mDevice->waitUntilDrained();
997 if (res != OK) {
998 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
999 __FUNCTION__, mCameraId, strerror(-res), res);
1000 }
1001 // Clean up recording stream
1002 res = mStreamingProcessor->deleteRecordingStream();
1003 if (res != OK) {
1004 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1005 "stop preview: %s (%d)",
1006 __FUNCTION__, mCameraId, strerror(-res), res);
1007 }
1008 FALLTHROUGH_INTENDED;
1009 case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
1010 SharedParameters::Lock l(mParameters);
1011 l.mParameters.state = Parameters::STOPPED;
1012 commandStopFaceDetectionL(l.mParameters);
1013 break;
1014 }
1015 default:
1016 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId,
1017 state);
1018 }
1019 }
1020
1021 bool Camera2Client::previewEnabled() {
1022 ATRACE_CALL();
1023 Mutex::Autolock icl(mBinderSerializationLock);
1024 status_t res;
1025 if ( (res = checkPid(__FUNCTION__) ) != OK) return false;
1026
1027 SharedParameters::Lock l(mParameters);
1028 return l.mParameters.state == Parameters::PREVIEW;
1029 }
1030
1031 status_t Camera2Client::setVideoBufferMode(int32_t videoBufferMode) {
1032 ATRACE_CALL();
1033 Mutex::Autolock icl(mBinderSerializationLock);
1034 status_t res;
1035 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1036
1037 SharedParameters::Lock l(mParameters);
1038 switch (l.mParameters.state) {
1039 case Parameters::RECORD:
1040 case Parameters::VIDEO_SNAPSHOT:
1041 ALOGE("%s: Camera %d: Can't be called in state %s",
1042 __FUNCTION__, mCameraId,
1043 Parameters::getStateName(l.mParameters.state));
1044 return INVALID_OPERATION;
1045 default:
1046 // OK
1047 break;
1048 }
1049
1050 if (videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1051 ALOGE("%s: %d: Only video buffer queue is supported", __FUNCTION__, __LINE__);
1052 return BAD_VALUE;
1053 }
1054
1055 l.mParameters.videoBufferMode = videoBufferMode;
1056
1057 return OK;
1058 }
1059
1060 status_t Camera2Client::startRecording() {
1061 ATRACE_CALL();
1062 ALOGV("%s: E", __FUNCTION__);
1063 Mutex::Autolock icl(mBinderSerializationLock);
1064 status_t res;
1065 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1066 SharedParameters::Lock l(mParameters);
1067
1068 return startRecordingL(l.mParameters, false);
1069 }
1070
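// Core recording start path. Makes sure preview is running, tears down the
// preview callback and ZSL streams (not all devices can keep them alongside a
// recording stream), updates the recording stream if its configuration changed,
// and starts streaming with the recording request; on a BAD_VALUE from stream
// configuration it retries with the video snapshot size overridden to the
// video size.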
1071 status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
1072 status_t res = OK;
1073
1074 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
1075
1076 switch (params.state) {
1077 case Parameters::STOPPED:
1078 res = startPreviewL(params, false);
1079 if (res != OK) return res;
1080 // Make sure the first preview request is submitted to the HAL device to avoid
1081 // two consecutive sets of configure_streams calls into the HAL.
1082 // TODO: Refactor this to avoid initial preview configuration.
1083 syncWithDevice();
1084 break;
1085 case Parameters::PREVIEW:
1086 // Ready to go
1087 break;
1088 case Parameters::RECORD:
1089 case Parameters::VIDEO_SNAPSHOT:
1090 // OK to call this when recording is already on, just skip unless
1091 // we're looking to restart
1092 if (!restart) return OK;
1093 break;
1094 default:
1095 ALOGE("%s: Camera %d: Can't start recording in state %s",
1096 __FUNCTION__, mCameraId,
1097 Parameters::getStateName(params.state));
1098 return INVALID_OPERATION;
1099 };
1100
1101 if (params.videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1102 ALOGE("%s: Camera %d: Recording only supports buffer queue mode, but "
1103 "mode %d was requested!", __FUNCTION__, mCameraId, params.videoBufferMode);
1104 return INVALID_OPERATION;
1105 }
1106
1107 if (!mStreamingProcessor->haveValidRecordingWindow()) {
1108 ALOGE("%s: No valid recording window", __FUNCTION__);
1109 return INVALID_OPERATION;
1110 }
1111
1112 if (!restart) {
1113 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1114 res = mStreamingProcessor->updateRecordingRequest(params);
1115 if (res != OK) {
1116 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
1117 __FUNCTION__, mCameraId, strerror(-res), res);
1118 return res;
1119 }
1120 }
1121
1122 // Not all devices can support a preview callback stream and a recording
1123 // stream at the same time, so assume none of them can.
1124 if (mCallbackProcessor->getStreamId() != NO_STREAM) {
1125 ALOGV("%s: Camera %d: Clearing out callback stream before "
1126 "creating recording stream", __FUNCTION__, mCameraId);
1127 res = mStreamingProcessor->stopStream();
1128 if (res != OK) {
1129 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1130 __FUNCTION__, mCameraId);
1131 return res;
1132 }
1133 res = mCallbackProcessor->deleteStream();
1134 if (res != OK) {
1135 ALOGE("%s: Camera %d: Unable to delete callback stream before "
1136 "record: %s (%d)", __FUNCTION__, mCameraId,
1137 strerror(-res), res);
1138 return res;
1139 }
1140 }
1141
1142 // Clean up ZSL before transitioning into recording
1143 if (mZslProcessor->getStreamId() != NO_STREAM) {
1144 ALOGV("%s: Camera %d: Clearing out zsl stream before "
1145 "creating recording stream", __FUNCTION__, mCameraId);
1146 res = mStreamingProcessor->stopStream();
1147 if (res != OK) {
1148 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1149 __FUNCTION__, mCameraId);
1150 return res;
1151 }
1152 res = mDevice->waitUntilDrained();
1153 if (res != OK) {
1154 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1155 __FUNCTION__, mCameraId, strerror(-res), res);
1156 }
1157 res = mZslProcessor->clearZslQueue();
1158 if (res != OK) {
1159 ALOGE("%s: Camera %d: Can't clear zsl queue",
1160 __FUNCTION__, mCameraId);
1161 return res;
1162 }
1163 res = mZslProcessor->deleteStream();
1164 if (res != OK) {
1165 ALOGE("%s: Camera %d: Unable to delete zsl stream before "
1166 "record: %s (%d)", __FUNCTION__, mCameraId,
1167 strerror(-res), res);
1168 return res;
1169 }
1170 }
1171
1172 // Disable callbacks if they're enabled; can't record and use callbacks,
1173 // and we can't fail record start without stagefright asserting.
1174 params.previewCallbackFlags = 0;
1175
1176 // May need to reconfigure video snapshot JPEG sizes
1177 // during recording startup, so need a more complex sequence here to
1178 // ensure an early stream reconfiguration doesn't happen
1179 bool recordingStreamNeedsUpdate;
1180 res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
1181 if (res != OK) {
1182 ALOGE("%s: Camera %d: Can't query recording stream",
1183 __FUNCTION__, mCameraId);
1184 return res;
1185 }
1186
1187 if (recordingStreamNeedsUpdate) {
1188 // Need to stop stream here so updateProcessorStream won't trigger configureStream
1189 // Right now camera device cannot handle configureStream failure gracefully
1190 // when device is streaming
1191 res = mStreamingProcessor->stopStream();
1192 if (res != OK) {
1193 ALOGE("%s: Camera %d: Can't stop streaming to update record "
1194 "stream", __FUNCTION__, mCameraId);
1195 return res;
1196 }
1197 res = mDevice->waitUntilDrained();
1198 if (res != OK) {
1199 ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
1200 "%s (%d)", __FUNCTION__, mCameraId,
1201 strerror(-res), res);
1202 }
1203
1204 res = updateProcessorStream<
1205 StreamingProcessor,
1206 &StreamingProcessor::updateRecordingStream>(
1207 mStreamingProcessor,
1208 params);
1209 if (res != OK) {
1210 ALOGE("%s: Camera %d: Unable to update recording stream: "
1211 "%s (%d)", __FUNCTION__, mCameraId,
1212 strerror(-res), res);
1213 return res;
1214 }
1215 }
1216
1217 Vector<int32_t> outputStreams;
1218 outputStreams.push(getPreviewStreamId());
1219 outputStreams.push(getRecordingStreamId());
1220
1221 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1222 outputStreams);
1223
1224 // startStream might trigger a configureStream call and device might fail
1225 // configureStream due to jpeg size > video size. Try again with jpeg size overridden
1226 // to video size.
1227 if (res == BAD_VALUE) {
1228 overrideVideoSnapshotSize(params);
1229 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1230 outputStreams);
1231 }
1232
1233 if (res != OK) {
1234 ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)",
1235 __FUNCTION__, mCameraId, strerror(-res), res);
1236 return res;
1237 }
1238
1239 if (params.state < Parameters::RECORD) {
1240 params.state = Parameters::RECORD;
1241 }
1242
1243 return OK;
1244 }
1245
1246 void Camera2Client::stopRecording() {
1247 ATRACE_CALL();
1248 ALOGV("%s: E", __FUNCTION__);
1249 Mutex::Autolock icl(mBinderSerializationLock);
1250 SharedParameters::Lock l(mParameters);
1251
1252 status_t res;
1253 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
1254
1255 switch (l.mParameters.state) {
1256 case Parameters::RECORD:
1257 // OK to stop
1258 break;
1259 case Parameters::STOPPED:
1260 case Parameters::PREVIEW:
1261 case Parameters::STILL_CAPTURE:
1262 case Parameters::VIDEO_SNAPSHOT:
1263 default:
1264 ALOGE("%s: Camera %d: Can't stop recording in state %s",
1265 __FUNCTION__, mCameraId,
1266 Parameters::getStateName(l.mParameters.state));
1267 return;
1268 };
1269
1270 sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
1271
1272 // Remove recording stream because the video target may be abandoned soon.
1273 res = stopStream();
1274 if (res != OK) {
1275 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1276 __FUNCTION__, mCameraId, strerror(-res), res);
1277 }
1278
1279 res = mDevice->waitUntilDrained();
1280 if (res != OK) {
1281 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1282 __FUNCTION__, mCameraId, strerror(-res), res);
1283 }
1284 // Clean up recording stream
1285 res = mStreamingProcessor->deleteRecordingStream();
1286 if (res != OK) {
1287 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1288 "stop preview: %s (%d)",
1289 __FUNCTION__, mCameraId, strerror(-res), res);
1290 }
1291 l.mParameters.recoverOverriddenJpegSize();
1292
1293 // Restart preview
1294 res = startPreviewL(l.mParameters, true);
1295 if (res != OK) {
1296 ALOGE("%s: Camera %d: Unable to return to preview",
1297 __FUNCTION__, mCameraId);
1298 }
1299 }
1300
1301 bool Camera2Client::recordingEnabled() {
1302 ATRACE_CALL();
1303 Mutex::Autolock icl(mBinderSerializationLock);
1304
1305 if ( checkPid(__FUNCTION__) != OK) return false;
1306
1307 return recordingEnabledL();
1308 }
1309
1310 bool Camera2Client::recordingEnabledL() {
1311 ATRACE_CALL();
1312 SharedParameters::Lock l(mParameters);
1313
1314 return (l.mParameters.state == Parameters::RECORD
1315 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
1316 }
1317
1318 void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
1319 (void)mem;
1320 ATRACE_CALL();
1321 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1322 }
1323
1324 void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
1325 (void)handle;
1326 ATRACE_CALL();
1327 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1328 }
1329
1330 void Camera2Client::releaseRecordingFrameHandleBatch(
1331 const std::vector<native_handle_t*>& handles) {
1332 (void)handles;
1333 ATRACE_CALL();
1334 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1335 }
1336
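// Triggers auto-focus. For FIXED/INFINITY focus modes, or when a continuous
// focus mode is already locked, the focus callback is sent back immediately
// without touching the HAL. Otherwise this applies the triggerAfWithAuto quirk
// if needed, syncs with the device, and issues an AF trigger with a fresh
// trigger id.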
1337 status_t Camera2Client::autoFocus() {
1338 ATRACE_CALL();
1339 Mutex::Autolock icl(mBinderSerializationLock);
1340 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1341 status_t res;
1342 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1343
1344 int triggerId;
1345 bool notifyImmediately = false;
1346 bool notifySuccess = false;
1347 {
1348 SharedParameters::Lock l(mParameters);
1349 if (l.mParameters.state < Parameters::PREVIEW) {
1350 ALOGE("%s: Camera %d: autoFocus called while preview is inactive (state = %d).",
1351 __FUNCTION__, mCameraId, l.mParameters.state);
1352 return INVALID_OPERATION;
1353 }
1354
1355 /**
1356 * If the camera does not support auto-focus, it is a no-op and
1357 * onAutoFocus(boolean, Camera) callback will be called immediately
1358 * with a fake value of success set to true.
1359 *
1360 * Similarly, if focus mode is set to INFINITY, there's no reason to
1361 * bother the HAL.
1362 */
1363 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1364 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1365 notifyImmediately = true;
1366 notifySuccess = true;
1367 }
1368 /**
1369 * If we're in CAF mode, and AF has already been locked, just fire back
1370 * the callback right away; the HAL would not send a notification since
1371 * no state change would happen on a AF trigger.
1372 */
1373 if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE ||
1374 l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) &&
1375 l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) {
1376 notifyImmediately = true;
1377 notifySuccess = true;
1378 }
1379 /**
1380 * Send immediate notification back to client
1381 */
1382 if (notifyImmediately) {
1383 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1384 if (l.mRemoteCallback != 0) {
1385 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1386 notifySuccess ? 1 : 0, 0);
1387 }
1388 return OK;
1389 }
1390 /**
1391 * Handle quirk mode for AF in scene modes
1392 */
1393 if (l.mParameters.quirks.triggerAfWithAuto &&
1394 l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
1395 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
1396 !l.mParameters.focusingAreas[0].isEmpty()) {
1397 ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
1398 __FUNCTION__, l.mParameters.focusMode);
1399 l.mParameters.shadowFocusMode = l.mParameters.focusMode;
1400 l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO;
1401 updateRequests(l.mParameters);
1402 }
1403
1404 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
1405 triggerId = l.mParameters.currentAfTriggerId;
1406 }
1407 ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);
1408
1409 syncWithDevice();
1410
1411 mDevice->triggerAutofocus(triggerId);
1412
1413 return OK;
1414 }
1415
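// Cancels an in-progress auto-focus. No-op in FIXED/INFINITY modes; restores
// the focus mode shadowed by the triggerAfWithAuto quirk if one is pending,
// otherwise clears the ZSL queue (when ZSL is allowed) and sends a cancel
// trigger to the device.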
1416 status_t Camera2Client::cancelAutoFocus() {
1417 ATRACE_CALL();
1418 Mutex::Autolock icl(mBinderSerializationLock);
1419 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1420 status_t res;
1421 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1422
1423 int triggerId;
1424 {
1425 SharedParameters::Lock l(mParameters);
1426 // Canceling does nothing in FIXED or INFINITY modes
1427 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1428 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1429 return OK;
1430 }
1431
1432 // An active AF trigger is canceled
1433 if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
1434 ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
1435 }
1436
1437 triggerId = ++l.mParameters.afTriggerCounter;
1438
1439 // When using triggerAfWithAuto quirk, may need to reset focus mode to
1440 // the real state at this point. No need to cancel explicitly if
1441 // changing the AF mode.
1442 if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) {
1443 ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__,
1444 l.mParameters.shadowFocusMode);
1445 l.mParameters.focusMode = l.mParameters.shadowFocusMode;
1446 l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID;
1447 updateRequests(l.mParameters);
1448
1449 return OK;
1450 }
1451 if (l.mParameters.allowZslMode) {
1452 mZslProcessor->clearZslQueue();
1453 }
1454 }
1455 syncWithDevice();
1456
1457 mDevice->triggerCancelAutofocus(triggerId);
1458
1459 return OK;
1460 }
1461
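// Starts a still capture (or video snapshot while recording). Sets up the
// JPEG stream unless slowJpegMode defers that to the CaptureSequencer, clears
// the ZSL queue when the JPEG size changed, syncs with the device when flash
// or AE state may require a precapture trigger, and then hands off to the
// CaptureSequencer.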
1462 status_t Camera2Client::takePicture(int /*msgType*/) {
1463 ATRACE_CALL();
1464 Mutex::Autolock icl(mBinderSerializationLock);
1465 status_t res;
1466 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1467
1468 int takePictureCounter;
1469 bool shouldSyncWithDevice = true;
1470 {
1471 SharedParameters::Lock l(mParameters);
1472 switch (l.mParameters.state) {
1473 case Parameters::DISCONNECTED:
1474 case Parameters::STOPPED:
1475 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1476 ALOGE("%s: Camera %d: Cannot take picture without preview enabled",
1477 __FUNCTION__, mCameraId);
1478 return INVALID_OPERATION;
1479 case Parameters::PREVIEW:
1480 // Good to go for takePicture
1481 res = commandStopFaceDetectionL(l.mParameters);
1482 if (res != OK) {
1483 ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
1484 __FUNCTION__, mCameraId);
1485 return res;
1486 }
1487 l.mParameters.state = Parameters::STILL_CAPTURE;
1488
1489 // Remove recording stream to prevent video snapshot jpeg logic kicking in
1490 if (l.mParameters.isJpegSizeOverridden() &&
1491 mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
1492 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1493 if (res != OK) {
1494 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1495 __FUNCTION__, mCameraId, strerror(-res), res);
1496 }
1497 res = mDevice->waitUntilDrained();
1498 if (res != OK) {
1499 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1500 __FUNCTION__, mCameraId, strerror(-res), res);
1501 }
1502 // Clean up recording stream
1503 res = mStreamingProcessor->deleteRecordingStream();
1504 if (res != OK) {
1505 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1506 "stop preview: %s (%d)",
1507 __FUNCTION__, mCameraId, strerror(-res), res);
1508 }
1509 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
1510 if (res != OK) {
1511 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
1512 __FUNCTION__, mCameraId, strerror(-res), res);
1513 }
1514 l.mParameters.recoverOverriddenJpegSize();
1515 }
1516 break;
1517 case Parameters::RECORD:
1518 // Good to go for video snapshot
1519 l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
1520 break;
1521 case Parameters::STILL_CAPTURE:
1522 case Parameters::VIDEO_SNAPSHOT:
1523 ALOGE("%s: Camera %d: Already taking a picture",
1524 __FUNCTION__, mCameraId);
1525 return INVALID_OPERATION;
1526 }
1527
1528 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
1529 int lastJpegStreamId = mJpegProcessor->getStreamId();
1530 // slowJpegMode will create jpeg stream in CaptureSequencer before capturing
1531 if (!l.mParameters.slowJpegMode) {
1532 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1533 }
1534
1535 // If the video snapshot fails to configureStream, try overriding the video
1536 // snapshot size to the video size
1537 if (res == BAD_VALUE && l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
1538 overrideVideoSnapshotSize(l.mParameters);
1539 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1540 }
1541 if (res != OK) {
1542 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
1543 __FUNCTION__, mCameraId, strerror(-res), res);
1544 return res;
1545 }
1546 takePictureCounter = ++l.mParameters.takePictureCounter;
1547
1548 // Clear ZSL buffer queue when Jpeg size is changed.
1549 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
1550 if (l.mParameters.allowZslMode && jpegStreamChanged) {
1551 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
1552 __FUNCTION__, mCameraId);
1553 mZslProcessor->clearZslQueue();
1554 }
1555
1556 // We should always sync with the device in case flash is turned on,
1557 // the camera device suggests that flash is needed (AE state FLASH_REQUIRED)
1558 // or we are in some other AE state different from CONVERGED that may need
1559 // precapture trigger.
1560 if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON &&
1561 (l.mParameters.aeState == ANDROID_CONTROL_AE_STATE_CONVERGED)) {
1562 shouldSyncWithDevice = false;
1563 }
1564 }
1565
1566 ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);
1567
1568 // Make sure HAL has correct settings in case precapture trigger is needed.
1569 if (shouldSyncWithDevice) {
1570 syncWithDevice();
1571 }
1572
1573 res = mCaptureSequencer->startCapture();
1574 if (res != OK) {
1575 ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
1576 __FUNCTION__, mCameraId, strerror(-res), res);
1577 }
1578
1579 return res;
1580 }
1581
1582 status_t Camera2Client::setParameters(const String8& params) {
1583 ATRACE_CALL();
1584 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1585 Mutex::Autolock icl(mBinderSerializationLock);
1586 status_t res;
1587 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1588
1589 SharedParameters::Lock l(mParameters);
1590
1591 Parameters::focusMode_t focusModeBefore = l.mParameters.focusMode;
1592 res = l.mParameters.set(params);
1593 if (res != OK) return res;
1594 Parameters::focusMode_t focusModeAfter = l.mParameters.focusMode;
1595
1596 if (l.mParameters.allowZslMode && focusModeAfter != focusModeBefore) {
1597 mZslProcessor->clearZslQueue();
1598 }
1599
1600 res = updateRequests(l.mParameters);
1601
1602 return res;
1603 }
1604
1605 String8 Camera2Client::getParameters() const {
1606 ATRACE_CALL();
1607 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1608 Mutex::Autolock icl(mBinderSerializationLock);
1609 // The camera service can unconditionally get the parameters at all times
1610 if (CameraThreadState::getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
1611
1612 SharedParameters::ReadLock l(mParameters);
1613
1614 return l.mParameters.get();
1615 }
1616
1617 status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
1618 ATRACE_CALL();
1619 Mutex::Autolock icl(mBinderSerializationLock);
1620 status_t res;
1621 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1622
1623 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId,
1624 cmd, arg1, arg2);
1625
1626 switch (cmd) {
1627 case CAMERA_CMD_START_SMOOTH_ZOOM:
1628 return commandStartSmoothZoomL();
1629 case CAMERA_CMD_STOP_SMOOTH_ZOOM:
1630 return commandStopSmoothZoomL();
1631 case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
1632 return commandSetDisplayOrientationL(arg1);
1633 case CAMERA_CMD_ENABLE_SHUTTER_SOUND:
1634 return commandEnableShutterSoundL(arg1 == 1);
1635 case CAMERA_CMD_PLAY_RECORDING_SOUND:
1636 return commandPlayRecordingSoundL();
1637 case CAMERA_CMD_START_FACE_DETECTION:
1638 return commandStartFaceDetectionL(arg1);
1639 case CAMERA_CMD_STOP_FACE_DETECTION: {
1640 SharedParameters::Lock l(mParameters);
1641 return commandStopFaceDetectionL(l.mParameters);
1642 }
1643 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
1644 return commandEnableFocusMoveMsgL(arg1 == 1);
1645 case CAMERA_CMD_PING:
1646 return commandPingL();
1647 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
1648 case CAMERA_CMD_SET_VIDEO_FORMAT:
1649 ALOGE("%s: command %d (arguments %d, %d) is not supported.",
1650 __FUNCTION__, cmd, arg1, arg2);
1651 return BAD_VALUE;
1652 default:
1653 ALOGE("%s: Unknown command %d (arguments %d, %d)",
1654 __FUNCTION__, cmd, arg1, arg2);
1655 return BAD_VALUE;
1656 }
1657 }
1658
1659 status_t Camera2Client::commandStartSmoothZoomL() {
1660 ALOGE("%s: Unimplemented!", __FUNCTION__);
1661 return OK;
1662 }
1663
1664 status_t Camera2Client::commandStopSmoothZoomL() {
1665 ALOGE("%s: Unimplemented!", __FUNCTION__);
1666 return OK;
1667 }
1668
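// Converts the requested display orientation in degrees into a stream
// transform and applies it to the preview stream, if one is configured.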
1669 status_t Camera2Client::commandSetDisplayOrientationL(int degrees) {
1670 int transform = Parameters::degToTransform(degrees,
1671 mCameraFacing == CAMERA_FACING_FRONT);
1672 if (transform == -1) {
1673 ALOGE("%s: Camera %d: Error setting %d as display orientation value",
1674 __FUNCTION__, mCameraId, degrees);
1675 return BAD_VALUE;
1676 }
1677 SharedParameters::Lock l(mParameters);
1678 if (transform != l.mParameters.previewTransform &&
1679 getPreviewStreamId() != NO_STREAM) {
1680 mDevice->setStreamTransform(getPreviewStreamId(), transform);
1681 }
1682 l.mParameters.previewTransform = transform;
1683 return OK;
1684 }
1685
1686 status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
1687 SharedParameters::Lock l(mParameters);
1688 if (enable) {
1689 l.mParameters.playShutterSound = true;
1690 return OK;
1691 }
1692
1693 l.mParameters.playShutterSound = false;
1694 return OK;
1695 }
1696
1697 status_t Camera2Client::commandPlayRecordingSoundL() {
1698 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1699 return OK;
1700 }
1701
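// Enables face detection metadata in the repeating requests. Requires an
// active preview or recording state and a device whose best face detect mode
// is not OFF.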
1702 status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
1703 ALOGV("%s: Camera %d: Starting face detection",
1704 __FUNCTION__, mCameraId);
1705 status_t res;
1706 SharedParameters::Lock l(mParameters);
1707 switch (l.mParameters.state) {
1708 case Parameters::DISCONNECTED:
1709 case Parameters::STOPPED:
1710 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1711 case Parameters::STILL_CAPTURE:
1712 ALOGE("%s: Camera %d: Cannot start face detection without preview active",
1713 __FUNCTION__, mCameraId);
1714 return INVALID_OPERATION;
1715 case Parameters::PREVIEW:
1716 case Parameters::RECORD:
1717 case Parameters::VIDEO_SNAPSHOT:
1718 // Good to go for starting face detect
1719 break;
1720 }
1721 // Ignoring type
1722 if (l.mParameters.fastInfo.bestFaceDetectMode ==
1723 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
1724 ALOGE("%s: Camera %d: Face detection not supported",
1725 __FUNCTION__, mCameraId);
1726 return BAD_VALUE;
1727 }
1728 if (l.mParameters.enableFaceDetect) return OK;
1729
1730 l.mParameters.enableFaceDetect = true;
1731
1732 res = updateRequests(l.mParameters);
1733
1734 return res;
1735 }
1736
1737 status_t Camera2Client::commandStopFaceDetectionL(Parameters &params) {
1738 status_t res = OK;
1739 ALOGV("%s: Camera %d: Stopping face detection",
1740 __FUNCTION__, mCameraId);
1741
1742 if (!params.enableFaceDetect) return OK;
1743
1744 params.enableFaceDetect = false;
1745
1746 if (params.state == Parameters::PREVIEW
1747 || params.state == Parameters::RECORD
1748 || params.state == Parameters::VIDEO_SNAPSHOT) {
1749 res = updateRequests(params);
1750 }
1751
1752 return res;
1753 }
1754
1755 status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) {
1756 SharedParameters::Lock l(mParameters);
1757 l.mParameters.enableFocusMoveMessages = enable;
1758
1759 return OK;
1760 }
1761
1762 status_t Camera2Client::commandPingL() {
1763 // Always ping back if access is proper and device is alive
1764 SharedParameters::Lock l(mParameters);
1765 if (l.mParameters.state != Parameters::DISCONNECTED) {
1766 return OK;
1767 } else {
1768 return NO_INIT;
1769 }
1770 }
1771
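// Maps error notifications from the device into API1 CAMERA_ERROR_* codes.
// Per-request errors (request/result/buffer) are treated as recoverable and
// are forwarded to the capture sequencer rather than the client callback.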
1772 void Camera2Client::notifyError(int32_t errorCode,
1773 const CaptureResultExtras& resultExtras) {
1774 int32_t err = CAMERA_ERROR_UNKNOWN;
1775 switch(errorCode) {
1776 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
1777 err = CAMERA_ERROR_RELEASED;
1778 break;
1779 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
1780 err = CAMERA_ERROR_UNKNOWN;
1781 break;
1782 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
1783 err = CAMERA_ERROR_SERVER_DIED;
1784 break;
1785 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
1786 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
1787 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1788 ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
1789 __FUNCTION__, errorCode, resultExtras.requestId);
1790
1791 if ((hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST == errorCode) ||
1792 (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
1793 Mutex::Autolock al(mLatestRequestMutex);
1794
1795 mLatestFailedRequestId = resultExtras.requestId;
1796 mLatestRequestSignal.signal();
1797 }
1798 mCaptureSequencer->notifyError(errorCode, resultExtras);
1799 return;
1800 default:
1801 err = CAMERA_ERROR_UNKNOWN;
1802 break;
1803 }
1804
1805 ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
1806 resultExtras.requestId);
1807
1808 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1809 if (l.mRemoteCallback != nullptr) {
1810 l.mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, err, 0);
1811 }
1812 }
1813
1814
1815 /** Device-related methods */
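// Translates AF state transitions from the device into API1 CAMERA_MSG_FOCUS
// and CAMERA_MSG_FOCUS_MOVE callbacks, depending on the active focus mode and
// the currently outstanding AF trigger.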
1816 void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
1817 ALOGV("%s: Autofocus state now %d, last trigger %d",
1818 __FUNCTION__, newState, triggerId);
1819 bool sendCompletedMessage = false;
1820 bool sendMovingMessage = false;
1821
1822 bool success = false;
1823 bool afInMotion = false;
1824 {
1825 SharedParameters::Lock l(mParameters);
1826 // Trace end of AF state
1827 char tmp[32];
1828 if (l.mParameters.afStateCounter > 0) {
1829 camera_metadata_enum_snprint(
1830 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1831 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
1832 }
1833
1834 // Update state
1835 l.mParameters.focusState = newState;
1836 l.mParameters.afStateCounter++;
1837
1838 // Trace start of AF state
1839
1840 camera_metadata_enum_snprint(
1841 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1842 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
1843
1844 switch (l.mParameters.focusMode) {
1845 case Parameters::FOCUS_MODE_AUTO:
1846 case Parameters::FOCUS_MODE_MACRO:
1847 // Don't send notifications upstream if they're not for the current AF
1848 // trigger. For example, if cancel was called in between, or if we
1849 // already sent a notification about this AF call.
1850 if (triggerId != l.mParameters.currentAfTriggerId) break;
1851 switch (newState) {
1852 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1853 success = true;
1854 FALLTHROUGH_INTENDED;
1855 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1856 sendCompletedMessage = true;
1857 l.mParameters.currentAfTriggerId = -1;
1858 break;
1859 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1860 // Just starting focusing, ignore
1861 break;
1862 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1863 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1864 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1865 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1866 default:
1867 // Unexpected in AUTO/MACRO mode
1868 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
1869 __FUNCTION__, newState);
1870 break;
1871 }
1872 break;
1873 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
1874 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
1875 switch (newState) {
1876 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1877 success = true;
1878 FALLTHROUGH_INTENDED;
1879 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1880 // Don't send notifications upstream if they're not for
1881 // the current AF trigger. For example, if cancel was
1882 // called in between, or if we already sent a
1883 // notification about this AF call.
1884                     // Send both an 'AF done' callback and an 'AF move' callback
1885 if (triggerId != l.mParameters.currentAfTriggerId) break;
1886 sendCompletedMessage = true;
1887 afInMotion = false;
1888 if (l.mParameters.enableFocusMoveMessages &&
1889 l.mParameters.afInMotion) {
1890 sendMovingMessage = true;
1891 }
1892 l.mParameters.currentAfTriggerId = -1;
1893 break;
1894 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1895                     // Cancel was called, or we switched state; only notify
1896                     // upstream if focus was currently moving
1897 afInMotion = false;
1898 if (l.mParameters.enableFocusMoveMessages &&
1899 l.mParameters.afInMotion) {
1900 sendMovingMessage = true;
1901 }
1902 break;
1903 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1904 // Start passive scan, inform upstream
1905 afInMotion = true;
1906 FALLTHROUGH_INTENDED;
1907 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1908 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1909 // Stop passive scan, inform upstream
1910 if (l.mParameters.enableFocusMoveMessages) {
1911 sendMovingMessage = true;
1912 }
1913 break;
1914 }
1915 l.mParameters.afInMotion = afInMotion;
1916 break;
1917 case Parameters::FOCUS_MODE_EDOF:
1918 case Parameters::FOCUS_MODE_INFINITY:
1919 case Parameters::FOCUS_MODE_FIXED:
1920 default:
1921 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
1922 ALOGE("%s: Unexpected AF state change %d "
1923 "(ID %d) in focus mode %d",
1924 __FUNCTION__, newState, triggerId,
1925 l.mParameters.focusMode);
1926 }
1927 }
1928 }
1929 if (sendMovingMessage) {
1930 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1931 if (l.mRemoteCallback != 0) {
1932 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
1933 afInMotion ? 1 : 0, 0);
1934 }
1935 }
1936 if (sendCompletedMessage) {
1937 ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
1938 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1939 if (l.mRemoteCallback != 0) {
1940 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1941 success ? 1 : 0, 0);
1942 }
1943 }
1944 }
1945
1946 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
1947 ALOGV("%s: Autoexposure state now %d, last trigger %d",
1948 __FUNCTION__, newState, triggerId);
1949 {
1950 SharedParameters::Lock l(mParameters);
1951 // Update state
1952 l.mParameters.aeState = newState;
1953 }
1954 mCaptureSequencer->notifyAutoExposure(newState, triggerId);
1955 }
1956
1957 void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
1958 nsecs_t timestamp) {
1959 ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
1960 __FUNCTION__, resultExtras.requestId, timestamp);
1961 mCaptureSequencer->notifyShutter(resultExtras, timestamp);
1962
1963 Camera2ClientBase::notifyShutter(resultExtras, timestamp);
1964 }
1965
1966 camera2::SharedParameters& Camera2Client::getParameters() {
1967 return mParameters;
1968 }
1969
1970 int Camera2Client::getPreviewStreamId() const {
1971 return mStreamingProcessor->getPreviewStreamId();
1972 }
1973
1974 int Camera2Client::getCaptureStreamId() const {
1975 return mJpegProcessor->getStreamId();
1976 }
1977
1978 int Camera2Client::getCallbackStreamId() const {
1979 return mCallbackProcessor->getStreamId();
1980 }
1981
1982 int Camera2Client::getRecordingStreamId() const {
1983 return mStreamingProcessor->getRecordingStreamId();
1984 }
1985
1986 int Camera2Client::getZslStreamId() const {
1987 return mZslProcessor->getStreamId();
1988 }
1989
1990 status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId,
1991 const wp<camera2::FrameProcessor::FilteredListener>& listener, bool sendPartials) {
1992 return mFrameProcessor->registerListener(minId, maxId, listener, sendPartials);
1993 }
1994
1995 status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId,
1996 const wp<camera2::FrameProcessor::FilteredListener>& listener) {
1997 return mFrameProcessor->removeListener(minId, maxId, listener);
1998 }
1999
2000 status_t Camera2Client::stopStream() {
2001 return mStreamingProcessor->stopStream();
2002 }
2003
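// Creates the JPEG (still capture) stream when none exists yet. Streaming is
// paused and the device flushed and drained first so the new stream can be
// configured.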
2004 status_t Camera2Client::createJpegStreamL(Parameters &params) {
2005 status_t res = OK;
2006 int lastJpegStreamId = mJpegProcessor->getStreamId();
2007 if (lastJpegStreamId != NO_STREAM) {
2008 return INVALID_OPERATION;
2009 }
2010
2011 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2012 if (res != OK) {
2013 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2014 __FUNCTION__, mCameraId, strerror(-res), res);
2015 return res;
2016 }
2017
2018 res = mDevice->flush();
2019 if (res != OK) {
2020         ALOGE("%s: Camera %d: Unable to flush device: %s (%d)",
2021 __FUNCTION__, mCameraId, strerror(-res), res);
2022 return res;
2023 }
2024
2025     // Ideally we wouldn't need this, but the current camera device
2026     // status tracking mechanism requires it.
2027 res = mDevice->waitUntilDrained();
2028 if (res != OK) {
2029         ALOGE("%s: Camera %d: Waiting for device to drain failed: %s (%d)",
2030 __FUNCTION__, mCameraId, strerror(-res), res);
2031 }
2032
2033 res = updateProcessorStream(mJpegProcessor, params);
2034 return res;
2035 }
2036
2037 const int32_t Camera2Client::kPreviewRequestIdStart;
2038 const int32_t Camera2Client::kPreviewRequestIdEnd;
2039 const int32_t Camera2Client::kRecordingRequestIdStart;
2040 const int32_t Camera2Client::kRecordingRequestIdEnd;
2041 const int32_t Camera2Client::kCaptureRequestIdStart;
2042 const int32_t Camera2Client::kCaptureRequestIdEnd;
2043
2044 /** Utility methods */
2045
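// Rebuilds the preview and recording request templates from the given
// parameters and restarts the active stream (preview or recording) so the new
// settings take effect.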
2046 status_t Camera2Client::updateRequests(Parameters &params) {
2047 status_t res;
2048
2049 ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state);
2050
2051 res = mStreamingProcessor->incrementStreamingIds();
2052 if (res != OK) {
2053 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)",
2054 __FUNCTION__, mCameraId, strerror(-res), res);
2055 return res;
2056 }
2057
2058 res = mStreamingProcessor->updatePreviewRequest(params);
2059 if (res != OK) {
2060 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)",
2061 __FUNCTION__, mCameraId, strerror(-res), res);
2062 return res;
2063 }
2064 res = mStreamingProcessor->updateRecordingRequest(params);
2065 if (res != OK) {
2066 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
2067 __FUNCTION__, mCameraId, strerror(-res), res);
2068 return res;
2069 }
2070
2071 if (params.state == Parameters::PREVIEW) {
2072 res = startPreviewL(params, true);
2073 if (res != OK) {
2074 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)",
2075 __FUNCTION__, mCameraId, strerror(-res), res);
2076 return res;
2077 }
2078 } else if (params.state == Parameters::RECORD ||
2079 params.state == Parameters::VIDEO_SNAPSHOT) {
2080 res = startRecordingL(params, true);
2081 if (res != OK) {
2082 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
2083 __FUNCTION__, mCameraId, strerror(-res), res);
2084 return res;
2085 }
2086 }
2087 return res;
2088 }
2089
2090
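// Computes the size in bytes of one callback buffer for the given preview
// format; YV12 accounts for the 16-byte-aligned chroma stride.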
2091 size_t Camera2Client::calculateBufferSize(int width, int height,
2092 int format, int stride) {
2093 switch (format) {
2094 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16
2095 return width * height * 2;
2096 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21
2097 return width * height * 3 / 2;
2098 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2
2099 return width * height * 2;
2100 case HAL_PIXEL_FORMAT_YV12: { // YV12
2101 size_t ySize = stride * height;
2102 size_t uvStride = (stride / 2 + 0xF) & ~0xF;
2103 size_t uvSize = uvStride * height / 2;
2104 return ySize + uvSize * 2;
2105 }
2106 case HAL_PIXEL_FORMAT_RGB_565:
2107 return width * height * 2;
2108 case HAL_PIXEL_FORMAT_RGBA_8888:
2109 return width * height * 4;
2110 case HAL_PIXEL_FORMAT_RAW16:
2111 return width * height * 2;
2112 default:
2113 ALOGE("%s: Unknown preview format: %x",
2114 __FUNCTION__, format);
2115 return 0;
2116 }
2117 }
2118
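// Waits (up to 500 ms) until the HAL has received the currently active
// repeating request, so that subsequent triggers see up-to-date settings.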
2119 status_t Camera2Client::syncWithDevice() {
2120 ATRACE_CALL();
2121 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms
2122 status_t res;
2123
2124 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2125 if (activeRequestId == 0) return OK;
2126
2127 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout);
2128 if (res == TIMED_OUT) {
2129         ALOGE("%s: Camera %d: Timed out waiting to sync with HAL",
2130 __FUNCTION__, mCameraId);
2131 } else if (res != OK) {
2132 ALOGE("%s: Camera %d: Error while waiting to sync with HAL",
2133 __FUNCTION__, mCameraId);
2134 }
2135 return res;
2136 }
2137
2138 template <typename ProcessorT>
2139 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2140 camera2::Parameters params) {
2141 // No default template arguments until C++11, so we need this overload
2142 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>(
2143 processor, params);
2144 }
2145
2146 template <typename ProcessorT,
2147 status_t (ProcessorT::*updateStreamF)(const Parameters &)>
2148 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2149 Parameters params) {
2150 status_t res;
2151
2152 // Get raw pointer since sp<T> doesn't have operator->*
2153 ProcessorT *processorPtr = processor.get();
2154 res = (processorPtr->*updateStreamF)(params);
2155
2156     /**
2157      * If the stream is busy, it can't be updated in place.
2158      *
2159      * In that case, stop the device (by temporarily clearing the request
2160      * queue), try the update again, and resume streaming once we're done.
2161      */
2162 if (res == -EBUSY) {
2163 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__,
2164 mCameraId);
2165 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2166 if (res != OK) {
2167 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2168 __FUNCTION__, mCameraId, strerror(-res), res);
2169 }
2170
2171 res = mDevice->waitUntilDrained();
2172 if (res != OK) {
2173 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
2174 __FUNCTION__, mCameraId, strerror(-res), res);
2175 }
2176
2177 res = (processorPtr->*updateStreamF)(params);
2178 if (res != OK) {
2179             ALOGE("%s: Camera %d: Failed to update processing stream "
2180                     "despite having halted streaming first: %s (%d)",
2181 __FUNCTION__, mCameraId, strerror(-res), res);
2182 }
2183
2184 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
2185 if (res != OK) {
2186 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
2187 __FUNCTION__, mCameraId, strerror(-res), res);
2188 }
2189 }
2190
2191 return res;
2192 }
2193
2194 status_t Camera2Client::overrideVideoSnapshotSize(Parameters &params) {
2195     ALOGV("%s: Camera %d: Configuring still size to video size before recording",
2196             __FUNCTION__, mCameraId);
2197 params.overrideJpegSizeByVideoSize();
2198 status_t res = updateProcessorStream(mJpegProcessor, params);
2199 if (res != OK) {
2200 ALOGE("%s: Camera %d: Can't override video snapshot size to video size: %s (%d)",
2201 __FUNCTION__, mCameraId, strerror(-res), res);
2202 }
2203 return res;
2204 }
2205
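// Sets the video recording output surface. Only allowed while stopped,
// waiting for a preview window, or previewing; the surface's format and
// dataspace are cached in the shared parameters.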
2206 status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
2207 ATRACE_CALL();
2208 ALOGV("%s: E", __FUNCTION__);
2209 Mutex::Autolock icl(mBinderSerializationLock);
2210 status_t res;
2211 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
2212
2213 sp<IBinder> binder = IInterface::asBinder(bufferProducer);
2214 if (binder == mVideoSurface) {
2215 ALOGV("%s: Camera %d: New video window is same as old video window",
2216 __FUNCTION__, mCameraId);
2217 return NO_ERROR;
2218 }
2219
2220 sp<Surface> window;
2221     int format = 0;
2222     android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
2223
2224 if (bufferProducer != nullptr) {
2225 // Using controlledByApp flag to ensure that the buffer queue remains in
2226 // async mode for the old camera API, where many applications depend
2227 // on that behavior.
2228 window = new Surface(bufferProducer, /*controlledByApp*/ true);
2229
2230 ANativeWindow *anw = window.get();
2231
2232 if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
2233 ALOGE("%s: Failed to query Surface format", __FUNCTION__);
2234 return res;
2235 }
2236
2237 if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
2238 reinterpret_cast<int*>(&dataSpace))) != OK) {
2239 ALOGE("%s: Failed to query Surface dataSpace", __FUNCTION__);
2240 return res;
2241 }
2242 }
2243
2244 Parameters::State state;
2245 {
2246 SharedParameters::Lock l(mParameters);
2247 state = l.mParameters.state;
2248 }
2249
2250 switch (state) {
2251 case Parameters::STOPPED:
2252 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
2253 case Parameters::PREVIEW:
2254 // OK
2255 break;
2256 case Parameters::DISCONNECTED:
2257 case Parameters::RECORD:
2258 case Parameters::STILL_CAPTURE:
2259 case Parameters::VIDEO_SNAPSHOT:
2260 default:
2261 ALOGE("%s: Camera %d: Cannot set video target while in state %s",
2262 __FUNCTION__, mCameraId,
2263 Parameters::getStateName(state));
2264 return INVALID_OPERATION;
2265 }
2266
2267 mVideoSurface = binder;
2268 res = mStreamingProcessor->setRecordingWindow(window);
2269 if (res != OK) {
2270 ALOGE("%s: Unable to set new recording window: %s (%d)",
2271 __FUNCTION__, strerror(-res), res);
2272 return res;
2273 }
2274
2275 {
2276 SharedParameters::Lock l(mParameters);
2277 l.mParameters.videoFormat = format;
2278 l.mParameters.videoDataSpace = dataSpace;
2279 }
2280
2281 return OK;
2282 }
2283
2284 status_t Camera2Client::setAudioRestriction(int /*mode*/) {
2285     // Empty implementation. setAudioRestriction is a hidden interface and is not
2286     // supported by the android.hardware.Camera API.
2287 return INVALID_OPERATION;
2288 }
2289
2290 int32_t Camera2Client::getGlobalAudioRestriction() {
2291     // Empty implementation. getGlobalAudioRestriction is a hidden interface and is not
2292     // supported by the android.hardware.Camera API.
2293 return INVALID_OPERATION;
2294 }
2295
2296 status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop) {
2297 if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
2298
2299 return mDevice->setRotateAndCropAutoBehavior(
2300 static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
2301 }
2302
2303 bool Camera2Client::supportsCameraMute() {
2304 return mDevice->supportsCameraMute();
2305 }
2306
2307 status_t Camera2Client::setCameraMute(bool enabled) {
2308 return mDevice->setCameraMute(enabled);
2309 }
2310
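// Waits for the currently active repeating request ID to be reported back in
// capture results, bounded by the device's expected in-flight duration.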
2311 status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
2312 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2313 if (activeRequestId != 0) {
2314 auto res = waitUntilRequestIdApplied(activeRequestId,
2315 mDevice->getExpectedInFlightDuration());
2316 if (res == TIMED_OUT) {
2317 ALOGE("%s: Camera %d: Timed out waiting for current request id to return in results!",
2318 __FUNCTION__, mCameraId);
2319 return res;
2320 } else if (res != OK) {
2321 ALOGE("%s: Camera %d: Error while waiting for current request id to return in results!",
2322 __FUNCTION__, mCameraId);
2323 return res;
2324 }
2325 }
2326
2327 return OK;
2328 }
2329
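// Blocks until the given request ID is reported back via notifyRequestId()
// (success) or recorded as failed via notifyError(), or until the timeout
// expires; returns DEAD_OBJECT if the request failed.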
2330 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
2331 Mutex::Autolock l(mLatestRequestMutex);
2332 while ((mLatestRequestId != requestId) && (mLatestFailedRequestId != requestId)) {
2333 nsecs_t startTime = systemTime();
2334
2335 auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
2336 if (res != OK) return res;
2337
2338 timeout -= (systemTime() - startTime);
2339 }
2340
2341 return (mLatestRequestId == requestId) ? OK : DEAD_OBJECT;
2342 }
2343
2344 void Camera2Client::notifyRequestId(int32_t requestId) {
2345 Mutex::Autolock al(mLatestRequestMutex);
2346
2347 mLatestRequestId = requestId;
2348 mLatestRequestSignal.signal();
2349 }
2350
2351 const char* Camera2Client::kAutofocusLabel = "autofocus";
2352 const char* Camera2Client::kTakepictureLabel = "take_picture";
2353
2354 } // namespace android
2355