1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2Client"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <sstream>
22
23 #include <inttypes.h>
24 #include <utils/Log.h>
25 #include <utils/Trace.h>
26
27 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
28 #include <camera/CameraUtils.h>
29 #include <camera/StringUtils.h>
30 #include <com_android_internal_camera_flags.h>
31 #include <com_android_window_flags.h>
32 #include <cutils/properties.h>
33 #include <gui/Surface.h>
34 #include <gui/view/Surface.h>
35
36 #include "api1/Camera2Client.h"
37
38 #include "api1/client2/StreamingProcessor.h"
39 #include "api1/client2/JpegProcessor.h"
40 #include "api1/client2/CaptureSequencer.h"
41 #include "api1/client2/CallbackProcessor.h"
42 #include "api1/client2/ZslProcessor.h"
43 #include "device3/RotateAndCropMapper.h"
44 #include "utils/CameraServiceProxyWrapper.h"
45
46 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
47 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
48
49 #ifndef FALLTHROUGH_INTENDED
50 #define FALLTHROUGH_INTENDED [[fallthrough]]
51 #endif
52
53 namespace android {
54 using namespace camera2;
55
56 namespace flags = com::android::internal::camera::flags;
57 namespace wm_flags = com::android::window::flags;
58
59 // Interface used by CameraService
60
61 Camera2Client::Camera2Client(
62 const sp<CameraService>& cameraService, const sp<hardware::ICameraClient>& cameraClient,
63 std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
64 std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
65 const AttributionSourceState& clientAttribution, int callingPid,
66 const std::string& cameraDeviceId, int api1CameraId, int cameraFacing,
67 int sensorOrientation, int servicePid, bool overrideForPerfClass, int rotationOverride,
68 bool forceSlowJpegMode, bool sharedMode)
69 : Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
70 attributionAndPermissionUtils, clientAttribution, callingPid,
71 false /*systemNativeClient - since no ndk for api1*/, cameraDeviceId,
72 api1CameraId, cameraFacing, sensorOrientation, servicePid,
73 overrideForPerfClass, rotationOverride, sharedMode,
74 /*isVendorClient*/ false, /*legacyClient*/ true),
75 mParameters(api1CameraId, cameraFacing),
76 mInitialized(false),
77 mLatestRequestIds(kMaxRequestIds),
78 mLatestFailedRequestIds(kMaxRequestIds) {
79 ATRACE_CALL();
80
81 mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
82 mRotateAndCropIsSupported = false;
83 mRotateAndCropPreviewTransform = 0;
84
85 SharedParameters::Lock l(mParameters);
86 l.mParameters.state = Parameters::DISCONNECTED;
87 l.mParameters.isSlowJpegModeForced = forceSlowJpegMode;
88 }
89
90 status_t Camera2Client::initialize(sp<CameraProviderManager> manager,
91 const std::string& monitorTags) {
92 return initializeImpl(manager, monitorTags);
93 }
94
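// Checks the default STILL_CAPTURE request template for ANDROID_CONTROL_ENABLE_ZSL,
// to determine whether the device itself enables zero-shutter-lag still captures.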
95 bool Camera2Client::isZslEnabledInStillTemplate() {
96 bool zslEnabled = false;
97 CameraMetadata stillTemplate;
98 status_t res = mDevice->createDefaultRequest(
99 camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
100 if (res == OK) {
101 camera_metadata_entry_t enableZsl = stillTemplate.find(ANDROID_CONTROL_ENABLE_ZSL);
102 if (enableZsl.count == 1) {
103 zslEnabled = (enableZsl.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE);
104 }
105 }
106
107 return zslEnabled;
108 }
109
110 template<typename TProviderPtr>
111 status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags)
112 {
113 ATRACE_CALL();
114 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
115 status_t res;
116
117 res = Camera2ClientBase::initialize(providerPtr, monitorTags);
118 if (res != OK) {
119 return res;
120 }
121
122 {
123 SharedParameters::Lock l(mParameters);
124
125 res = l.mParameters.initialize(mDevice.get());
126 if (res != OK) {
127 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
128 __FUNCTION__, mCameraId, strerror(-res), res);
129 return NO_INIT;
130 }
131
132 l.mParameters.isDeviceZslSupported = isZslEnabledInStillTemplate();
133 }
134
135 const CameraMetadata& staticInfo = mDevice->info();
136 mRotateAndCropIsSupported = camera3::RotateAndCropMapper::isNeeded(&staticInfo);
137 // The 'mRotateAndCropMode' value only accounts for the necessary adjustment
138 // when the display rotates. The sensor orientation still needs to be calculated
139 // and applied, similarly to the Camera2 path.
140 using hardware::BnCameraService::ROTATION_OVERRIDE_ROTATION_ONLY;
141 bool enableTransformInverseDisplay = true;
142 if (wm_flags::enable_camera_compat_for_desktop_windowing()) {
143 enableTransformInverseDisplay = (mRotationOverride != ROTATION_OVERRIDE_ROTATION_ONLY);
144 }
145 CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
146 enableTransformInverseDisplay, &mRotateAndCropPreviewTransform);
147
148 mStreamingProcessor = new StreamingProcessor(this);
149
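// Give each helper thread a short, per-camera name, e.g. "C2-0-FrameProc".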
150 std::string threadName = std::string("C2-") + std::to_string(mCameraId);
151 mFrameProcessor = new FrameProcessor(mDevice, this);
152 res = mFrameProcessor->run((threadName + "-FrameProc").c_str());
153 if (res != OK) {
154 ALOGE("%s: Unable to start frame processor thread: %s (%d)",
155 __FUNCTION__, strerror(-res), res);
156 return res;
157 }
158
159 mCaptureSequencer = new CaptureSequencer(this);
160 res = mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
161 if (res != OK) {
162 ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
163 __FUNCTION__, strerror(-res), res);
164 return res;
165 }
166
167 mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
168 res = mJpegProcessor->run((threadName + "-JpegProc").c_str());
169 if (res != OK) {
170 ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
171 __FUNCTION__, strerror(-res), res);
172 return res;
173 }
174
175 mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
176 res = mZslProcessor->run((threadName + "-ZslProc").c_str());
177 if (res != OK) {
178 ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
179 __FUNCTION__, strerror(-res), res);
180 return res;
181 }
182
183 mCallbackProcessor = new CallbackProcessor(this);
184 res = mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
185 if (res != OK) {
186 ALOGE("%s: Unable to start callback processor thread: %s (%d)",
187 __FUNCTION__, strerror(-res), res);
188 return res;
189 }
190
191 if (gLogLevel >= 1) {
192 SharedParameters::Lock l(mParameters);
193 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
194 mCameraId);
195 ALOGD("%s", l.mParameters.paramsFlattened.c_str());
196 }
197
198 mInitialized = true;
199 return OK;
200 }
201
202 Camera2Client::~Camera2Client() {
203 ATRACE_CALL();
204 ALOGV("~Camera2Client");
205
206 mDestructionStarted = true;
207
208 disconnect();
209
210 ALOGI("Camera %d: Closed", mCameraId);
211 }
212
213 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
214 return BasicClient::dump(fd, args);
215 }
216
217 status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
218 std::ostringstream result;
219 result << fmt::sprintf("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
220 (getRemoteCallback() != NULL ?
221 (void *) (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
222 mCallingPid);
223 result << " State: ";
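// Appends the stringified enum value to the dump output for the matching case.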
224 #define CASE_APPEND_ENUM(x) case x: result << #x "\n"; break;
225
226 const Parameters& p = mParameters.unsafeAccess();
227
228 result << Parameters::getStateName(p.state);
229
230 result << "\n Current parameters:\n";
231 result << fmt::sprintf(" Preview size: %d x %d\n",
232 p.previewWidth, p.previewHeight);
233 result << fmt::sprintf(" Preview FPS range: %d - %d\n",
234 p.previewFpsRange[0], p.previewFpsRange[1]);
235 result << fmt::sprintf(" Preview HAL pixel format: 0x%x\n",
236 p.previewFormat);
237 result << fmt::sprintf(" Preview transform: %x\n",
238 p.previewTransform);
239 result << fmt::sprintf(" Picture size: %d x %d\n",
240 p.pictureWidth, p.pictureHeight);
241 result << fmt::sprintf(" Jpeg thumbnail size: %d x %d\n",
242 p.jpegThumbSize[0], p.jpegThumbSize[1]);
243 result << fmt::sprintf(" Jpeg quality: %d, thumbnail quality: %d\n",
244 p.jpegQuality, p.jpegThumbQuality);
245 result << fmt::sprintf(" Jpeg rotation: %d\n", p.jpegRotation);
246 result << fmt::sprintf(" GPS tags %s\n",
247 p.gpsEnabled ? "enabled" : "disabled");
248 if (p.gpsEnabled) {
249 result << fmt::sprintf(" GPS lat x long x alt: %f x %f x %f\n",
250 p.gpsCoordinates[0], p.gpsCoordinates[1],
251 p.gpsCoordinates[2]);
252 result << fmt::sprintf(" GPS timestamp: %" PRId64 "\n",
253 p.gpsTimestamp);
254 result << fmt::sprintf(" GPS processing method: %s\n",
255 p.gpsProcessingMethod.c_str());
256 }
257
258 result << " White balance mode: ";
259 switch (p.wbMode) {
260 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
261 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
262 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
263 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
264 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
265 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
266 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
267 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
268 default: result << "UNKNOWN\n";
269 }
270
271 result << " Effect mode: ";
272 switch (p.effectMode) {
273 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
274 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
275 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
276 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
277 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
278 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
279 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
280 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
281 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
282 default: result << "UNKNOWN\n";
283 }
284
285 result << " Antibanding mode: ";
286 switch (p.antibandingMode) {
287 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
288 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
289 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
290 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
291 default: result << "UNKNOWN\n";
292 }
293
294 result << " Scene mode: ";
295 switch (p.sceneMode) {
296 case ANDROID_CONTROL_SCENE_MODE_DISABLED:
297 result << "AUTO\n"; break;
298 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
299 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
300 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
301 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)
302 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT)
303 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT)
304 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE)
305 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH)
306 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW)
307 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET)
308 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO)
309 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS)
310 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS)
311 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
312 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
313 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
314 default: result << "UNKNOWN\n";
315 }
316
317 result << " Flash mode: ";
318 switch (p.flashMode) {
319 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
320 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
321 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON)
322 CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
323 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
324 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
325 default: result << "UNKNOWN\n";
326 }
327
328 result << " Focus mode: ";
329 switch (p.focusMode) {
330 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
331 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
332 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO)
333 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE)
334 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF)
335 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
336 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
337 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
338 default: result << "UNKNOWN\n";
339 }
340
341 result << " Focus state: ";
342 switch (p.focusState) {
343 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
344 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
345 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
346 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
347 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
348 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
349 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
350 default: result << "UNKNOWN\n";
351 }
352
353 result << " Focusing areas:\n";
354 for (size_t i = 0; i < p.focusingAreas.size(); i++) {
355 result << fmt::sprintf(" [ (%d, %d, %d, %d), weight %d ]\n",
356 p.focusingAreas[i].left,
357 p.focusingAreas[i].top,
358 p.focusingAreas[i].right,
359 p.focusingAreas[i].bottom,
360 p.focusingAreas[i].weight);
361 }
362
363 result << fmt::sprintf(" Exposure compensation index: %d\n",
364 p.exposureCompensation);
365
366 result << fmt::sprintf(" AE lock %s, AWB lock %s\n",
367 p.autoExposureLock ? "enabled" : "disabled",
368 p.autoWhiteBalanceLock ? "enabled" : "disabled" );
369
370 result << " Metering areas:\n";
371 for (size_t i = 0; i < p.meteringAreas.size(); i++) {
372 result << fmt::sprintf(" [ (%d, %d, %d, %d), weight %d ]\n",
373 p.meteringAreas[i].left,
374 p.meteringAreas[i].top,
375 p.meteringAreas[i].right,
376 p.meteringAreas[i].bottom,
377 p.meteringAreas[i].weight);
378 }
379
380 result << fmt::sprintf(" Zoom index: %d\n", p.zoom);
381 result << fmt::sprintf(" Video size: %d x %d\n", p.videoWidth,
382 p.videoHeight);
383
384 result << fmt::sprintf(" Recording hint is %s\n",
385 p.recordingHint ? "set" : "not set");
386
387 result << fmt::sprintf(" Video stabilization is %s\n",
388 p.videoStabilization ? "enabled" : "disabled");
389
390 result << fmt::sprintf(" Selected still capture FPS range: %d - %d\n",
391 p.fastInfo.bestStillCaptureFpsRange[0],
392 p.fastInfo.bestStillCaptureFpsRange[1]);
393
394 result << fmt::sprintf(" Use zero shutter lag: %s\n",
395 p.useZeroShutterLag() ? "yes" : "no");
396
397 result << " Current streams:\n";
398 result << fmt::sprintf(" Preview stream ID: %d\n",
399 getPreviewStreamId());
400 result << fmt::sprintf(" Capture stream ID: %d\n",
401 getCaptureStreamId());
402 result << fmt::sprintf(" Recording stream ID: %d\n",
403 getRecordingStreamId());
404
405 result << " Quirks for this camera:\n";
406 bool haveQuirk = false;
407 if (p.quirks.triggerAfWithAuto) {
408 result << " triggerAfWithAuto\n";
409 haveQuirk = true;
410 }
411 if (p.quirks.useZslFormat) {
412 result << " useZslFormat\n";
413 haveQuirk = true;
414 }
415 if (p.quirks.meteringCropRegion) {
416 result << " meteringCropRegion\n";
417 haveQuirk = true;
418 }
419 if (p.quirks.partialResults) {
420 result << " usePartialResult\n";
421 haveQuirk = true;
422 }
423 if (!haveQuirk) {
424 result << " none\n";
425 }
426
427 std::string resultStr = result.str();
428
429 write(fd, resultStr.c_str(), resultStr.size());
430
431 mStreamingProcessor->dump(fd, args);
432
433 mCaptureSequencer->dump(fd, args);
434
435 mFrameProcessor->dump(fd, args);
436
437 mZslProcessor->dump(fd, args);
438
439 return dumpDevice(fd, args);
440 #undef CASE_APPEND_ENUM
441 }
442
443 // ICamera interface
444
445 binder::Status Camera2Client::disconnect() {
446 ATRACE_CALL();
447 nsecs_t startTime = systemTime();
448 Mutex::Autolock icl(mBinderSerializationLock);
449
450 binder::Status res = binder::Status::ok();
451 // Allow both client and the cameraserver to disconnect at all times
452 int callingPid = getCallingPid();
453 if (callingPid != mCallingPid && callingPid != mServicePid) return res;
454
455 if (mDevice == nullptr) return res;
456
457 ALOGV("Camera %d: Shutting down", mCameraId);
458
459 /**
460 * disconnect() cannot call any methods that might need to promote a
461 * wp<Camera2Client>, since disconnect can be called from the destructor, at
462 * which point all such promotions will fail.
463 */
464
465 stopPreviewL();
466
467 {
468 SharedParameters::Lock l(mParameters);
469 if (l.mParameters.state == Parameters::DISCONNECTED) return res;
470 l.mParameters.state = Parameters::DISCONNECTED;
471 }
472
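// Ask all helper threads to exit; they are joined below with the binder lock released.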
473 if (mFrameProcessor != nullptr) mFrameProcessor->requestExit();
474 if (mCaptureSequencer != nullptr) mCaptureSequencer->requestExit();
475 if (mJpegProcessor != nullptr) mJpegProcessor->requestExit();
476 if (mZslProcessor != nullptr) mZslProcessor->requestExit();
477 if (mCallbackProcessor != nullptr) mCallbackProcessor->requestExit();
478
479 ALOGV("Camera %d: Waiting for threads", mCameraId);
480
481 {
482 // Don't wait with lock held, in case the other threads need to
483 // complete callbacks that re-enter Camera2Client
484 mBinderSerializationLock.unlock();
485
486 if (mFrameProcessor != nullptr) mFrameProcessor->join();
487 if (mCaptureSequencer != nullptr) mCaptureSequencer->join();
488 if (mJpegProcessor != nullptr) mJpegProcessor->join();
489 if (mZslProcessor != nullptr) mZslProcessor->join();
490 if (mCallbackProcessor != nullptr) mCallbackProcessor->join();
491
492 mBinderSerializationLock.lock();
493 }
494
495 ALOGV("Camera %d: Deleting streams", mCameraId);
496
497 if (mStreamingProcessor != nullptr) {
498 mStreamingProcessor->deletePreviewStream();
499 mStreamingProcessor->deleteRecordingStream();
500 }
501 if (mJpegProcessor != nullptr) mJpegProcessor->deleteStream();
502 if (mCallbackProcessor != nullptr) mCallbackProcessor->deleteStream();
503 if (mZslProcessor != nullptr) mZslProcessor->deleteStream();
504
505 ALOGV("Camera %d: Disconnecting device", mCameraId);
506
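// Record whether the device hit an error before disconnecting, so the close
// metrics reported to the service proxy below include it.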
507 bool hasDeviceError = mDevice->hasDeviceError();
508 mDevice->disconnect();
509
510 if (flags::api1_release_binderlock_before_cameraservice_disconnect()) {
511 // CameraService::Client::disconnect calls CameraService which attempts to lock
512 // CameraService's mServiceLock. This might lead to a deadlock if the cameraservice is
513 // currently waiting to lock mSerializationLock on another thread.
514 mBinderSerializationLock.unlock();
515 CameraService::Client::disconnect();
516 mBinderSerializationLock.lock();
517 } else {
518 CameraService::Client::disconnect();
519 }
520
521 int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
522 mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
523
524 return res;
525 }
526
527 status_t Camera2Client::connect(const sp<hardware::ICameraClient>& client) {
528 ATRACE_CALL();
529 ALOGV("%s: E", __FUNCTION__);
530 Mutex::Autolock icl(mBinderSerializationLock);
531
532 if (mCallingPid != 0 && getCallingPid() != mCallingPid) {
533 ALOGE("%s: Camera %d: Connection attempt from pid %d; "
534 "current locked to pid %d", __FUNCTION__,
535 mCameraId, getCallingPid(), mCallingPid);
536 return BAD_VALUE;
537 }
538
539 mCallingPid = getCallingPid();
540
541 mRemoteCallback = client;
542 mSharedCameraCallbacks = client;
543
544 return OK;
545 }
546
547 status_t Camera2Client::lock() {
548 ATRACE_CALL();
549 ALOGV("%s: E", __FUNCTION__);
550 Mutex::Autolock icl(mBinderSerializationLock);
551 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
552 __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
553
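// A stored pid of 0 means no client currently holds the lock; the caller takes ownership.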
554 if (mCallingPid == 0) {
555 mCallingPid = getCallingPid();
556 return OK;
557 }
558
559 if (mCallingPid != getCallingPid()) {
560 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
561 __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
562 return EBUSY;
563 }
564
565 return OK;
566 }
567
568 status_t Camera2Client::unlock() {
569 ATRACE_CALL();
570 ALOGV("%s: E", __FUNCTION__);
571 Mutex::Autolock icl(mBinderSerializationLock);
572 ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
573 __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
574
575 if (mCallingPid == getCallingPid()) {
576 SharedParameters::Lock l(mParameters);
577 if (l.mParameters.state == Parameters::RECORD ||
578 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
579 ALOGD("Not allowed to unlock camera during recording.");
580 return INVALID_OPERATION;
581 }
582 mCallingPid = 0;
583 mRemoteCallback.clear();
584 mSharedCameraCallbacks.clear();
585 return OK;
586 }
587
588 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
589 __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
590 return EBUSY;
591 }
592
593 status_t Camera2Client::setPreviewTarget(const sp<SurfaceType>& target) {
594 ATRACE_CALL();
595 ALOGV("%s: E", __FUNCTION__);
596 Mutex::Autolock icl(mBinderSerializationLock);
597 status_t res;
598 if ((res = checkPid(__FUNCTION__)) != OK) return res;
599
600 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
601 sp<Surface> surface;
602 view::Surface viewSurface;
603 if (target != nullptr) {
604 // Using controlledByApp flag to ensure that the buffer queue remains in
605 // async mode for the old camera API, where many applications depend
606 // on that behavior.
607 surface = new Surface(target->getIGraphicBufferProducer(), true);
608 viewSurface = view::Surface::fromSurface(surface);
609 }
610 return setPreviewWindowL(viewSurface, surface);
611 #else
612 sp<IBinder> binder;
613 sp<Surface> window;
614 if (target != 0) {
615 binder = IInterface::asBinder(target);
616 // Using controlledByApp flag to ensure that the buffer queue remains in
617 // async mode for the old camera API, where many applications depend
618 // on that behavior.
619 window = new Surface(target, /*controlledByApp*/ true);
620 }
621 return setPreviewWindowL(binder, window);
622 #endif
623 }
624
625 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
626 status_t Camera2Client::setPreviewWindowL(const view::Surface& viewSurface,
627 const sp<Surface>& window) {
628 ATRACE_CALL();
629 status_t res;
630
631 uint64_t viewSurfaceID;
632 res = viewSurface.getUniqueId(&viewSurfaceID);
633 if (res != OK) {
634 ALOGE("%s: Camera %d: Could not getUniqueId.", __FUNCTION__, mCameraId);
635 return res;
636 }
637
638 if (viewSurfaceID == mPreviewViewSurfaceID) {
639 ALOGV("%s: Camera %d: New window is same as old window", __FUNCTION__, mCameraId);
640 return NO_ERROR;
641 }
642 #else
643 status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
644 const sp<Surface>& window) {
645 ATRACE_CALL();
646 status_t res;
647
648 if (binder == mPreviewSurface) {
649 ALOGV("%s: Camera %d: New window is same as old window",
650 __FUNCTION__, mCameraId);
651 return NO_ERROR;
652 }
653 #endif
654
655 Parameters::State state;
656 {
657 SharedParameters::Lock l(mParameters);
658 state = l.mParameters.state;
659 }
660 switch (state) {
661 case Parameters::DISCONNECTED:
662 case Parameters::RECORD:
663 case Parameters::STILL_CAPTURE:
664 case Parameters::VIDEO_SNAPSHOT:
665 ALOGE("%s: Camera %d: Cannot set preview display while in state %s", __FUNCTION__,
666 mCameraId, Parameters::getStateName(state));
667 return INVALID_OPERATION;
668 case Parameters::STOPPED:
669 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
670 // OK
671 break;
672 case Parameters::PREVIEW:
673 // Already running preview - need to stop and create a new stream
674 res = stopStream();
675 if (res != OK) {
676 ALOGE("%s: Unable to stop preview to swap windows: %s (%d)", __FUNCTION__,
677 strerror(-res), res);
678 return res;
679 }
680 state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
681 break;
682 }
683
684 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
685 mPreviewViewSurfaceID = viewSurfaceID;
686 #else
687 mPreviewSurface = binder;
688 #endif
689
690 res = mStreamingProcessor->setPreviewWindow(window);
691 if (res != OK) {
692 ALOGE("%s: Unable to set new preview window: %s (%d)", __FUNCTION__, strerror(-res), res);
693 return res;
694 }
695
696 if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) {
697 SharedParameters::Lock l(mParameters);
698 l.mParameters.state = state;
699 return startPreviewL(l.mParameters, false);
700 }
701
702 return OK;
703 }
704
705 void Camera2Client::setPreviewCallbackFlag(int flag) {
706 ATRACE_CALL();
707 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
708 Mutex::Autolock icl(mBinderSerializationLock);
709
710 if ( checkPid(__FUNCTION__) != OK) return;
711
712 SharedParameters::Lock l(mParameters);
713 setPreviewCallbackFlagL(l.mParameters, flag);
714 }
715
716 void Camera2Client::setPreviewCallbackFlagL(Parameters &params, int flag) {
717 status_t res = OK;
718
719 switch(params.state) {
720 case Parameters::STOPPED:
721 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
722 case Parameters::PREVIEW:
723 case Parameters::STILL_CAPTURE:
724 // OK
725 break;
726 default:
727 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
728 ALOGE("%s: Camera %d: Can't use preview callbacks "
729 "in state %d", __FUNCTION__, mCameraId, params.state);
730 return;
731 }
732 }
733
734 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
735 ALOGV("%s: setting oneshot", __FUNCTION__);
736 params.previewCallbackOneShot = true;
737 }
738 if (params.previewCallbackFlags != (uint32_t)flag) {
739
740 if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
741 // Disable any existing preview callback window when enabling
742 // preview callback flags
743 res = mCallbackProcessor->setCallbackWindow(NULL);
744 if (res != OK) {
745 ALOGE("%s: Camera %d: Unable to clear preview callback surface:"
746 " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
747 return;
748 }
749 params.previewCallbackSurface = false;
750 }
751
752 params.previewCallbackFlags = flag;
753
754 if (params.state == Parameters::PREVIEW) {
755 res = startPreviewL(params, true);
756 if (res != OK) {
757 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
758 __FUNCTION__, mCameraId,
759 Parameters::getStateName(params.state));
760 }
761 }
762 }
763 }
764
765 status_t Camera2Client::setPreviewCallbackTarget(const sp<SurfaceType>& target) {
766 ATRACE_CALL();
767 ALOGV("%s: E", __FUNCTION__);
768 Mutex::Autolock icl(mBinderSerializationLock);
769 status_t res;
770 if ((res = checkPid(__FUNCTION__)) != OK) return res;
771
772 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
773 sp<Surface> window = target;
774 #else
775 sp<Surface> window;
776 if (target != 0) {
777 window = new Surface(target);
778 }
779 #endif
780
781 res = mCallbackProcessor->setCallbackWindow(window);
782 if (res != OK) {
783 ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)", __FUNCTION__,
784 mCameraId, strerror(-res), res);
785 return res;
786 }
787
788 SharedParameters::Lock l(mParameters);
789
790 if (window != NULL) {
791 // Disable traditional callbacks when a valid callback target is given
792 l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
793 l.mParameters.previewCallbackOneShot = false;
794 l.mParameters.previewCallbackSurface = true;
795 } else {
796 // Disable callback target if given a NULL interface.
797 l.mParameters.previewCallbackSurface = false;
798 }
799
800 switch (l.mParameters.state) {
801 case Parameters::PREVIEW:
802 res = startPreviewL(l.mParameters, true);
803 break;
804 case Parameters::RECORD:
805 case Parameters::VIDEO_SNAPSHOT:
806 res = startRecordingL(l.mParameters, true);
807 break;
808 default:
809 break;
810 }
811 if (res != OK) {
812 ALOGE("%s: Camera %d: Unable to refresh request in state %s", __FUNCTION__, mCameraId,
813 Parameters::getStateName(l.mParameters.state));
814 }
815
816 return OK;
817 }
818
819 status_t Camera2Client::startPreview() {
820 ATRACE_CALL();
821 ALOGV("%s: E", __FUNCTION__);
822 Mutex::Autolock icl(mBinderSerializationLock);
823 status_t res;
824 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
825 SharedParameters::Lock l(mParameters);
826 return startPreviewL(l.mParameters, false);
827 }
828
829 status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
830 ATRACE_CALL();
831 status_t res;
832
833 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
834
835 if (params.state == Parameters::DISCONNECTED) {
836 ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
837 return INVALID_OPERATION;
838 }
839 if ( (params.state == Parameters::PREVIEW ||
840 params.state == Parameters::RECORD ||
841 params.state == Parameters::VIDEO_SNAPSHOT)
842 && !restart) {
843 // Succeed the attempt to re-enter a streaming state
844 ALOGI("%s: Camera %d: Preview already active, ignoring restart",
845 __FUNCTION__, mCameraId);
846 return OK;
847 }
848 if (params.state > Parameters::PREVIEW && !restart) {
849 ALOGE("%s: Can't start preview in state %s",
850 __FUNCTION__,
851 Parameters::getStateName(params.state));
852 return INVALID_OPERATION;
853 }
854
855 if (!mStreamingProcessor->haveValidPreviewWindow()) {
856 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
857 return OK;
858 }
859 params.state = Parameters::STOPPED;
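// Remember the current preview stream ID so we can detect below whether
// updatePreviewStream() recreated the stream.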
860 int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();
861
862 res = mStreamingProcessor->updatePreviewStream(params);
863 if (res != OK) {
864 ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)",
865 __FUNCTION__, mCameraId, strerror(-res), res);
866 return res;
867 }
868
869 bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;
870
871 // We could wait to create the JPEG output stream until first actual use
872 // (first takePicture call). However, this would substantially increase the
873 // first capture latency on HAL3 devices.
874 // So create it unconditionally at preview start. As a drawback,
875 // this increases gralloc memory consumption for applications that don't
876 // ever take a picture. Do not enter this mode when jpeg stream will slow
877 // down preview.
878 // TODO: Find a better compromise, though this likely would involve HAL
879 // changes.
880 int lastJpegStreamId = mJpegProcessor->getStreamId();
881 // If jpeg stream will slow down preview, make sure we remove it before starting preview
882 if (params.slowJpegMode) {
883 if (lastJpegStreamId != NO_STREAM) {
884 // Pause preview if we are streaming
885 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
886 if (activeRequestId != 0) {
887 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
888 if (res != OK) {
889 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
890 __FUNCTION__, mCameraId, strerror(-res), res);
891 }
892 res = mDevice->waitUntilDrained();
893 if (res != OK) {
894 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
895 __FUNCTION__, mCameraId, strerror(-res), res);
896 }
897 }
898
899 res = mJpegProcessor->deleteStream();
900
901 if (res != OK) {
902 ALOGE("%s: Camera %d: delete Jpeg stream failed: %s (%d)",
903 __FUNCTION__, mCameraId, strerror(-res), res);
904 }
905
906 if (activeRequestId != 0) {
907 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
908 if (res != OK) {
909 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
910 __FUNCTION__, mCameraId, strerror(-res), res);
911 }
912 }
913 }
914 } else {
915 res = updateProcessorStream(mJpegProcessor, params);
916 if (res != OK) {
917 ALOGE("%s: Camera %d: Can't pre-configure still image "
918 "stream: %s (%d)",
919 __FUNCTION__, mCameraId, strerror(-res), res);
920 return res;
921 }
922 }
923 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
924
925 Vector<int32_t> outputStreams;
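// Preview callbacks are needed if the legacy callback flags are enabled or an
// explicit callback surface has been set.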
926 bool callbacksEnabled = (params.previewCallbackFlags &
927 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
928 params.previewCallbackSurface;
929
930 if (callbacksEnabled) {
931 // Can't have recording stream hanging around when enabling callbacks,
932 // since it exceeds the max stream count on some devices.
933 if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
934 ALOGV("%s: Camera %d: Clearing out recording stream before "
935 "creating callback stream", __FUNCTION__, mCameraId);
936 res = mStreamingProcessor->stopStream();
937 if (res != OK) {
938 ALOGE("%s: Camera %d: Can't stop streaming to delete "
939 "recording stream", __FUNCTION__, mCameraId);
940 return res;
941 }
942 res = mStreamingProcessor->deleteRecordingStream();
943 if (res != OK) {
944 ALOGE("%s: Camera %d: Unable to delete recording stream before "
945 "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId,
946 strerror(-res), res);
947 return res;
948 }
949 }
950
951 res = mCallbackProcessor->updateStream(params);
952 if (res != OK) {
953 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
954 __FUNCTION__, mCameraId, strerror(-res), res);
955 return res;
956 }
957 outputStreams.push(getCallbackStreamId());
958 } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
959 /**
960 * Delete the now-unused callback stream when the preview stream has changed
961 * and preview callbacks are not enabled. There is no need to stop the preview
962 * stream first, since preview is already in the STOPPED state at this point.
963 */
964 ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId);
965 res = mCallbackProcessor->deleteStream();
966 if (res != OK) {
967 ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
968 __FUNCTION__, mCameraId, strerror(-res), res);
969 return res;
970 }
971 }
972
973 if (params.useZeroShutterLag() &&
974 getRecordingStreamId() == NO_STREAM) {
975 res = updateProcessorStream(mZslProcessor, params);
976 if (res != OK) {
977 ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
978 __FUNCTION__, mCameraId, strerror(-res), res);
979 return res;
980 }
981
982 if (jpegStreamChanged) {
983 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
984 __FUNCTION__, mCameraId);
985 mZslProcessor->clearZslQueue();
986 }
987 outputStreams.push(getZslStreamId());
988 } else {
989 mZslProcessor->deleteStream();
990 }
991
992 outputStreams.push(getPreviewStreamId());
993
994 if (params.isDeviceZslSupported) {
995 // If device ZSL is supported, resume preview buffers that may be paused
996 // during last takePicture().
997 mDevice->dropStreamBuffers(false, getPreviewStreamId());
998 }
999
1000 if (!params.recordingHint) {
1001 if (!restart) {
1002 res = mStreamingProcessor->updatePreviewRequest(params);
1003 if (res != OK) {
1004 ALOGE("%s: Camera %d: Can't set up preview request: "
1005 "%s (%d)", __FUNCTION__, mCameraId,
1006 strerror(-res), res);
1007 return res;
1008 }
1009 }
1010 res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW,
1011 outputStreams);
1012 } else {
1013 if (!restart) {
1014 res = mStreamingProcessor->updateRecordingRequest(params);
1015 if (res != OK) {
1016 ALOGE("%s: Camera %d: Can't set up preview request with "
1017 "record hint: %s (%d)", __FUNCTION__, mCameraId,
1018 strerror(-res), res);
1019 return res;
1020 }
1021 }
1022 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1023 outputStreams);
1024 }
1025 if (res != OK) {
1026 ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)",
1027 __FUNCTION__, mCameraId, strerror(-res), res);
1028 return res;
1029 }
1030
1031 mCallbackProcessor->unpauseCallback();
1032 params.state = Parameters::PREVIEW;
1033 return OK;
1034 }
1035
1036 void Camera2Client::stopPreview() {
1037 ATRACE_CALL();
1038 ALOGV("%s: E", __FUNCTION__);
1039 Mutex::Autolock icl(mBinderSerializationLock);
1040 status_t res;
1041 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
1042 stopPreviewL();
1043 }
1044
1045 void Camera2Client::stopPreviewL() {
1046 ATRACE_CALL();
1047
1048 if (!mInitialized) {
1049 // If we haven't initialized yet, there's no stream to stop (b/379558387)
1050 return;
1051 }
1052
1053 status_t res;
1054 const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
1055 Parameters::State state;
1056 {
1057 SharedParameters::Lock l(mParameters);
1058 state = l.mParameters.state;
1059 }
1060
1061 switch (state) {
1062 case Parameters::DISCONNECTED:
1063 // Nothing to do.
1064 break;
1065 case Parameters::STOPPED:
1066 case Parameters::VIDEO_SNAPSHOT:
1067 case Parameters::STILL_CAPTURE:
1068 mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout);
1069 FALLTHROUGH_INTENDED;
1070 case Parameters::RECORD:
1071 case Parameters::PREVIEW:
1072 mCallbackProcessor->pauseCallback();
1073 syncWithDevice();
1074 // Because of the flush, a camera device sync alone is not a sufficient
1075 // guarantee that the current client parameters have been
1076 // applied correctly. To resolve this, wait for the current
1077 // request id to show up in the results.
1078 waitUntilCurrentRequestIdLocked();
1079 res = stopStream();
1080 if (res != OK) {
1081 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1082 __FUNCTION__, mCameraId, strerror(-res), res);
1083 }
1084
1085 // Flush all in-process captures and buffers in order to stop
1086 // preview faster.
1087 res = mDevice->flush();
1088 if (res != OK) {
1089 ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
1090 __FUNCTION__, mCameraId, strerror(-res), res);
1091 }
1092
1093 res = mDevice->waitUntilDrained();
1094 if (res != OK) {
1095 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1096 __FUNCTION__, mCameraId, strerror(-res), res);
1097 }
1098 // Clean up recording stream
1099 res = mStreamingProcessor->deleteRecordingStream();
1100 if (res != OK) {
1101 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1102 "stop preview: %s (%d)",
1103 __FUNCTION__, mCameraId, strerror(-res), res);
1104 }
1105 FALLTHROUGH_INTENDED;
1106 case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
1107 SharedParameters::Lock l(mParameters);
1108 l.mParameters.state = Parameters::STOPPED;
1109 commandStopFaceDetectionL(l.mParameters);
1110 break;
1111 }
1112 default:
1113 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId,
1114 state);
1115 }
1116 }
1117
1118 bool Camera2Client::previewEnabled() {
1119 ATRACE_CALL();
1120 Mutex::Autolock icl(mBinderSerializationLock);
1121 status_t res;
1122 if ( (res = checkPid(__FUNCTION__) ) != OK) return false;
1123
1124 SharedParameters::Lock l(mParameters);
1125 return l.mParameters.state == Parameters::PREVIEW;
1126 }
1127
1128 status_t Camera2Client::setVideoBufferMode(int32_t videoBufferMode) {
1129 ATRACE_CALL();
1130 Mutex::Autolock icl(mBinderSerializationLock);
1131 status_t res;
1132 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1133
1134 SharedParameters::Lock l(mParameters);
1135 switch (l.mParameters.state) {
1136 case Parameters::RECORD:
1137 case Parameters::VIDEO_SNAPSHOT:
1138 ALOGE("%s: Camera %d: Can't be called in state %s",
1139 __FUNCTION__, mCameraId,
1140 Parameters::getStateName(l.mParameters.state));
1141 return INVALID_OPERATION;
1142 default:
1143 // OK
1144 break;
1145 }
1146
1147 if (videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1148 ALOGE("%s: %d: Only video buffer queue is supported", __FUNCTION__, __LINE__);
1149 return BAD_VALUE;
1150 }
1151
1152 l.mParameters.videoBufferMode = videoBufferMode;
1153
1154 return OK;
1155 }
1156
1157 status_t Camera2Client::startRecording() {
1158 ATRACE_CALL();
1159 ALOGV("%s: E", __FUNCTION__);
1160 Mutex::Autolock icl(mBinderSerializationLock);
1161 status_t res;
1162 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1163 SharedParameters::Lock l(mParameters);
1164
1165 return startRecordingL(l.mParameters, false);
1166 }
1167
1168 status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
1169 status_t res = OK;
1170
1171 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
1172
1173 switch (params.state) {
1174 case Parameters::STOPPED:
1175 res = startPreviewL(params, false);
1176 if (res != OK) return res;
1177 // Make sure the first preview request is submitted to the HAL device to avoid
1178 // two consecutive sets of configure_streams calls into the HAL.
1179 // TODO: Refactor this to avoid initial preview configuration.
1180 syncWithDevice();
1181 break;
1182 case Parameters::PREVIEW:
1183 // Ready to go
1184 break;
1185 case Parameters::RECORD:
1186 case Parameters::VIDEO_SNAPSHOT:
1187 // OK to call this when recording is already on, just skip unless
1188 // we're looking to restart
1189 if (!restart) return OK;
1190 break;
1191 default:
1192 ALOGE("%s: Camera %d: Can't start recording in state %s",
1193 __FUNCTION__, mCameraId,
1194 Parameters::getStateName(params.state));
1195 return INVALID_OPERATION;
1196 };
1197
1198 if (params.videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1199 ALOGE("%s: Camera %d: Recording only supported buffer queue mode, but "
1200 "mode %d is requested!", __FUNCTION__, mCameraId, params.videoBufferMode);
1201 return INVALID_OPERATION;
1202 }
1203
1204 if (!mStreamingProcessor->haveValidRecordingWindow()) {
1205 ALOGE("%s: No valid recording window", __FUNCTION__);
1206 return INVALID_OPERATION;
1207 }
1208
1209 if (!restart) {
1210 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1211 res = mStreamingProcessor->updateRecordingRequest(params);
1212 if (res != OK) {
1213 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
1214 __FUNCTION__, mCameraId, strerror(-res), res);
1215 return res;
1216 }
1217 }
1218
1219 // Not all devices can support a preview callback stream and a recording
1220 // stream at the same time, so assume none of them can.
1221 if (mCallbackProcessor->getStreamId() != NO_STREAM) {
1222 ALOGV("%s: Camera %d: Clearing out callback stream before "
1223 "creating recording stream", __FUNCTION__, mCameraId);
1224 res = mStreamingProcessor->stopStream();
1225 if (res != OK) {
1226 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1227 __FUNCTION__, mCameraId);
1228 return res;
1229 }
1230 res = mCallbackProcessor->deleteStream();
1231 if (res != OK) {
1232 ALOGE("%s: Camera %d: Unable to delete callback stream before "
1233 "record: %s (%d)", __FUNCTION__, mCameraId,
1234 strerror(-res), res);
1235 return res;
1236 }
1237 }
1238
1239 // Clean up ZSL before transitioning into recording
1240 if (mZslProcessor->getStreamId() != NO_STREAM) {
1241 ALOGV("%s: Camera %d: Clearing out zsl stream before "
1242 "creating recording stream", __FUNCTION__, mCameraId);
1243 res = mStreamingProcessor->stopStream();
1244 if (res != OK) {
1245 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1246 __FUNCTION__, mCameraId);
1247 return res;
1248 }
1249 res = mDevice->waitUntilDrained();
1250 if (res != OK) {
1251 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1252 __FUNCTION__, mCameraId, strerror(-res), res);
1253 }
1254 res = mZslProcessor->clearZslQueue();
1255 if (res != OK) {
1256 ALOGE("%s: Camera %d: Can't clear zsl queue",
1257 __FUNCTION__, mCameraId);
1258 return res;
1259 }
1260 res = mZslProcessor->deleteStream();
1261 if (res != OK) {
1262 ALOGE("%s: Camera %d: Unable to delete zsl stream before "
1263 "record: %s (%d)", __FUNCTION__, mCameraId,
1264 strerror(-res), res);
1265 return res;
1266 }
1267 }
1268
1269 // Disable callbacks if they're enabled; can't record and use callbacks,
1270 // and we can't fail record start without stagefright asserting.
1271 params.previewCallbackFlags = 0;
1272
1273 // May need to reconfigure video snapshot JPEG sizes
1274 // during recording startup, so need a more complex sequence here to
1275 // ensure an early stream reconfiguration doesn't happen
1276 bool recordingStreamNeedsUpdate;
1277 res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
1278 if (res != OK) {
1279 ALOGE("%s: Camera %d: Can't query recording stream",
1280 __FUNCTION__, mCameraId);
1281 return res;
1282 }
1283
1284 if (recordingStreamNeedsUpdate) {
1285 // Need to stop stream here so updateProcessorStream won't trigger configureStream
1286 // Right now camera device cannot handle configureStream failure gracefully
1287 // when device is streaming
1288 res = mStreamingProcessor->stopStream();
1289 if (res != OK) {
1290 ALOGE("%s: Camera %d: Can't stop streaming to update record "
1291 "stream", __FUNCTION__, mCameraId);
1292 return res;
1293 }
1294 res = mDevice->waitUntilDrained();
1295 if (res != OK) {
1296 ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
1297 "%s (%d)", __FUNCTION__, mCameraId,
1298 strerror(-res), res);
1299 }
1300
1301 res = updateProcessorStream<
1302 StreamingProcessor,
1303 &StreamingProcessor::updateRecordingStream>(
1304 mStreamingProcessor,
1305 params);
1306 if (res != OK) {
1307 ALOGE("%s: Camera %d: Unable to update recording stream: "
1308 "%s (%d)", __FUNCTION__, mCameraId,
1309 strerror(-res), res);
1310 return res;
1311 }
1312 }
1313
1314 Vector<int32_t> outputStreams;
1315 outputStreams.push(getPreviewStreamId());
1316 outputStreams.push(getRecordingStreamId());
1317
1318 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1319 outputStreams);
1320
1321 // startStream might trigger a configureStream call and device might fail
1322 // configureStream due to jpeg size > video size. Try again with jpeg size overridden
1323 // to video size.
1324 if (res == BAD_VALUE) {
1325 overrideVideoSnapshotSize(params);
1326 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1327 outputStreams);
1328 }
1329
1330 if (res != OK) {
1331 ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)",
1332 __FUNCTION__, mCameraId, strerror(-res), res);
1333 return res;
1334 }
1335
1336 if (params.state < Parameters::RECORD) {
1337 params.state = Parameters::RECORD;
1338 }
1339
1340 return OK;
1341 }
1342
1343 void Camera2Client::stopRecording() {
1344 ATRACE_CALL();
1345 ALOGV("%s: E", __FUNCTION__);
1346 Mutex::Autolock icl(mBinderSerializationLock);
1347 SharedParameters::Lock l(mParameters);
1348
1349 status_t res;
1350 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
1351
1352 switch (l.mParameters.state) {
1353 case Parameters::RECORD:
1354 // OK to stop
1355 break;
1356 case Parameters::STOPPED:
1357 case Parameters::PREVIEW:
1358 case Parameters::STILL_CAPTURE:
1359 case Parameters::VIDEO_SNAPSHOT:
1360 default:
1361 ALOGE("%s: Camera %d: Can't stop recording in state %s",
1362 __FUNCTION__, mCameraId,
1363 Parameters::getStateName(l.mParameters.state));
1364 return;
1365 };
1366
1367 sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
1368
1369 // Remove recording stream because the video target may be abandoned soon.
1370 res = stopStream();
1371 if (res != OK) {
1372 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1373 __FUNCTION__, mCameraId, strerror(-res), res);
1374 }
1375
1376 res = mDevice->waitUntilDrained();
1377 if (res != OK) {
1378 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1379 __FUNCTION__, mCameraId, strerror(-res), res);
1380 }
1381 // Clean up recording stream
1382 res = mStreamingProcessor->deleteRecordingStream();
1383 if (res != OK) {
1384 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1385 "stop preview: %s (%d)",
1386 __FUNCTION__, mCameraId, strerror(-res), res);
1387 }
1388 l.mParameters.recoverOverriddenJpegSize();
1389
1390 // Restart preview
1391 res = startPreviewL(l.mParameters, true);
1392 if (res != OK) {
1393 ALOGE("%s: Camera %d: Unable to return to preview",
1394 __FUNCTION__, mCameraId);
1395 }
1396 }
1397
1398 bool Camera2Client::recordingEnabled() {
1399 ATRACE_CALL();
1400 Mutex::Autolock icl(mBinderSerializationLock);
1401
1402 if ( checkPid(__FUNCTION__) != OK) return false;
1403
1404 return recordingEnabledL();
1405 }
1406
1407 bool Camera2Client::recordingEnabledL() {
1408 ATRACE_CALL();
1409 SharedParameters::Lock l(mParameters);
1410
1411 return (l.mParameters.state == Parameters::RECORD
1412 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
1413 }
1414
1415 void Camera2Client::releaseRecordingFrame([[maybe_unused]] const sp<IMemory>& mem) {
1416 ATRACE_CALL();
1417 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1418 }
1419
1420 void Camera2Client::releaseRecordingFrameHandle([[maybe_unused]] native_handle_t *handle) {
1421 ATRACE_CALL();
1422 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1423 }
1424
1425 void Camera2Client::releaseRecordingFrameHandleBatch(
1426 [[maybe_unused]] const std::vector<native_handle_t*>& handles) {
1427 ATRACE_CALL();
1428 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1429 }
1430
1431 status_t Camera2Client::autoFocus() {
1432 ATRACE_CALL();
1433 Mutex::Autolock icl(mBinderSerializationLock);
1434 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1435 status_t res;
1436 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1437
1438 int triggerId;
1439 bool notifyImmediately = false;
1440 bool notifySuccess = false;
1441 {
1442 SharedParameters::Lock l(mParameters);
1443 if (l.mParameters.state < Parameters::PREVIEW) {
1444 ALOGE("%s: Camera %d: Call autoFocus when preview is inactive (state = %d).",
1445 __FUNCTION__, mCameraId, l.mParameters.state);
1446 return INVALID_OPERATION;
1447 }
1448
1449 /**
1450 * If the camera does not support auto-focus, it is a no-op and
1451 * onAutoFocus(boolean, Camera) callback will be called immediately
1452 * with a fake value of success set to true.
1453 *
1454 * Similarly, if focus mode is set to INFINITY, there's no reason to
1455 * bother the HAL.
1456 */
1457 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1458 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1459 notifyImmediately = true;
1460 notifySuccess = true;
1461 }
1462 /**
1463 * If we're in CAF mode, and AF has already been locked, just fire back
1464 * the callback right away; the HAL would not send a notification since
1465 * no state change would happen on an AF trigger.
1466 */
1467 if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE ||
1468 l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) &&
1469 l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) {
1470 notifyImmediately = true;
1471 notifySuccess = true;
1472 }
1473 /**
1474 * Send immediate notification back to client
1475 */
1476 if (notifyImmediately) {
1477 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1478 if (l.mRemoteCallback != 0) {
1479 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1480 notifySuccess ? 1 : 0, 0);
1481 }
1482 return OK;
1483 }
1484 /**
1485 * Handle quirk mode for AF in scene modes
1486 */
1487 if (l.mParameters.quirks.triggerAfWithAuto &&
1488 l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
1489 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
1490 !l.mParameters.focusingAreas[0].isEmpty()) {
1491 ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
1492 __FUNCTION__, l.mParameters.focusMode);
1493 l.mParameters.shadowFocusMode = l.mParameters.focusMode;
1494 l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO;
1495 updateRequests(l.mParameters);
1496 }
1497
1498 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
1499 triggerId = l.mParameters.currentAfTriggerId;
1500 }
1501 ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);
1502
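// Make sure the latest request settings (including any quirk-forced switch to
// FOCUS_MODE_AUTO above) have reached the HAL before triggering autofocus.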
1503 syncWithDevice();
1504
1505 mDevice->triggerAutofocus(triggerId);
1506
1507 return OK;
1508 }
1509
1510 status_t Camera2Client::cancelAutoFocus() {
1511 ATRACE_CALL();
1512 Mutex::Autolock icl(mBinderSerializationLock);
1513 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1514 status_t res;
1515 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1516
1517 int triggerId;
1518 {
1519 SharedParameters::Lock l(mParameters);
1520 if (l.mParameters.state == Parameters::DISCONNECTED) {
1521 ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
1522 return INVALID_OPERATION;
1523 }
1524
1525 // Canceling does nothing in FIXED or INFINITY modes
1526 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1527 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1528 return OK;
1529 }
1530
1531 // An active AF trigger is canceled
1532 if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
1533 ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
1534 }
1535
1536 triggerId = ++l.mParameters.afTriggerCounter;
1537
1538 // When using triggerAfWithAuto quirk, may need to reset focus mode to
1539 // the real state at this point. No need to cancel explicitly if
1540 // changing the AF mode.
1541 if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) {
1542 ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__,
1543 l.mParameters.shadowFocusMode);
1544 l.mParameters.focusMode = l.mParameters.shadowFocusMode;
1545 l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID;
1546 updateRequests(l.mParameters);
1547
1548 return OK;
1549 }
1550 if (l.mParameters.allowZslMode) {
1551 mZslProcessor->clearZslQueue();
1552 }
1553 }
1554 syncWithDevice();
1555
1556 mDevice->triggerCancelAutofocus(triggerId);
1557
1558 return OK;
1559 }
1560
1561 status_t Camera2Client::takePicture(int /*msgType*/) {
1562 ATRACE_CALL();
1563 Mutex::Autolock icl(mBinderSerializationLock);
1564 status_t res;
1565 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1566
1567 int takePictureCounter;
1568 bool shouldSyncWithDevice = true;
1569 {
1570 SharedParameters::Lock l(mParameters);
1571 switch (l.mParameters.state) {
1572 case Parameters::DISCONNECTED:
1573 case Parameters::STOPPED:
1574 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1575 ALOGE("%s: Camera %d: Cannot take picture without preview enabled",
1576 __FUNCTION__, mCameraId);
1577 return INVALID_OPERATION;
1578 case Parameters::PREVIEW:
1579 // Good to go for takePicture
1580 res = commandStopFaceDetectionL(l.mParameters);
1581 if (res != OK) {
1582 ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
1583 __FUNCTION__, mCameraId);
1584 return res;
1585 }
1586 l.mParameters.state = Parameters::STILL_CAPTURE;
1587
1588 // Remove recording stream to prevent video snapshot jpeg logic kicking in
1589 if (l.mParameters.isJpegSizeOverridden() &&
1590 mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
1591 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1592 if (res != OK) {
1593 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1594 __FUNCTION__, mCameraId, strerror(-res), res);
1595 }
1596 res = mDevice->waitUntilDrained();
1597 if (res != OK) {
1598 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1599 __FUNCTION__, mCameraId, strerror(-res), res);
1600 }
1601 // Clean up recording stream
1602 res = mStreamingProcessor->deleteRecordingStream();
1603 if (res != OK) {
1604 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1605 "stop preview: %s (%d)",
1606 __FUNCTION__, mCameraId, strerror(-res), res);
1607 }
1608 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
1609 if (res != OK) {
1610 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
1611 __FUNCTION__, mCameraId, strerror(-res), res);
1612 }
1613 l.mParameters.recoverOverriddenJpegSize();
1614 }
1615 break;
1616 case Parameters::RECORD:
1617 // Good to go for video snapshot
1618 l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
1619 break;
1620 case Parameters::STILL_CAPTURE:
1621 case Parameters::VIDEO_SNAPSHOT:
1622 ALOGE("%s: Camera %d: Already taking a picture",
1623 __FUNCTION__, mCameraId);
1624 return INVALID_OPERATION;
1625 }
1626
1627 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
1628 int lastJpegStreamId = mJpegProcessor->getStreamId();
1629         // In slowJpegMode, the jpeg stream is created by CaptureSequencer just before capturing
1630 if (!l.mParameters.slowJpegMode) {
1631 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1632 }
1633
1634         // If the video snapshot stream fails to configure, try overriding the video
1635         // snapshot size to the video size
1636 if (res == BAD_VALUE && l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
1637 overrideVideoSnapshotSize(l.mParameters);
1638 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1639 }
1640 if (res != OK) {
1641 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
1642 __FUNCTION__, mCameraId, strerror(-res), res);
1643 return res;
1644 }
1645 takePictureCounter = ++l.mParameters.takePictureCounter;
1646
1647 // Clear ZSL buffer queue when Jpeg size is changed.
1648 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
1649 if (l.mParameters.allowZslMode && jpegStreamChanged) {
1650 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
1651 __FUNCTION__, mCameraId);
1652 mZslProcessor->clearZslQueue();
1653 }
1654
1655         // Always sync with the device when flash is forced on, when the device
1656         // reports that flash is needed (AE state FLASH_REQUIRED), or when AE is
1657         // in any other state besides CONVERGED that may require a precapture
1658         // trigger.
1659 if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON &&
1660 (l.mParameters.aeState == ANDROID_CONTROL_AE_STATE_CONVERGED)) {
1661 shouldSyncWithDevice = false;
1662 }
1663 }
1664
1665 ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);
1666
1667 // Make sure HAL has correct settings in case precapture trigger is needed.
1668 if (shouldSyncWithDevice) {
1669 syncWithDevice();
1670 }
1671
1672 res = mCaptureSequencer->startCapture();
1673 if (res != OK) {
1674 ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
1675 __FUNCTION__, mCameraId, strerror(-res), res);
1676 }
1677
1678 return res;
1679 }
1680
1681 status_t Camera2Client::setParameters(const String8& params) {
1682 ATRACE_CALL();
1683 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1684 Mutex::Autolock icl(mBinderSerializationLock);
1685 status_t res;
1686 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1687
1688 SharedParameters::Lock l(mParameters);
1689
1690 Parameters::focusMode_t focusModeBefore = l.mParameters.focusMode;
1691 res = l.mParameters.set(params);
1692 if (res != OK) return res;
1693 Parameters::focusMode_t focusModeAfter = l.mParameters.focusMode;
1694
1695 if (l.mParameters.allowZslMode && focusModeAfter != focusModeBefore) {
1696 mZslProcessor->clearZslQueue();
1697 }
1698
1699 res = updateRequests(l.mParameters);
1700
1701 return res;
1702 }
1703
1704 String8 Camera2Client::getParameters() const {
1705 ATRACE_CALL();
1706 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1707 Mutex::Autolock icl(mBinderSerializationLock);
1708     // The camera service itself can get the parameters at any time
1709 if (getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
1710
1711 SharedParameters::ReadLock l(mParameters);
1712
1713 return l.mParameters.get();
1714 }
1715
1716 status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
1717 ATRACE_CALL();
1718 Mutex::Autolock icl(mBinderSerializationLock);
1719 status_t res;
1720 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1721
1722 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId,
1723 cmd, arg1, arg2);
1724
1725 switch (cmd) {
1726 case CAMERA_CMD_START_SMOOTH_ZOOM:
1727 return commandStartSmoothZoomL();
1728 case CAMERA_CMD_STOP_SMOOTH_ZOOM:
1729 return commandStopSmoothZoomL();
1730 case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
1731 return commandSetDisplayOrientationL(arg1);
1732 case CAMERA_CMD_ENABLE_SHUTTER_SOUND:
1733 return commandEnableShutterSoundL(arg1 == 1);
1734 case CAMERA_CMD_PLAY_RECORDING_SOUND:
1735 return commandPlayRecordingSoundL();
1736 case CAMERA_CMD_START_FACE_DETECTION:
1737 return commandStartFaceDetectionL(arg1);
1738 case CAMERA_CMD_STOP_FACE_DETECTION: {
1739 SharedParameters::Lock l(mParameters);
1740 return commandStopFaceDetectionL(l.mParameters);
1741 }
1742 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
1743 return commandEnableFocusMoveMsgL(arg1 == 1);
1744 case CAMERA_CMD_PING:
1745 return commandPingL();
1746 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
1747 case CAMERA_CMD_SET_VIDEO_FORMAT:
1748 ALOGE("%s: command %d (arguments %d, %d) is not supported.",
1749 __FUNCTION__, cmd, arg1, arg2);
1750 return BAD_VALUE;
1751 default:
1752 ALOGE("%s: Unknown command %d (arguments %d, %d)",
1753 __FUNCTION__, cmd, arg1, arg2);
1754 return BAD_VALUE;
1755 }
1756 }
1757
1758 status_t Camera2Client::commandStartSmoothZoomL() {
1759 ALOGE("%s: Unimplemented!", __FUNCTION__);
1760 return OK;
1761 }
1762
1763 status_t Camera2Client::commandStopSmoothZoomL() {
1764 ALOGE("%s: Unimplemented!", __FUNCTION__);
1765 return OK;
1766 }
1767
1768 status_t Camera2Client::commandSetDisplayOrientationL(int degrees) {
1769 int transform = Parameters::degToTransform(degrees,
1770 mCameraFacing == CAMERA_FACING_FRONT);
1771 if (transform == -1) {
1772 ALOGE("%s: Camera %d: Error setting %d as display orientation value",
1773 __FUNCTION__, mCameraId, degrees);
1774 return BAD_VALUE;
1775 }
1776 {
1777 Mutex::Autolock icl(mRotateAndCropLock);
1778 if (mRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_NONE) {
1779 ALOGI("%s: Rotate and crop set to: %d, skipping display orientation!", __FUNCTION__,
1780 mRotateAndCropMode);
1781 transform = mRotateAndCropPreviewTransform;
1782 }
1783 }
1784 SharedParameters::Lock l(mParameters);
1785 if (transform != l.mParameters.previewTransform &&
1786 getPreviewStreamId() != NO_STREAM) {
1787 mDevice->setStreamTransform(getPreviewStreamId(), transform);
1788 }
1789 l.mParameters.previewTransform = transform;
1790 return OK;
1791 }
1792
1793 status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
1794 SharedParameters::Lock l(mParameters);
1795 if (enable) {
1796 l.mParameters.playShutterSound = true;
1797 return OK;
1798 }
1799
1800 l.mParameters.playShutterSound = false;
1801 return OK;
1802 }
1803
1804 status_t Camera2Client::commandPlayRecordingSoundL() {
1805 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1806 return OK;
1807 }
1808
1809 status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
1810 ALOGV("%s: Camera %d: Starting face detection",
1811 __FUNCTION__, mCameraId);
1812 status_t res;
1813 SharedParameters::Lock l(mParameters);
1814 switch (l.mParameters.state) {
1815 case Parameters::DISCONNECTED:
1816 case Parameters::STOPPED:
1817 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1818 case Parameters::STILL_CAPTURE:
1819 ALOGE("%s: Camera %d: Cannot start face detection without preview active",
1820 __FUNCTION__, mCameraId);
1821 return INVALID_OPERATION;
1822 case Parameters::PREVIEW:
1823 case Parameters::RECORD:
1824 case Parameters::VIDEO_SNAPSHOT:
1825 // Good to go for starting face detect
1826 break;
1827 }
1828 // Ignoring type
1829 if (l.mParameters.fastInfo.bestFaceDetectMode ==
1830 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
1831 ALOGE("%s: Camera %d: Face detection not supported",
1832 __FUNCTION__, mCameraId);
1833 return BAD_VALUE;
1834 }
1835 if (l.mParameters.enableFaceDetect) return OK;
1836
1837 l.mParameters.enableFaceDetect = true;
1838
1839 res = updateRequests(l.mParameters);
1840
1841 return res;
1842 }
1843
1844 status_t Camera2Client::commandStopFaceDetectionL(Parameters &params) {
1845 status_t res = OK;
1846 ALOGV("%s: Camera %d: Stopping face detection",
1847 __FUNCTION__, mCameraId);
1848
1849 if (!params.enableFaceDetect) return OK;
1850
1851 params.enableFaceDetect = false;
1852
1853 if (params.state == Parameters::PREVIEW
1854 || params.state == Parameters::RECORD
1855 || params.state == Parameters::VIDEO_SNAPSHOT) {
1856 res = updateRequests(params);
1857 }
1858
1859 return res;
1860 }
1861
1862 status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) {
1863 SharedParameters::Lock l(mParameters);
1864 l.mParameters.enableFocusMoveMessages = enable;
1865
1866 return OK;
1867 }
1868
1869 status_t Camera2Client::commandPingL() {
1870     // Always ping back as long as access is authorized and the device is alive
1871 SharedParameters::Lock l(mParameters);
1872 if (l.mParameters.state != Parameters::DISCONNECTED) {
1873 return OK;
1874 } else {
1875 return NO_INIT;
1876 }
1877 }
1878
1879 void Camera2Client::notifyError(int32_t errorCode,
1880 const CaptureResultExtras& resultExtras) {
1881 int32_t err = CAMERA_ERROR_UNKNOWN;
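    // Map the camera2 error code to the closest API1 error code for the client callback.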
1882 switch(errorCode) {
1883 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
1884 err = CAMERA_ERROR_RELEASED;
1885 break;
1886 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
1887 err = CAMERA_ERROR_UNKNOWN;
1888 break;
1889 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
1890 err = CAMERA_ERROR_SERVER_DIED;
1891 break;
1892 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
1893 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
1894 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1895 ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
1896 __FUNCTION__, errorCode, resultExtras.requestId);
1897
1898 if ((hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST == errorCode) ||
1899 (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
1900 Mutex::Autolock al(mLatestRequestMutex);
1901
1902 mLatestFailedRequestIds.add(resultExtras.requestId);
1903 mLatestRequestSignal.signal();
1904 }
1905 mCaptureSequencer->notifyError(errorCode, resultExtras);
1906 return;
1907 default:
1908 err = CAMERA_ERROR_UNKNOWN;
1909 break;
1910 }
1911
1912 ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
1913 resultExtras.requestId);
1914
1915 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1916 if (l.mRemoteCallback != nullptr) {
1917 l.mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, err, 0);
1918 }
1919 }
1920
1921
1922 /** Device-related methods */
1923 void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
1924 ALOGV("%s: Autofocus state now %d, last trigger %d",
1925 __FUNCTION__, newState, triggerId);
1926 bool sendCompletedMessage = false;
1927 bool sendMovingMessage = false;
1928
1929 bool success = false;
1930 bool afInMotion = false;
1931 {
1932 SharedParameters::Lock l(mParameters);
1933 // Trace end of AF state
1934 char tmp[32];
1935 if (l.mParameters.afStateCounter > 0) {
1936 camera_metadata_enum_snprint(
1937 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1938 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
1939 }
1940
1941 // Update state
1942 l.mParameters.focusState = newState;
1943 l.mParameters.afStateCounter++;
1944
1945 // Trace start of AF state
1946
1947 camera_metadata_enum_snprint(
1948 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1949 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
1950
1951 switch (l.mParameters.focusMode) {
1952 case Parameters::FOCUS_MODE_AUTO:
1953 case Parameters::FOCUS_MODE_MACRO:
1954 // Don't send notifications upstream if they're not for the current AF
1955 // trigger. For example, if cancel was called in between, or if we
1956 // already sent a notification about this AF call.
1957 if (triggerId != l.mParameters.currentAfTriggerId) break;
1958 switch (newState) {
1959 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1960 success = true;
1961 FALLTHROUGH_INTENDED;
1962 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1963 sendCompletedMessage = true;
1964 l.mParameters.currentAfTriggerId = -1;
1965 break;
1966 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1967 // Just starting focusing, ignore
1968 break;
1969 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1970 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1971 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1972 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1973 default:
1974 // Unexpected in AUTO/MACRO mode
1975 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
1976 __FUNCTION__, newState);
1977 break;
1978 }
1979 break;
1980 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
1981 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
1982 switch (newState) {
1983 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1984 success = true;
1985 FALLTHROUGH_INTENDED;
1986 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1987 // Don't send notifications upstream if they're not for
1988 // the current AF trigger. For example, if cancel was
1989 // called in between, or if we already sent a
1990 // notification about this AF call.
1991                         // Send both an 'AF done' callback and an 'AF move' callback
1992 if (triggerId != l.mParameters.currentAfTriggerId) break;
1993 sendCompletedMessage = true;
1994 afInMotion = false;
1995 if (l.mParameters.enableFocusMoveMessages &&
1996 l.mParameters.afInMotion) {
1997 sendMovingMessage = true;
1998 }
1999 l.mParameters.currentAfTriggerId = -1;
2000 break;
2001 case ANDROID_CONTROL_AF_STATE_INACTIVE:
2002                         // Cancel was called, or we switched state; we only need to
2003                         // notify upstream if AF was currently moving
2004 afInMotion = false;
2005 if (l.mParameters.enableFocusMoveMessages &&
2006 l.mParameters.afInMotion) {
2007 sendMovingMessage = true;
2008 }
2009 break;
2010 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2011 // Start passive scan, inform upstream
2012 afInMotion = true;
2013 FALLTHROUGH_INTENDED;
2014 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2015 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
2016 // Stop passive scan, inform upstream
2017 if (l.mParameters.enableFocusMoveMessages) {
2018 sendMovingMessage = true;
2019 }
2020 break;
2021 }
2022 l.mParameters.afInMotion = afInMotion;
2023 break;
2024 case Parameters::FOCUS_MODE_EDOF:
2025 case Parameters::FOCUS_MODE_INFINITY:
2026 case Parameters::FOCUS_MODE_FIXED:
2027 default:
2028 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
2029 ALOGE("%s: Unexpected AF state change %d "
2030 "(ID %d) in focus mode %d",
2031 __FUNCTION__, newState, triggerId,
2032 l.mParameters.focusMode);
2033 }
2034 }
2035 }
2036 if (sendMovingMessage) {
2037 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
2038 if (l.mRemoteCallback != 0) {
2039 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
2040 afInMotion ? 1 : 0, 0);
2041 }
2042 }
2043 if (sendCompletedMessage) {
2044 ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
2045 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
2046 if (l.mRemoteCallback != 0) {
2047 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
2048 success ? 1 : 0, 0);
2049 }
2050 }
2051 }
2052
2053 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
2054 ALOGV("%s: Autoexposure state now %d, last trigger %d",
2055 __FUNCTION__, newState, triggerId);
2056 {
2057 SharedParameters::Lock l(mParameters);
2058 // Update state
2059 l.mParameters.aeState = newState;
2060 }
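    // Forward the new AE state to the capture sequencer.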
2061 mCaptureSequencer->notifyAutoExposure(newState, triggerId);
2062 }
2063
2064 void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
2065 nsecs_t timestamp) {
2066 ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
2067 __FUNCTION__, resultExtras.requestId, timestamp);
2068 mCaptureSequencer->notifyShutter(resultExtras, timestamp);
2069
2070 Camera2ClientBase::notifyShutter(resultExtras, timestamp);
2071 }
2072
2073 camera2::SharedParameters& Camera2Client::getParameters() {
2074 return mParameters;
2075 }
2076
2077 int Camera2Client::getPreviewStreamId() const {
2078 return mStreamingProcessor->getPreviewStreamId();
2079 }
2080
2081 int Camera2Client::getCaptureStreamId() const {
2082 return mJpegProcessor->getStreamId();
2083 }
2084
2085 int Camera2Client::getCallbackStreamId() const {
2086 return mCallbackProcessor->getStreamId();
2087 }
2088
2089 int Camera2Client::getRecordingStreamId() const {
2090 return mStreamingProcessor->getRecordingStreamId();
2091 }
2092
2093 int Camera2Client::getZslStreamId() const {
2094 return mZslProcessor->getStreamId();
2095 }
2096
2097 status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId,
2098 const wp<camera2::FrameProcessor::FilteredListener>& listener, bool sendPartials) {
2099 return mFrameProcessor->registerListener(minId, maxId, listener, sendPartials);
2100 }
2101
2102 status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId,
2103 const wp<camera2::FrameProcessor::FilteredListener>& listener) {
2104 return mFrameProcessor->removeListener(minId, maxId, listener);
2105 }
2106
2107 status_t Camera2Client::stopStream() {
2108 return mStreamingProcessor->stopStream();
2109 }
2110
2111 status_t Camera2Client::createJpegStreamL(Parameters &params) {
2112 status_t res = OK;
2113 int lastJpegStreamId = mJpegProcessor->getStreamId();
2114 if (lastJpegStreamId != NO_STREAM) {
2115 return INVALID_OPERATION;
2116 }
2117
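    // Pause streaming and drain the device before (re)configuring the JPEG stream.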
2118 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2119 if (res != OK) {
2120 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2121 __FUNCTION__, mCameraId, strerror(-res), res);
2122 return res;
2123 }
2124
2125 res = mDevice->flush();
2126 if (res != OK) {
2127         ALOGE("%s: Camera %d: Unable to flush device: %s (%d)",
2128 __FUNCTION__, mCameraId, strerror(-res), res);
2129 return res;
2130 }
2131
2132     // Ideally we wouldn't need this, but the current camera device
2133     // status tracking mechanism demands it.
2134 res = mDevice->waitUntilDrained();
2135 if (res != OK) {
2136 ALOGE("%s: Camera %d: Waiting device drain failed: %s (%d)",
2137 __FUNCTION__, mCameraId, strerror(-res), res);
2138 }
2139
2140 res = updateProcessorStream(mJpegProcessor, params);
2141 return res;
2142 }
2143
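// Out-of-class definitions for the static request ID range constants declared in the header.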
2144 const int32_t Camera2Client::kPreviewRequestIdStart;
2145 const int32_t Camera2Client::kPreviewRequestIdEnd;
2146 const int32_t Camera2Client::kRecordingRequestIdStart;
2147 const int32_t Camera2Client::kRecordingRequestIdEnd;
2148 const int32_t Camera2Client::kCaptureRequestIdStart;
2149 const int32_t Camera2Client::kCaptureRequestIdEnd;
2150
2151 /** Utility methods */
2152
2153 status_t Camera2Client::updateRequests(Parameters &params) {
2154 status_t res;
2155
2156 ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state);
2157
2158 res = mStreamingProcessor->incrementStreamingIds();
2159 if (res != OK) {
2160 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)",
2161 __FUNCTION__, mCameraId, strerror(-res), res);
2162 return res;
2163 }
2164
2165 res = mStreamingProcessor->updatePreviewRequest(params);
2166 if (res != OK) {
2167 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)",
2168 __FUNCTION__, mCameraId, strerror(-res), res);
2169 return res;
2170 }
2171 res = mStreamingProcessor->updateRecordingRequest(params);
2172 if (res != OK) {
2173 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
2174 __FUNCTION__, mCameraId, strerror(-res), res);
2175 return res;
2176 }
2177
2178 if (params.state == Parameters::PREVIEW) {
2179 res = startPreviewL(params, true);
2180 if (res != OK) {
2181 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)",
2182 __FUNCTION__, mCameraId, strerror(-res), res);
2183 return res;
2184 }
2185 } else if (params.state == Parameters::RECORD ||
2186 params.state == Parameters::VIDEO_SNAPSHOT) {
2187 res = startRecordingL(params, true);
2188 if (res != OK) {
2189 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
2190 __FUNCTION__, mCameraId, strerror(-res), res);
2191 return res;
2192 }
2193 }
2194 return res;
2195 }
2196
2197
2198 size_t Camera2Client::calculateBufferSize(int width, int height,
2199 int format, int stride) {
2200 switch (format) {
2201 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16
2202 return width * height * 2;
2203 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21
2204 return width * height * 3 / 2;
2205 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2
2206 return width * height * 2;
2207 case HAL_PIXEL_FORMAT_YV12: { // YV12
2208 size_t ySize = stride * height;
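            // The YV12 format requires the chroma stride to be 16-byte aligned.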
2209 size_t uvStride = (stride / 2 + 0xF) & ~0xF;
2210 size_t uvSize = uvStride * height / 2;
2211 return ySize + uvSize * 2;
2212 }
2213 case HAL_PIXEL_FORMAT_RGB_565:
2214 return width * height * 2;
2215 case HAL_PIXEL_FORMAT_RGBA_8888:
2216 return width * height * 4;
2217 case HAL_PIXEL_FORMAT_RAW16:
2218 return width * height * 2;
2219 default:
2220 ALOGE("%s: Unknown preview format: %x",
2221 __FUNCTION__, format);
2222 return 0;
2223 }
2224 }
2225
2226 status_t Camera2Client::syncWithDevice() {
2227 ATRACE_CALL();
2228 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms
2229 status_t res;
2230
2231 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
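    // Nothing to synchronize if there is no active streaming request yet.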
2232 if (activeRequestId == 0) return OK;
2233
2234 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout);
2235 if (res == TIMED_OUT) {
2236 ALOGE("%s: Camera %d: Timed out waiting sync with HAL",
2237 __FUNCTION__, mCameraId);
2238 } else if (res != OK) {
2239 ALOGE("%s: Camera %d: Error while waiting to sync with HAL",
2240 __FUNCTION__, mCameraId);
2241 }
2242 return res;
2243 }
2244
2245 template <typename ProcessorT>
2246 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2247 camera2::Parameters params) {
2248     // Convenience overload: default to ProcessorT::updateStream as the update function
2249 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>(
2250 processor, params);
2251 }
2252
2253 template <typename ProcessorT,
2254 status_t (ProcessorT::*updateStreamF)(const Parameters &)>
2255 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2256 Parameters params) {
2257 status_t res;
2258
2259 // Get raw pointer since sp<T> doesn't have operator->*
2260 ProcessorT *processorPtr = processor.get();
2261 res = (processorPtr->*updateStreamF)(params);
2262
2263     /**
2264      * The stream can't be updated while it's busy.
2265      *
2266      * In that case we need to stop the device (by temporarily clearing the
2267      * request queue), try the update again, and resume streaming once done.
2268      */
2269 if (res == -EBUSY) {
2270 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__,
2271 mCameraId);
2272 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2273 if (res != OK) {
2274 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2275 __FUNCTION__, mCameraId, strerror(-res), res);
2276 }
2277
2278 res = mDevice->waitUntilDrained();
2279 if (res != OK) {
2280 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
2281 __FUNCTION__, mCameraId, strerror(-res), res);
2282 }
2283
2284 res = (processorPtr->*updateStreamF)(params);
2285 if (res != OK) {
2286 ALOGE("%s: Camera %d: Failed to update processing stream "
2287                     "despite having halted streaming first: %s (%d)",
2288 __FUNCTION__, mCameraId, strerror(-res), res);
2289 }
2290
2291 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
2292 if (res != OK) {
2293 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
2294 __FUNCTION__, mCameraId, strerror(-res), res);
2295 }
2296 }
2297
2298 return res;
2299 }
2300
2301 status_t Camera2Client::overrideVideoSnapshotSize(Parameters &params) {
2302     ALOGV("%s: Camera %d: Overriding still capture size to the video size"
2303             , __FUNCTION__, mCameraId);
2304 params.overrideJpegSizeByVideoSize();
2305 status_t res = updateProcessorStream(mJpegProcessor, params);
2306 if (res != OK) {
2307 ALOGE("%s: Camera %d: Can't override video snapshot size to video size: %s (%d)",
2308 __FUNCTION__, mCameraId, strerror(-res), res);
2309 }
2310 return res;
2311 }
2312
2313 status_t Camera2Client::setVideoTarget(const sp<SurfaceType>& target) {
2314 ATRACE_CALL();
2315 ALOGV("%s: E", __FUNCTION__);
2316 Mutex::Autolock icl(mBinderSerializationLock);
2317 status_t res;
2318 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
2319
2320 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
2321 uint64_t videoSurfaceID;
2322 res = target->getUniqueId(&videoSurfaceID);
2323 if (res != OK) {
2324 ALOGE("%s: Camera %d: Could not getUniqueId in setVideoTarget.", __FUNCTION__, mCameraId);
2325 return res;
2326 }
2327 if (videoSurfaceID == mVideoSurfaceID) {
2328 ALOGE("%s: Camera %d: New video window is same as old video window", __FUNCTION__,
2329 mCameraId);
2330 return NO_ERROR;
2331 }
2332 #else
2333 sp<IBinder> binder = IInterface::asBinder(target);
2334 if (binder == mVideoSurface) {
2335 ALOGV("%s: Camera %d: New video window is same as old video window",
2336 __FUNCTION__, mCameraId);
2337 return NO_ERROR;
2338 }
2339 #endif
2340
2341 sp<Surface> window;
2342 int format;
2343 android_dataspace dataSpace;
2344
2345 if (target != nullptr) {
2346 // Using controlledByApp flag to ensure that the buffer queue remains in
2347 // async mode for the old camera API, where many applications depend
2348 // on that behavior.
2349 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
2350 window = new Surface(target->getIGraphicBufferProducer(), /*controlledByApp*/ true);
2351 #else
2352 window = new Surface(target, /*controlledByApp*/ true);
2353 #endif
2354
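        // Query the window's format and dataspace so the recording parameters match the target surface.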
2355 ANativeWindow *anw = window.get();
2356
2357 if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
2358 ALOGE("%s: Failed to query Surface format", __FUNCTION__);
2359 return res;
2360 }
2361
2362 if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
2363 reinterpret_cast<int*>(&dataSpace))) != OK) {
2364 ALOGE("%s: Failed to query Surface dataSpace", __FUNCTION__);
2365 return res;
2366 }
2367 }
2368
2369 Parameters::State state;
2370 {
2371 SharedParameters::Lock l(mParameters);
2372 state = l.mParameters.state;
2373 }
2374
2375 switch (state) {
2376 case Parameters::STOPPED:
2377 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
2378 case Parameters::PREVIEW:
2379 // OK
2380 break;
2381 case Parameters::DISCONNECTED:
2382 case Parameters::RECORD:
2383 case Parameters::STILL_CAPTURE:
2384 case Parameters::VIDEO_SNAPSHOT:
2385 default:
2386 ALOGE("%s: Camera %d: Cannot set video target while in state %s",
2387 __FUNCTION__, mCameraId,
2388 Parameters::getStateName(state));
2389 return INVALID_OPERATION;
2390 }
2391
2392 #if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
2393 mVideoSurfaceID = videoSurfaceID;
2394 #else
2395 mVideoSurface = binder;
2396 #endif
2397 res = mStreamingProcessor->setRecordingWindow(window);
2398 if (res != OK) {
2399 ALOGE("%s: Unable to set new recording window: %s (%d)",
2400 __FUNCTION__, strerror(-res), res);
2401 return res;
2402 }
2403
2404 {
2405 SharedParameters::Lock l(mParameters);
2406 l.mParameters.videoFormat = format;
2407 l.mParameters.videoDataSpace = dataSpace;
2408 }
2409
2410 return OK;
2411 }
2412
2413 status_t Camera2Client::setAudioRestriction(int /*mode*/) {
2414     // Empty implementation. setAudioRestriction is a hidden interface and is not
2415     // supported by the android.hardware.Camera API
2416 return INVALID_OPERATION;
2417 }
2418
2419 int32_t Camera2Client::getGlobalAudioRestriction() {
2420     // Empty implementation. getGlobalAudioRestriction is a hidden interface and is not
2421     // supported by the android.hardware.Camera API
2422 return INVALID_OPERATION;
2423 }
2424
2425 status_t Camera2Client::setCameraServiceWatchdog(bool enabled) {
2426 return mDevice->setCameraServiceWatchdog(enabled);
2427 }
2428
2429 status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal) {
2430 if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
2431
2432 {
2433 Mutex::Autolock icl(mRotateAndCropLock);
2434 if (mRotateAndCropIsSupported) {
2435 mRotateAndCropMode = rotateAndCrop;
2436 } else {
2437 mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2438 return OK;
2439 }
2440 }
2441
2442 return mDevice->setRotateAndCropAutoBehavior(
2443 static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop), fromHal);
2444 }
2445
2446 status_t Camera2Client::setAutoframingOverride(uint8_t autoframingValue) {
2447 if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
2448
2449 return mDevice->setAutoframingAutoBehavior(
2450 static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
2451 }
2452
2453 bool Camera2Client::supportsCameraMute() {
2454 return mDevice->supportsCameraMute();
2455 }
2456
2457 status_t Camera2Client::setCameraMute(bool enabled) {
2458 return mDevice->setCameraMute(enabled);
2459 }
2460
2461 void Camera2Client::setStreamUseCaseOverrides(
2462 const std::vector<int64_t>& useCaseOverrides) {
2463 mDevice->setStreamUseCaseOverrides(useCaseOverrides);
2464 }
2465
2466 void Camera2Client::clearStreamUseCaseOverrides() {
2467 mDevice->clearStreamUseCaseOverrides();
2468 }
2469
2470 bool Camera2Client::supportsZoomOverride() {
2471 return mDevice->supportsZoomOverride();
2472 }
2473
2474 status_t Camera2Client::setZoomOverride(int zoomOverride) {
2475 return mDevice->setZoomOverride(zoomOverride);
2476 }
2477
2478 status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
2479 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
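    // If a streaming request is active, wait until its ID has been reported back in results.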
2480 if (activeRequestId != 0) {
2481 auto res = waitUntilRequestIdApplied(activeRequestId,
2482 mDevice->getExpectedInFlightDuration());
2483 if (res == TIMED_OUT) {
2484 ALOGE("%s: Camera %d: Timed out waiting for current request id to return in results!",
2485 __FUNCTION__, mCameraId);
2486 return res;
2487 } else if (res != OK) {
2488 ALOGE("%s: Camera %d: Error while waiting for current request id to return in results!",
2489 __FUNCTION__, mCameraId);
2490 return res;
2491 }
2492 }
2493
2494 return OK;
2495 }
2496
2497 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
2498 Mutex::Autolock l(mLatestRequestMutex);
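    // Wait until the request ID shows up in either the applied or the failed list,
    // shrinking the remaining timeout after each wakeup.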
2499 while ((std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) ==
2500 mLatestRequestIds.end()) &&
2501 (std::find(mLatestFailedRequestIds.begin(), mLatestFailedRequestIds.end(), requestId) ==
2502 mLatestFailedRequestIds.end())) {
2503 nsecs_t startTime = systemTime();
2504
2505 auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
2506 if (res != OK) return res;
2507
2508 timeout -= (systemTime() - startTime);
2509 }
2510
2511 return (std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) !=
2512 mLatestRequestIds.end()) ? OK : DEAD_OBJECT;
2513 }
2514
2515 void Camera2Client::notifyRequestId(int32_t requestId) {
2516 Mutex::Autolock al(mLatestRequestMutex);
2517
2518 mLatestRequestIds.add(requestId);
2519 mLatestRequestSignal.signal();
2520 }
2521
2522 const char* Camera2Client::kAutofocusLabel = "autofocus";
2523 const char* Camera2Client::kTakepictureLabel = "take_picture";
2524
2525 } // namespace android
2526