1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2Client"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <sstream>
22
23 #include <inttypes.h>
24 #include <utils/Log.h>
25 #include <utils/Trace.h>
26
27 #include <camera/CameraUtils.h>
28 #include <camera/StringUtils.h>
29 #include <cutils/properties.h>
30 #include <gui/Surface.h>
31 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
32
33 #include "api1/Camera2Client.h"
34
35 #include "api1/client2/StreamingProcessor.h"
36 #include "api1/client2/JpegProcessor.h"
37 #include "api1/client2/CaptureSequencer.h"
38 #include "api1/client2/CallbackProcessor.h"
39 #include "api1/client2/ZslProcessor.h"
40 #include "device3/RotateAndCropMapper.h"
41 #include "utils/CameraServiceProxyWrapper.h"
42
43 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
44 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
45
46 #ifndef FALLTHROUGH_INTENDED
47 #define FALLTHROUGH_INTENDED [[fallthrough]]
48 #endif
49
50 namespace android {
51 using namespace camera2;
52
53 // Interface used by CameraService
54
55 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
56 const sp<hardware::ICameraClient>& cameraClient,
57 std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
58 std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
59 const std::string& clientPackageName,
60 const std::optional<std::string>& clientFeatureId,
61 const std::string& cameraDeviceId,
62 int api1CameraId,
63 int cameraFacing,
64 int sensorOrientation,
65 int clientPid,
66 uid_t clientUid,
67 int servicePid,
68 bool overrideForPerfClass,
69 int rotationOverride,
70 bool forceSlowJpegMode):
71 Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
72 attributionAndPermissionUtils, clientPackageName,
73 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
74 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
75 clientUid, servicePid, overrideForPerfClass, rotationOverride,
76 /*legacyClient*/ true),
77 mParameters(api1CameraId, cameraFacing),
78 mLatestRequestIds(kMaxRequestIds),
79 mLatestFailedRequestIds(kMaxRequestIds)
80 {
81 ATRACE_CALL();
82
83 mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
84 mRotateAndCropIsSupported = false;
85 mRotateAndCropPreviewTransform = 0;
86
87 SharedParameters::Lock l(mParameters);
88 l.mParameters.state = Parameters::DISCONNECTED;
89 l.mParameters.isSlowJpegModeForced = forceSlowJpegMode;
90 }
91
92 status_t Camera2Client::initialize(sp<CameraProviderManager> manager,
93 const std::string& monitorTags) {
94 return initializeImpl(manager, monitorTags);
95 }
96
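// Queries the HAL's still-capture request template and reports whether it
// enables ZSL (ANDROID_CONTROL_ENABLE_ZSL). Used during initialization to
// populate Parameters::isDeviceZslSupported.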
97 bool Camera2Client::isZslEnabledInStillTemplate() {
98 bool zslEnabled = false;
99 CameraMetadata stillTemplate;
100 status_t res = mDevice->createDefaultRequest(
101 camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
102 if (res == OK) {
103 camera_metadata_entry_t enableZsl = stillTemplate.find(ANDROID_CONTROL_ENABLE_ZSL);
104 if (enableZsl.count == 1) {
105 zslEnabled = (enableZsl.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE);
106 }
107 }
108
109 return zslEnabled;
110 }
111
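// Shared initialization path for initialize(): builds the default parameters
// from the device's static info, determines rotate-and-crop support and the
// preview transform, then creates the streaming processor and starts the
// frame, capture-sequencer, JPEG, ZSL, and callback processor threads.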
112 template<typename TProviderPtr>
113 status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags)
114 {
115 ATRACE_CALL();
116 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
117 status_t res;
118
119 res = Camera2ClientBase::initialize(providerPtr, monitorTags);
120 if (res != OK) {
121 return res;
122 }
123
124 {
125 SharedParameters::Lock l(mParameters);
126
127 res = l.mParameters.initialize(mDevice.get());
128 if (res != OK) {
129 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
130 __FUNCTION__, mCameraId, strerror(-res), res);
131 return NO_INIT;
132 }
133
134 l.mParameters.isDeviceZslSupported = isZslEnabledInStillTemplate();
135 }
136
137 const CameraMetadata& staticInfo = mDevice->info();
138 mRotateAndCropIsSupported = camera3::RotateAndCropMapper::isNeeded(&staticInfo);
139 // The 'mRotateAndCropMode' value only accounts for the necessary adjustment
140 // when the display rotates. The sensor orientation still needs to be calculated
141 // and applied similar to the Camera2 path.
142 CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
143 &mRotateAndCropPreviewTransform);
144
145 mStreamingProcessor = new StreamingProcessor(this);
146
147 std::string threadName = std::string("C2-") + std::to_string(mCameraId);
148 mFrameProcessor = new FrameProcessor(mDevice, this);
149 res = mFrameProcessor->run((threadName + "-FrameProc").c_str());
150 if (res != OK) {
151 ALOGE("%s: Unable to start frame processor thread: %s (%d)",
152 __FUNCTION__, strerror(-res), res);
153 return res;
154 }
155
156 mCaptureSequencer = new CaptureSequencer(this);
157 res = mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
158 if (res != OK) {
159 ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
160 __FUNCTION__, strerror(-res), res);
161 return res;
162 }
163
164 mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
165 res = mJpegProcessor->run((threadName + "-JpegProc").c_str());
166 if (res != OK) {
167 ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
168 __FUNCTION__, strerror(-res), res);
169 return res;
170 }
171
172 mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
173 res = mZslProcessor->run((threadName + "-ZslProc").c_str());
174 if (res != OK) {
175 ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
176 __FUNCTION__, strerror(-res), res);
177 return res;
178 }
179
180 mCallbackProcessor = new CallbackProcessor(this);
181 res = mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
182 if (res != OK) {
183 ALOGE("%s: Unable to start callback processor thread: %s (%d)",
184 __FUNCTION__, strerror(-res), res);
185 return res;
186 }
187
188 if (gLogLevel >= 1) {
189 SharedParameters::Lock l(mParameters);
190 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
191 mCameraId);
192 ALOGD("%s", l.mParameters.paramsFlattened.c_str());
193 }
194
195 return OK;
196 }
197
198 Camera2Client::~Camera2Client() {
199 ATRACE_CALL();
200 ALOGV("~Camera2Client");
201
202 mDestructionStarted = true;
203
204 disconnect();
205
206 ALOGI("Camera %d: Closed", mCameraId);
207 }
208
209 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
210 return BasicClient::dump(fd, args);
211 }
212
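// Writes a human-readable dump of the client state (current Parameters,
// stream IDs, and quirks) to fd, followed by the per-processor and device dumps.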
213 status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
214 std::ostringstream result;
215 result << fmt::sprintf("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
216 (getRemoteCallback() != NULL ?
217 (void *) (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
218 mClientPid);
219 result << " State: ";
220 #define CASE_APPEND_ENUM(x) case x: result << #x "\n"; break;
221
222 const Parameters& p = mParameters.unsafeAccess();
223
224 result << Parameters::getStateName(p.state);
225
226 result << "\n Current parameters:\n";
227 result << fmt::sprintf(" Preview size: %d x %d\n",
228 p.previewWidth, p.previewHeight);
229 result << fmt::sprintf(" Preview FPS range: %d - %d\n",
230 p.previewFpsRange[0], p.previewFpsRange[1]);
231 result << fmt::sprintf(" Preview HAL pixel format: 0x%x\n",
232 p.previewFormat);
233 result << fmt::sprintf(" Preview transform: %x\n",
234 p.previewTransform);
235 result << fmt::sprintf(" Picture size: %d x %d\n",
236 p.pictureWidth, p.pictureHeight);
237 result << fmt::sprintf(" Jpeg thumbnail size: %d x %d\n",
238 p.jpegThumbSize[0], p.jpegThumbSize[1]);
239 result << fmt::sprintf(" Jpeg quality: %d, thumbnail quality: %d\n",
240 p.jpegQuality, p.jpegThumbQuality);
241 result << fmt::sprintf(" Jpeg rotation: %d\n", p.jpegRotation);
242 result << fmt::sprintf(" GPS tags %s\n",
243 p.gpsEnabled ? "enabled" : "disabled");
244 if (p.gpsEnabled) {
245 result << fmt::sprintf(" GPS lat x long x alt: %f x %f x %f\n",
246 p.gpsCoordinates[0], p.gpsCoordinates[1],
247 p.gpsCoordinates[2]);
248 result << fmt::sprintf(" GPS timestamp: %" PRId64 "\n",
249 p.gpsTimestamp);
250 result << fmt::sprintf(" GPS processing method: %s\n",
251 p.gpsProcessingMethod.c_str());
252 }
253
254 result << " White balance mode: ";
255 switch (p.wbMode) {
256 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
257 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
258 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
259 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
260 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
261 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
262 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
263 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
264 default: result << "UNKNOWN\n";
265 }
266
267 result << " Effect mode: ";
268 switch (p.effectMode) {
269 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
270 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
271 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
272 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
273 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
274 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
275 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
276 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
277 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
278 default: result << "UNKNOWN\n";
279 }
280
281 result << " Antibanding mode: ";
282 switch (p.antibandingMode) {
283 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
284 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
285 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
286 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
287 default: result << "UNKNOWN\n";
288 }
289
290 result << " Scene mode: ";
291 switch (p.sceneMode) {
292 case ANDROID_CONTROL_SCENE_MODE_DISABLED:
293 result << "AUTO\n"; break;
294 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
295 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
296 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
297 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)
298 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT)
299 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT)
300 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE)
301 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH)
302 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW)
303 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET)
304 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO)
305 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS)
306 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS)
307 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
308 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
309 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
310 default: result << "UNKNOWN\n";
311 }
312
313 result << " Flash mode: ";
314 switch (p.flashMode) {
315 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
316 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
317 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON)
318 CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
319 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
320 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
321 default: result << "UNKNOWN\n";
322 }
323
324 result << " Focus mode: ";
325 switch (p.focusMode) {
326 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
327 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
328 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO)
329 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE)
330 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF)
331 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
332 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
333 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
334 default: result << "UNKNOWN\n";
335 }
336
337 result << " Focus state: ";
338 switch (p.focusState) {
339 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
340 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
341 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
342 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
343 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
344 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
345 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
346 default: result << "UNKNOWN\n";
347 }
348
349 result << " Focusing areas:\n";
350 for (size_t i = 0; i < p.focusingAreas.size(); i++) {
351 result << fmt::sprintf(" [ (%d, %d, %d, %d), weight %d ]\n",
352 p.focusingAreas[i].left,
353 p.focusingAreas[i].top,
354 p.focusingAreas[i].right,
355 p.focusingAreas[i].bottom,
356 p.focusingAreas[i].weight);
357 }
358
359 result << fmt::sprintf(" Exposure compensation index: %d\n",
360 p.exposureCompensation);
361
362 result << fmt::sprintf(" AE lock %s, AWB lock %s\n",
363 p.autoExposureLock ? "enabled" : "disabled",
364 p.autoWhiteBalanceLock ? "enabled" : "disabled" );
365
366 result << " Metering areas:\n";
367 for (size_t i = 0; i < p.meteringAreas.size(); i++) {
368 result << fmt::sprintf(" [ (%d, %d, %d, %d), weight %d ]\n",
369 p.meteringAreas[i].left,
370 p.meteringAreas[i].top,
371 p.meteringAreas[i].right,
372 p.meteringAreas[i].bottom,
373 p.meteringAreas[i].weight);
374 }
375
376 result << fmt::sprintf(" Zoom index: %d\n", p.zoom);
377 result << fmt::sprintf(" Video size: %d x %d\n", p.videoWidth,
378 p.videoHeight);
379
380 result << fmt::sprintf(" Recording hint is %s\n",
381 p.recordingHint ? "set" : "not set");
382
383 result << fmt::sprintf(" Video stabilization is %s\n",
384 p.videoStabilization ? "enabled" : "disabled");
385
386 result << fmt::sprintf(" Selected still capture FPS range: %d - %d\n",
387 p.fastInfo.bestStillCaptureFpsRange[0],
388 p.fastInfo.bestStillCaptureFpsRange[1]);
389
390 result << fmt::sprintf(" Use zero shutter lag: %s\n",
391 p.useZeroShutterLag() ? "yes" : "no");
392
393 result << " Current streams:\n";
394 result << fmt::sprintf(" Preview stream ID: %d\n",
395 getPreviewStreamId());
396 result << fmt::sprintf(" Capture stream ID: %d\n",
397 getCaptureStreamId());
398 result << fmt::sprintf(" Recording stream ID: %d\n",
399 getRecordingStreamId());
400
401 result << " Quirks for this camera:\n";
402 bool haveQuirk = false;
403 if (p.quirks.triggerAfWithAuto) {
404 result << " triggerAfWithAuto\n";
405 haveQuirk = true;
406 }
407 if (p.quirks.useZslFormat) {
408 result << " useZslFormat\n";
409 haveQuirk = true;
410 }
411 if (p.quirks.meteringCropRegion) {
412 result << " meteringCropRegion\n";
413 haveQuirk = true;
414 }
415 if (p.quirks.partialResults) {
416 result << " usePartialResult\n";
417 haveQuirk = true;
418 }
419 if (!haveQuirk) {
420 result << " none\n";
421 }
422
423 std::string resultStr = std::move(result.str());
424
425 write(fd, resultStr.c_str(), resultStr.size());
426
427 mStreamingProcessor->dump(fd, args);
428
429 mCaptureSequencer->dump(fd, args);
430
431 mFrameProcessor->dump(fd, args);
432
433 mZslProcessor->dump(fd, args);
434
435 return dumpDevice(fd, args);
436 #undef CASE_APPEND_ENUM
437 }
438
439 // ICamera interface
440
441 binder::Status Camera2Client::disconnect() {
442 ATRACE_CALL();
443 nsecs_t startTime = systemTime();
444 Mutex::Autolock icl(mBinderSerializationLock);
445
446 binder::Status res = binder::Status::ok();
447 // Allow both client and the cameraserver to disconnect at all times
448 int callingPid = getCallingPid();
449 if (callingPid != mClientPid && callingPid != mServicePid) return res;
450
451 if (mDevice == 0) return res;
452
453 ALOGV("Camera %d: Shutting down", mCameraId);
454
455 /**
456 * disconnect() cannot call any methods that might need to promote a
457 * wp<Camera2Client>, since disconnect can be called from the destructor, at
458 * which point all such promotions will fail.
459 */
460
461 stopPreviewL();
462
463 {
464 SharedParameters::Lock l(mParameters);
465 if (l.mParameters.state == Parameters::DISCONNECTED) return res;
466 l.mParameters.state = Parameters::DISCONNECTED;
467 }
468
469 mFrameProcessor->requestExit();
470 mCaptureSequencer->requestExit();
471 mJpegProcessor->requestExit();
472 mZslProcessor->requestExit();
473 mCallbackProcessor->requestExit();
474
475 ALOGV("Camera %d: Waiting for threads", mCameraId);
476
477 {
478 // Don't wait with lock held, in case the other threads need to
479 // complete callbacks that re-enter Camera2Client
480 mBinderSerializationLock.unlock();
481
482 mFrameProcessor->join();
483 mCaptureSequencer->join();
484 mJpegProcessor->join();
485 mZslProcessor->join();
486 mCallbackProcessor->join();
487
488 mBinderSerializationLock.lock();
489 }
490
491 ALOGV("Camera %d: Deleting streams", mCameraId);
492
493 mStreamingProcessor->deletePreviewStream();
494 mStreamingProcessor->deleteRecordingStream();
495 mJpegProcessor->deleteStream();
496 mCallbackProcessor->deleteStream();
497 mZslProcessor->deleteStream();
498
499 ALOGV("Camera %d: Disconnecting device", mCameraId);
500
501 bool hasDeviceError = mDevice->hasDeviceError();
502 mDevice->disconnect();
503
504 CameraService::Client::disconnect();
505
506 int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
507 mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
508
509 return res;
510 }
511
512 status_t Camera2Client::connect(const sp<hardware::ICameraClient>& client) {
513 ATRACE_CALL();
514 ALOGV("%s: E", __FUNCTION__);
515 Mutex::Autolock icl(mBinderSerializationLock);
516
517 if (mClientPid != 0 && getCallingPid() != mClientPid) {
518 ALOGE("%s: Camera %d: Connection attempt from pid %d; "
519 "current locked to pid %d", __FUNCTION__,
520 mCameraId, getCallingPid(), mClientPid);
521 return BAD_VALUE;
522 }
523
524 mClientPid = getCallingPid();
525
526 mRemoteCallback = client;
527 mSharedCameraCallbacks = client;
528
529 return OK;
530 }
531
532 status_t Camera2Client::lock() {
533 ATRACE_CALL();
534 ALOGV("%s: E", __FUNCTION__);
535 Mutex::Autolock icl(mBinderSerializationLock);
536 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
537 __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
538
539 if (mClientPid == 0) {
540 mClientPid = getCallingPid();
541 return OK;
542 }
543
544 if (mClientPid != getCallingPid()) {
545 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
546 __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
547 return EBUSY;
548 }
549
550 return OK;
551 }
552
553 status_t Camera2Client::unlock() {
554 ATRACE_CALL();
555 ALOGV("%s: E", __FUNCTION__);
556 Mutex::Autolock icl(mBinderSerializationLock);
557 ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
558 __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
559
560 if (mClientPid == getCallingPid()) {
561 SharedParameters::Lock l(mParameters);
562 if (l.mParameters.state == Parameters::RECORD ||
563 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
564 ALOGD("Not allowed to unlock camera during recording.");
565 return INVALID_OPERATION;
566 }
567 mClientPid = 0;
568 mRemoteCallback.clear();
569 mSharedCameraCallbacks.clear();
570 return OK;
571 }
572
573 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
574 __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
575 return EBUSY;
576 }
577
578 status_t Camera2Client::setPreviewTarget(
579 const sp<IGraphicBufferProducer>& bufferProducer) {
580 ATRACE_CALL();
581 ALOGV("%s: E", __FUNCTION__);
582 Mutex::Autolock icl(mBinderSerializationLock);
583 status_t res;
584 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
585
586 sp<IBinder> binder;
587 sp<Surface> window;
588 if (bufferProducer != 0) {
589 binder = IInterface::asBinder(bufferProducer);
590 // Using controlledByApp flag to ensure that the buffer queue remains in
591 // async mode for the old camera API, where many applications depend
592 // on that behavior.
593 window = new Surface(bufferProducer, /*controlledByApp*/ true);
594 }
595 return setPreviewWindowL(binder, window);
596 }
597
598 status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
599 const sp<Surface>& window) {
600 ATRACE_CALL();
601 status_t res;
602
603 if (binder == mPreviewSurface) {
604 ALOGV("%s: Camera %d: New window is same as old window",
605 __FUNCTION__, mCameraId);
606 return NO_ERROR;
607 }
608
609 Parameters::State state;
610 {
611 SharedParameters::Lock l(mParameters);
612 state = l.mParameters.state;
613 }
614 switch (state) {
615 case Parameters::DISCONNECTED:
616 case Parameters::RECORD:
617 case Parameters::STILL_CAPTURE:
618 case Parameters::VIDEO_SNAPSHOT:
619 ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
620 __FUNCTION__, mCameraId,
621 Parameters::getStateName(state));
622 return INVALID_OPERATION;
623 case Parameters::STOPPED:
624 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
625 // OK
626 break;
627 case Parameters::PREVIEW:
628 // Already running preview - need to stop and create a new stream
629 res = stopStream();
630 if (res != OK) {
631 ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
632 __FUNCTION__, strerror(-res), res);
633 return res;
634 }
635 state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
636 break;
637 }
638
639 mPreviewSurface = binder;
640 res = mStreamingProcessor->setPreviewWindow(window);
641 if (res != OK) {
642 ALOGE("%s: Unable to set new preview window: %s (%d)",
643 __FUNCTION__, strerror(-res), res);
644 return res;
645 }
646
647 if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) {
648 SharedParameters::Lock l(mParameters);
649 l.mParameters.state = state;
650 return startPreviewL(l.mParameters, false);
651 }
652
653 return OK;
654 }
655
656 void Camera2Client::setPreviewCallbackFlag(int flag) {
657 ATRACE_CALL();
658 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
659 Mutex::Autolock icl(mBinderSerializationLock);
660
661 if ( checkPid(__FUNCTION__) != OK) return;
662
663 SharedParameters::Lock l(mParameters);
664 setPreviewCallbackFlagL(l.mParameters, flag);
665 }
666
667 void Camera2Client::setPreviewCallbackFlagL(Parameters &params, int flag) {
668 status_t res = OK;
669
670 switch(params.state) {
671 case Parameters::STOPPED:
672 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
673 case Parameters::PREVIEW:
674 case Parameters::STILL_CAPTURE:
675 // OK
676 break;
677 default:
678 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
679 ALOGE("%s: Camera %d: Can't use preview callbacks "
680 "in state %d", __FUNCTION__, mCameraId, params.state);
681 return;
682 }
683 }
684
685 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
686 ALOGV("%s: setting oneshot", __FUNCTION__);
687 params.previewCallbackOneShot = true;
688 }
689 if (params.previewCallbackFlags != (uint32_t)flag) {
690
691 if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
692 // Disable any existing preview callback window when enabling
693 // preview callback flags
694 res = mCallbackProcessor->setCallbackWindow(NULL);
695 if (res != OK) {
696 ALOGE("%s: Camera %d: Unable to clear preview callback surface:"
697 " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
698 return;
699 }
700 params.previewCallbackSurface = false;
701 }
702
703 params.previewCallbackFlags = flag;
704
705 if (params.state == Parameters::PREVIEW) {
706 res = startPreviewL(params, true);
707 if (res != OK) {
708 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
709 __FUNCTION__, mCameraId,
710 Parameters::getStateName(params.state));
711 }
712 }
713 }
714 }
715
716 status_t Camera2Client::setPreviewCallbackTarget(
717 const sp<IGraphicBufferProducer>& callbackProducer) {
718 ATRACE_CALL();
719 ALOGV("%s: E", __FUNCTION__);
720 Mutex::Autolock icl(mBinderSerializationLock);
721 status_t res;
722 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
723
724 sp<Surface> window;
725 if (callbackProducer != 0) {
726 window = new Surface(callbackProducer);
727 }
728
729 res = mCallbackProcessor->setCallbackWindow(window);
730 if (res != OK) {
731 ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
732 __FUNCTION__, mCameraId, strerror(-res), res);
733 return res;
734 }
735
736 SharedParameters::Lock l(mParameters);
737
738 if (window != NULL) {
739 // Disable traditional callbacks when a valid callback target is given
740 l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
741 l.mParameters.previewCallbackOneShot = false;
742 l.mParameters.previewCallbackSurface = true;
743 } else {
744 // Disable callback target if given a NULL interface.
745 l.mParameters.previewCallbackSurface = false;
746 }
747
748 switch(l.mParameters.state) {
749 case Parameters::PREVIEW:
750 res = startPreviewL(l.mParameters, true);
751 break;
752 case Parameters::RECORD:
753 case Parameters::VIDEO_SNAPSHOT:
754 res = startRecordingL(l.mParameters, true);
755 break;
756 default:
757 break;
758 }
759 if (res != OK) {
760 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
761 __FUNCTION__, mCameraId,
762 Parameters::getStateName(l.mParameters.state));
763 }
764
765 return OK;
766 }
767
768
769 status_t Camera2Client::startPreview() {
770 ATRACE_CALL();
771 ALOGV("%s: E", __FUNCTION__);
772 Mutex::Autolock icl(mBinderSerializationLock);
773 status_t res;
774 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
775 SharedParameters::Lock l(mParameters);
776 return startPreviewL(l.mParameters, false);
777 }
778
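// Configures the preview stream (and, depending on the parameters, the JPEG,
// preview-callback, and ZSL streams) and starts streaming. Callers hold the
// SharedParameters lock; 'restart' forces the active request to be refreshed
// even if preview is already running.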
779 status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
780 ATRACE_CALL();
781 status_t res;
782
783 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
784
785 if (params.state == Parameters::DISCONNECTED) {
786 ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
787 return INVALID_OPERATION;
788 }
789 if ( (params.state == Parameters::PREVIEW ||
790 params.state == Parameters::RECORD ||
791 params.state == Parameters::VIDEO_SNAPSHOT)
792 && !restart) {
793 // Succeed on an attempt to re-enter a streaming state
794 ALOGI("%s: Camera %d: Preview already active, ignoring restart",
795 __FUNCTION__, mCameraId);
796 return OK;
797 }
798 if (params.state > Parameters::PREVIEW && !restart) {
799 ALOGE("%s: Can't start preview in state %s",
800 __FUNCTION__,
801 Parameters::getStateName(params.state));
802 return INVALID_OPERATION;
803 }
804
805 if (!mStreamingProcessor->haveValidPreviewWindow()) {
806 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
807 return OK;
808 }
809 params.state = Parameters::STOPPED;
810 int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();
811
812 res = mStreamingProcessor->updatePreviewStream(params);
813 if (res != OK) {
814 ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)",
815 __FUNCTION__, mCameraId, strerror(-res), res);
816 return res;
817 }
818
819 bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;
820
821 // We could wait to create the JPEG output stream until first actual use
822 // (first takePicture call). However, this would substantially increase the
823 // first capture latency on HAL3 devices.
824 // So create it unconditionally at preview start. As a drawback,
825 // this increases gralloc memory consumption for applications that don't
826 // ever take a picture. Do not enter this mode when jpeg stream will slow
827 // down preview.
828 // TODO: Find a better compromise, though this likely would involve HAL
829 // changes.
830 int lastJpegStreamId = mJpegProcessor->getStreamId();
831 // If jpeg stream will slow down preview, make sure we remove it before starting preview
832 if (params.slowJpegMode) {
833 if (lastJpegStreamId != NO_STREAM) {
834 // Pause preview if we are streaming
835 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
836 if (activeRequestId != 0) {
837 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
838 if (res != OK) {
839 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
840 __FUNCTION__, mCameraId, strerror(-res), res);
841 }
842 res = mDevice->waitUntilDrained();
843 if (res != OK) {
844 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
845 __FUNCTION__, mCameraId, strerror(-res), res);
846 }
847 }
848
849 res = mJpegProcessor->deleteStream();
850
851 if (res != OK) {
852 ALOGE("%s: Camera %d: delete Jpeg stream failed: %s (%d)",
853 __FUNCTION__, mCameraId, strerror(-res), res);
854 }
855
856 if (activeRequestId != 0) {
857 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
858 if (res != OK) {
859 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
860 __FUNCTION__, mCameraId, strerror(-res), res);
861 }
862 }
863 }
864 } else {
865 res = updateProcessorStream(mJpegProcessor, params);
866 if (res != OK) {
867 ALOGE("%s: Camera %d: Can't pre-configure still image "
868 "stream: %s (%d)",
869 __FUNCTION__, mCameraId, strerror(-res), res);
870 return res;
871 }
872 }
873 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
874
875 Vector<int32_t> outputStreams;
876 bool callbacksEnabled = (params.previewCallbackFlags &
877 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
878 params.previewCallbackSurface;
879
880 if (callbacksEnabled) {
881 // Can't have recording stream hanging around when enabling callbacks,
882 // since it exceeds the max stream count on some devices.
883 if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
884 ALOGV("%s: Camera %d: Clearing out recording stream before "
885 "creating callback stream", __FUNCTION__, mCameraId);
886 res = mStreamingProcessor->stopStream();
887 if (res != OK) {
888 ALOGE("%s: Camera %d: Can't stop streaming to delete "
889 "recording stream", __FUNCTION__, mCameraId);
890 return res;
891 }
892 res = mStreamingProcessor->deleteRecordingStream();
893 if (res != OK) {
894 ALOGE("%s: Camera %d: Unable to delete recording stream before "
895 "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId,
896 strerror(-res), res);
897 return res;
898 }
899 }
900
901 res = mCallbackProcessor->updateStream(params);
902 if (res != OK) {
903 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
904 __FUNCTION__, mCameraId, strerror(-res), res);
905 return res;
906 }
907 outputStreams.push(getCallbackStreamId());
908 } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
909 /**
910 * Delete the unused callback stream when the preview stream has changed and
911 * callbacks are not enabled. There is no need to stop the preview stream, as
912 * preview is in the STOPPED state at this point.
913 */
914 ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId);
915 res = mCallbackProcessor->deleteStream();
916 if (res != OK) {
917 ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
918 __FUNCTION__, mCameraId, strerror(-res), res);
919 return res;
920 }
921 }
922
923 if (params.useZeroShutterLag() &&
924 getRecordingStreamId() == NO_STREAM) {
925 res = updateProcessorStream(mZslProcessor, params);
926 if (res != OK) {
927 ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
928 __FUNCTION__, mCameraId, strerror(-res), res);
929 return res;
930 }
931
932 if (jpegStreamChanged) {
933 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
934 __FUNCTION__, mCameraId);
935 mZslProcessor->clearZslQueue();
936 }
937 outputStreams.push(getZslStreamId());
938 } else {
939 mZslProcessor->deleteStream();
940 }
941
942 outputStreams.push(getPreviewStreamId());
943
944 if (params.isDeviceZslSupported) {
945 // If device ZSL is supported, resume preview buffers that may be paused
946 // during last takePicture().
947 mDevice->dropStreamBuffers(false, getPreviewStreamId());
948 }
949
950 if (!params.recordingHint) {
951 if (!restart) {
952 res = mStreamingProcessor->updatePreviewRequest(params);
953 if (res != OK) {
954 ALOGE("%s: Camera %d: Can't set up preview request: "
955 "%s (%d)", __FUNCTION__, mCameraId,
956 strerror(-res), res);
957 return res;
958 }
959 }
960 res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW,
961 outputStreams);
962 } else {
963 if (!restart) {
964 res = mStreamingProcessor->updateRecordingRequest(params);
965 if (res != OK) {
966 ALOGE("%s: Camera %d: Can't set up preview request with "
967 "record hint: %s (%d)", __FUNCTION__, mCameraId,
968 strerror(-res), res);
969 return res;
970 }
971 }
972 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
973 outputStreams);
974 }
975 if (res != OK) {
976 ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)",
977 __FUNCTION__, mCameraId, strerror(-res), res);
978 return res;
979 }
980
981 mCallbackProcessor->unpauseCallback();
982 params.state = Parameters::PREVIEW;
983 return OK;
984 }
985
986 void Camera2Client::stopPreview() {
987 ATRACE_CALL();
988 ALOGV("%s: E", __FUNCTION__);
989 Mutex::Autolock icl(mBinderSerializationLock);
990 status_t res;
991 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
992 stopPreviewL();
993 }
994
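// Stops any active preview or recording streaming: waits for a pending still
// capture if needed, pauses preview callbacks, flushes and drains the device,
// removes the recording stream, and moves the client to the STOPPED state.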
995 void Camera2Client::stopPreviewL() {
996 ATRACE_CALL();
997 status_t res;
998 const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
999 Parameters::State state;
1000 {
1001 SharedParameters::Lock l(mParameters);
1002 state = l.mParameters.state;
1003 }
1004
1005 switch (state) {
1006 case Parameters::DISCONNECTED:
1007 // Nothing to do.
1008 break;
1009 case Parameters::STOPPED:
1010 case Parameters::VIDEO_SNAPSHOT:
1011 case Parameters::STILL_CAPTURE:
1012 mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout);
1013 FALLTHROUGH_INTENDED;
1014 case Parameters::RECORD:
1015 case Parameters::PREVIEW:
1016 mCallbackProcessor->pauseCallback();
1017 syncWithDevice();
1018 // Due to the flush below, a camera device sync is not a
1019 // sufficient guarantee that the current client parameters
1020 // have been applied. To resolve this, wait for the current
1021 // request id to return in the results.
1022 waitUntilCurrentRequestIdLocked();
1023 res = stopStream();
1024 if (res != OK) {
1025 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1026 __FUNCTION__, mCameraId, strerror(-res), res);
1027 }
1028
1029 // Flush all in-process captures and buffer in order to stop
1030 // preview faster.
1031 res = mDevice->flush();
1032 if (res != OK) {
1033 ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
1034 __FUNCTION__, mCameraId, strerror(-res), res);
1035 }
1036
1037 res = mDevice->waitUntilDrained();
1038 if (res != OK) {
1039 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1040 __FUNCTION__, mCameraId, strerror(-res), res);
1041 }
1042 // Clean up recording stream
1043 res = mStreamingProcessor->deleteRecordingStream();
1044 if (res != OK) {
1045 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1046 "stop preview: %s (%d)",
1047 __FUNCTION__, mCameraId, strerror(-res), res);
1048 }
1049 FALLTHROUGH_INTENDED;
1050 case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
1051 SharedParameters::Lock l(mParameters);
1052 l.mParameters.state = Parameters::STOPPED;
1053 commandStopFaceDetectionL(l.mParameters);
1054 break;
1055 }
1056 default:
1057 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId,
1058 state);
1059 }
1060 }
1061
1062 bool Camera2Client::previewEnabled() {
1063 ATRACE_CALL();
1064 Mutex::Autolock icl(mBinderSerializationLock);
1065 status_t res;
1066 if ( (res = checkPid(__FUNCTION__) ) != OK) return false;
1067
1068 SharedParameters::Lock l(mParameters);
1069 return l.mParameters.state == Parameters::PREVIEW;
1070 }
1071
1072 status_t Camera2Client::setVideoBufferMode(int32_t videoBufferMode) {
1073 ATRACE_CALL();
1074 Mutex::Autolock icl(mBinderSerializationLock);
1075 status_t res;
1076 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1077
1078 SharedParameters::Lock l(mParameters);
1079 switch (l.mParameters.state) {
1080 case Parameters::RECORD:
1081 case Parameters::VIDEO_SNAPSHOT:
1082 ALOGE("%s: Camera %d: Can't be called in state %s",
1083 __FUNCTION__, mCameraId,
1084 Parameters::getStateName(l.mParameters.state));
1085 return INVALID_OPERATION;
1086 default:
1087 // OK
1088 break;
1089 }
1090
1091 if (videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1092 ALOGE("%s: %d: Only video buffer queue is supported", __FUNCTION__, __LINE__);
1093 return BAD_VALUE;
1094 }
1095
1096 l.mParameters.videoBufferMode = videoBufferMode;
1097
1098 return OK;
1099 }
1100
1101 status_t Camera2Client::startRecording() {
1102 ATRACE_CALL();
1103 ALOGV("%s: E", __FUNCTION__);
1104 Mutex::Autolock icl(mBinderSerializationLock);
1105 status_t res;
1106 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1107 SharedParameters::Lock l(mParameters);
1108
1109 return startRecordingL(l.mParameters, false);
1110 }
1111
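// Transitions from preview into recording: brings preview up first if needed,
// tears down the preview-callback and ZSL streams (which may not coexist with
// a recording stream), reconfigures the recording stream when required, and
// starts a RECORD streaming request.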
1112 status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
1113 status_t res = OK;
1114
1115 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
1116
1117 switch (params.state) {
1118 case Parameters::STOPPED:
1119 res = startPreviewL(params, false);
1120 if (res != OK) return res;
1121 // Make sure first preview request is submitted to the HAL device to avoid
1122 // two consecutive set of configure_streams being called into the HAL.
1123 // TODO: Refactor this to avoid initial preview configuration.
1124 syncWithDevice();
1125 break;
1126 case Parameters::PREVIEW:
1127 // Ready to go
1128 break;
1129 case Parameters::RECORD:
1130 case Parameters::VIDEO_SNAPSHOT:
1131 // OK to call this when recording is already on, just skip unless
1132 // we're looking to restart
1133 if (!restart) return OK;
1134 break;
1135 default:
1136 ALOGE("%s: Camera %d: Can't start recording in state %s",
1137 __FUNCTION__, mCameraId,
1138 Parameters::getStateName(params.state));
1139 return INVALID_OPERATION;
1140 };
1141
1142 if (params.videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1143 ALOGE("%s: Camera %d: Recording only supported buffer queue mode, but "
1144 "mode %d is requested!", __FUNCTION__, mCameraId, params.videoBufferMode);
1145 return INVALID_OPERATION;
1146 }
1147
1148 if (!mStreamingProcessor->haveValidRecordingWindow()) {
1149 ALOGE("%s: No valid recording window", __FUNCTION__);
1150 return INVALID_OPERATION;
1151 }
1152
1153 if (!restart) {
1154 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1155 res = mStreamingProcessor->updateRecordingRequest(params);
1156 if (res != OK) {
1157 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
1158 __FUNCTION__, mCameraId, strerror(-res), res);
1159 return res;
1160 }
1161 }
1162
1163 // Not all devices can support a preview callback stream and a recording
1164 // stream at the same time, so assume none of them can.
1165 if (mCallbackProcessor->getStreamId() != NO_STREAM) {
1166 ALOGV("%s: Camera %d: Clearing out callback stream before "
1167 "creating recording stream", __FUNCTION__, mCameraId);
1168 res = mStreamingProcessor->stopStream();
1169 if (res != OK) {
1170 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1171 __FUNCTION__, mCameraId);
1172 return res;
1173 }
1174 res = mCallbackProcessor->deleteStream();
1175 if (res != OK) {
1176 ALOGE("%s: Camera %d: Unable to delete callback stream before "
1177 "record: %s (%d)", __FUNCTION__, mCameraId,
1178 strerror(-res), res);
1179 return res;
1180 }
1181 }
1182
1183 // Clean up ZSL before transitioning into recording
1184 if (mZslProcessor->getStreamId() != NO_STREAM) {
1185 ALOGV("%s: Camera %d: Clearing out zsl stream before "
1186 "creating recording stream", __FUNCTION__, mCameraId);
1187 res = mStreamingProcessor->stopStream();
1188 if (res != OK) {
1189 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1190 __FUNCTION__, mCameraId);
1191 return res;
1192 }
1193 res = mDevice->waitUntilDrained();
1194 if (res != OK) {
1195 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1196 __FUNCTION__, mCameraId, strerror(-res), res);
1197 }
1198 res = mZslProcessor->clearZslQueue();
1199 if (res != OK) {
1200 ALOGE("%s: Camera %d: Can't clear zsl queue",
1201 __FUNCTION__, mCameraId);
1202 return res;
1203 }
1204 res = mZslProcessor->deleteStream();
1205 if (res != OK) {
1206 ALOGE("%s: Camera %d: Unable to delete zsl stream before "
1207 "record: %s (%d)", __FUNCTION__, mCameraId,
1208 strerror(-res), res);
1209 return res;
1210 }
1211 }
1212
1213 // Disable callbacks if they're enabled; can't record and use callbacks,
1214 // and we can't fail record start without stagefright asserting.
1215 params.previewCallbackFlags = 0;
1216
1217 // May need to reconfigure video snapshot JPEG sizes
1218 // during recording startup, so need a more complex sequence here to
1219 // ensure an early stream reconfiguration doesn't happen
1220 bool recordingStreamNeedsUpdate;
1221 res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
1222 if (res != OK) {
1223 ALOGE("%s: Camera %d: Can't query recording stream",
1224 __FUNCTION__, mCameraId);
1225 return res;
1226 }
1227
1228 if (recordingStreamNeedsUpdate) {
1229 // Need to stop stream here so updateProcessorStream won't trigger configureStream
1230 // Right now camera device cannot handle configureStream failure gracefully
1231 // when device is streaming
1232 res = mStreamingProcessor->stopStream();
1233 if (res != OK) {
1234 ALOGE("%s: Camera %d: Can't stop streaming to update record "
1235 "stream", __FUNCTION__, mCameraId);
1236 return res;
1237 }
1238 res = mDevice->waitUntilDrained();
1239 if (res != OK) {
1240 ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
1241 "%s (%d)", __FUNCTION__, mCameraId,
1242 strerror(-res), res);
1243 }
1244
1245 res = updateProcessorStream<
1246 StreamingProcessor,
1247 &StreamingProcessor::updateRecordingStream>(
1248 mStreamingProcessor,
1249 params);
1250 if (res != OK) {
1251 ALOGE("%s: Camera %d: Unable to update recording stream: "
1252 "%s (%d)", __FUNCTION__, mCameraId,
1253 strerror(-res), res);
1254 return res;
1255 }
1256 }
1257
1258 Vector<int32_t> outputStreams;
1259 outputStreams.push(getPreviewStreamId());
1260 outputStreams.push(getRecordingStreamId());
1261
1262 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1263 outputStreams);
1264
1265 // startStream might trigger a configureStream call and device might fail
1266 // configureStream due to jpeg size > video size. Try again with jpeg size overridden
1267 // to video size.
1268 if (res == BAD_VALUE) {
1269 overrideVideoSnapshotSize(params);
1270 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1271 outputStreams);
1272 }
1273
1274 if (res != OK) {
1275 ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)",
1276 __FUNCTION__, mCameraId, strerror(-res), res);
1277 return res;
1278 }
1279
1280 if (params.state < Parameters::RECORD) {
1281 params.state = Parameters::RECORD;
1282 }
1283
1284 return OK;
1285 }
1286
1287 void Camera2Client::stopRecording() {
1288 ATRACE_CALL();
1289 ALOGV("%s: E", __FUNCTION__);
1290 Mutex::Autolock icl(mBinderSerializationLock);
1291 SharedParameters::Lock l(mParameters);
1292
1293 status_t res;
1294 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
1295
1296 switch (l.mParameters.state) {
1297 case Parameters::RECORD:
1298 // OK to stop
1299 break;
1300 case Parameters::STOPPED:
1301 case Parameters::PREVIEW:
1302 case Parameters::STILL_CAPTURE:
1303 case Parameters::VIDEO_SNAPSHOT:
1304 default:
1305 ALOGE("%s: Camera %d: Can't stop recording in state %s",
1306 __FUNCTION__, mCameraId,
1307 Parameters::getStateName(l.mParameters.state));
1308 return;
1309 };
1310
1311 sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
1312
1313 // Remove recording stream because the video target may be abandoned soon.
1314 res = stopStream();
1315 if (res != OK) {
1316 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1317 __FUNCTION__, mCameraId, strerror(-res), res);
1318 }
1319
1320 res = mDevice->waitUntilDrained();
1321 if (res != OK) {
1322 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1323 __FUNCTION__, mCameraId, strerror(-res), res);
1324 }
1325 // Clean up recording stream
1326 res = mStreamingProcessor->deleteRecordingStream();
1327 if (res != OK) {
1328 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1329 "stop preview: %s (%d)",
1330 __FUNCTION__, mCameraId, strerror(-res), res);
1331 }
1332 l.mParameters.recoverOverriddenJpegSize();
1333
1334 // Restart preview
1335 res = startPreviewL(l.mParameters, true);
1336 if (res != OK) {
1337 ALOGE("%s: Camera %d: Unable to return to preview",
1338 __FUNCTION__, mCameraId);
1339 }
1340 }
1341
1342 bool Camera2Client::recordingEnabled() {
1343 ATRACE_CALL();
1344 Mutex::Autolock icl(mBinderSerializationLock);
1345
1346 if ( checkPid(__FUNCTION__) != OK) return false;
1347
1348 return recordingEnabledL();
1349 }
1350
1351 bool Camera2Client::recordingEnabledL() {
1352 ATRACE_CALL();
1353 SharedParameters::Lock l(mParameters);
1354
1355 return (l.mParameters.state == Parameters::RECORD
1356 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
1357 }
1358
1359 void Camera2Client::releaseRecordingFrame([[maybe_unused]] const sp<IMemory>& mem) {
1360 ATRACE_CALL();
1361 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1362 }
1363
1364 void Camera2Client::releaseRecordingFrameHandle([[maybe_unused]] native_handle_t *handle) {
1365 ATRACE_CALL();
1366 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1367 }
1368
1369 void Camera2Client::releaseRecordingFrameHandleBatch(
1370 [[maybe_unused]] const std::vector<native_handle_t*>& handles) {
1371 ATRACE_CALL();
1372 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1373 }
1374
1375 status_t Camera2Client::autoFocus() {
1376 ATRACE_CALL();
1377 Mutex::Autolock icl(mBinderSerializationLock);
1378 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1379 status_t res;
1380 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1381
1382 int triggerId;
1383 bool notifyImmediately = false;
1384 bool notifySuccess = false;
1385 {
1386 SharedParameters::Lock l(mParameters);
1387 if (l.mParameters.state < Parameters::PREVIEW) {
1388 ALOGE("%s: Camera %d: Call autoFocus when preview is inactive (state = %d).",
1389 __FUNCTION__, mCameraId, l.mParameters.state);
1390 return INVALID_OPERATION;
1391 }
1392
1393 /**
1394 * If the camera does not support auto-focus, it is a no-op and
1395 * onAutoFocus(boolean, Camera) callback will be called immediately
1396 * with a fake value of success set to true.
1397 *
1398 * Similarly, if focus mode is set to INFINITY, there's no reason to
1399 * bother the HAL.
1400 */
1401 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1402 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1403 notifyImmediately = true;
1404 notifySuccess = true;
1405 }
1406 /**
1407 * If we're in CAF mode, and AF has already been locked, just fire back
1408 * the callback right away; the HAL would not send a notification since
1409 * no state change would happen on a AF trigger.
1410 */
1411 if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE ||
1412 l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) &&
1413 l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) {
1414 notifyImmediately = true;
1415 notifySuccess = true;
1416 }
1417 /**
1418 * Send immediate notification back to client
1419 */
1420 if (notifyImmediately) {
1421 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1422 if (l.mRemoteCallback != 0) {
1423 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1424 notifySuccess ? 1 : 0, 0);
1425 }
1426 return OK;
1427 }
1428 /**
1429 * Handle quirk mode for AF in scene modes
1430 */
1431 if (l.mParameters.quirks.triggerAfWithAuto &&
1432 l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
1433 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
1434 !l.mParameters.focusingAreas[0].isEmpty()) {
1435 ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
1436 __FUNCTION__, l.mParameters.focusMode);
1437 l.mParameters.shadowFocusMode = l.mParameters.focusMode;
1438 l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO;
1439 updateRequests(l.mParameters);
1440 }
1441
1442 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
1443 triggerId = l.mParameters.currentAfTriggerId;
1444 }
1445 ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);
1446
1447 syncWithDevice();
1448
1449 mDevice->triggerAutofocus(triggerId);
1450
1451 return OK;
1452 }
1453
1454 status_t Camera2Client::cancelAutoFocus() {
1455 ATRACE_CALL();
1456 Mutex::Autolock icl(mBinderSerializationLock);
1457 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1458 status_t res;
1459 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1460
1461 int triggerId;
1462 {
1463 SharedParameters::Lock l(mParameters);
1464 if (l.mParameters.state == Parameters::DISCONNECTED) {
1465 ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
1466 return INVALID_OPERATION;
1467 }
1468
1469 // Canceling does nothing in FIXED or INFINITY modes
1470 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1471 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1472 return OK;
1473 }
1474
1475 // An active AF trigger is canceled
1476 if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
1477 ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
1478 }
1479
1480 triggerId = ++l.mParameters.afTriggerCounter;
1481
1482 // When using triggerAfWithAuto quirk, may need to reset focus mode to
1483 // the real state at this point. No need to cancel explicitly if
1484 // changing the AF mode.
1485 if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) {
1486 ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__,
1487 l.mParameters.shadowFocusMode);
1488 l.mParameters.focusMode = l.mParameters.shadowFocusMode;
1489 l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID;
1490 updateRequests(l.mParameters);
1491
1492 return OK;
1493 }
1494 if (l.mParameters.allowZslMode) {
1495 mZslProcessor->clearZslQueue();
1496 }
1497 }
1498 syncWithDevice();
1499
1500 mDevice->triggerCancelAutofocus(triggerId);
1501
1502 return OK;
1503 }
1504
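// Initiates a still capture (or a video snapshot while recording): ensures the
// JPEG stream is configured, clears the ZSL queue if the JPEG size changed,
// optionally syncs with the device so a precapture trigger uses the latest
// settings, then hands the capture off to the CaptureSequencer.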
1505 status_t Camera2Client::takePicture(int /*msgType*/) {
1506 ATRACE_CALL();
1507 Mutex::Autolock icl(mBinderSerializationLock);
1508 status_t res;
1509 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1510
1511 int takePictureCounter;
1512 bool shouldSyncWithDevice = true;
1513 {
1514 SharedParameters::Lock l(mParameters);
1515 switch (l.mParameters.state) {
1516 case Parameters::DISCONNECTED:
1517 case Parameters::STOPPED:
1518 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1519 ALOGE("%s: Camera %d: Cannot take picture without preview enabled",
1520 __FUNCTION__, mCameraId);
1521 return INVALID_OPERATION;
1522 case Parameters::PREVIEW:
1523 // Good to go for takePicture
1524 res = commandStopFaceDetectionL(l.mParameters);
1525 if (res != OK) {
1526 ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
1527 __FUNCTION__, mCameraId);
1528 return res;
1529 }
1530 l.mParameters.state = Parameters::STILL_CAPTURE;
1531
1532 // Remove recording stream to prevent video snapshot jpeg logic kicking in
1533 if (l.mParameters.isJpegSizeOverridden() &&
1534 mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
1535 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1536 if (res != OK) {
1537 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1538 __FUNCTION__, mCameraId, strerror(-res), res);
1539 }
1540 res = mDevice->waitUntilDrained();
1541 if (res != OK) {
1542 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1543 __FUNCTION__, mCameraId, strerror(-res), res);
1544 }
1545 // Clean up recording stream
1546 res = mStreamingProcessor->deleteRecordingStream();
1547 if (res != OK) {
1548 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1549 "stop preview: %s (%d)",
1550 __FUNCTION__, mCameraId, strerror(-res), res);
1551 }
1552 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
1553 if (res != OK) {
1554 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
1555 __FUNCTION__, mCameraId, strerror(-res), res);
1556 }
1557 l.mParameters.recoverOverriddenJpegSize();
1558 }
1559 break;
1560 case Parameters::RECORD:
1561 // Good to go for video snapshot
1562 l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
1563 break;
1564 case Parameters::STILL_CAPTURE:
1565 case Parameters::VIDEO_SNAPSHOT:
1566 ALOGE("%s: Camera %d: Already taking a picture",
1567 __FUNCTION__, mCameraId);
1568 return INVALID_OPERATION;
1569 }
1570
1571 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
1572 int lastJpegStreamId = mJpegProcessor->getStreamId();
1573 // slowJpegMode will create jpeg stream in CaptureSequencer before capturing
1574 if (!l.mParameters.slowJpegMode) {
1575 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1576 }
1577
1578 // If video snapshot fail to configureStream, try override video snapshot size to
1579 // video size
1580 if (res == BAD_VALUE && l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
1581 overrideVideoSnapshotSize(l.mParameters);
1582 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1583 }
1584 if (res != OK) {
1585 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
1586 __FUNCTION__, mCameraId, strerror(-res), res);
1587 return res;
1588 }
1589 takePictureCounter = ++l.mParameters.takePictureCounter;
1590
1591 // Clear ZSL buffer queue when Jpeg size is changed.
1592 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
1593 if (l.mParameters.allowZslMode && jpegStreamChanged) {
1594 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
1595 __FUNCTION__, mCameraId);
1596 mZslProcessor->clearZslQueue();
1597 }
1598
1599 // We should always sync with the device in case flash is turned on,
1600 // the camera device suggests that flash is needed (AE state FLASH_REQUIRED)
1601 // or we are in some other AE state different from CONVERGED that may need
1602 // precapture trigger.
1603 if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON &&
1604 (l.mParameters.aeState == ANDROID_CONTROL_AE_STATE_CONVERGED)) {
1605 shouldSyncWithDevice = false;
1606 }
1607 }
1608
1609 ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);
1610
1611 // Make sure HAL has correct settings in case precapture trigger is needed.
1612 if (shouldSyncWithDevice) {
1613 syncWithDevice();
1614 }
1615
1616 res = mCaptureSequencer->startCapture();
1617 if (res != OK) {
1618 ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
1619 __FUNCTION__, mCameraId, strerror(-res), res);
1620 }
1621
1622 return res;
1623 }
1624
1625 status_t Camera2Client::setParameters(const String8& params) {
1626 ATRACE_CALL();
1627 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1628 Mutex::Autolock icl(mBinderSerializationLock);
1629 status_t res;
1630 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1631
1632 SharedParameters::Lock l(mParameters);
1633
1634 Parameters::focusMode_t focusModeBefore = l.mParameters.focusMode;
1635 res = l.mParameters.set(params);
1636 if (res != OK) return res;
1637 Parameters::focusMode_t focusModeAfter = l.mParameters.focusMode;
1638
1639 if (l.mParameters.allowZslMode && focusModeAfter != focusModeBefore) {
1640 mZslProcessor->clearZslQueue();
1641 }
1642
1643 res = updateRequests(l.mParameters);
1644
1645 return res;
1646 }
1647
1648 String8 Camera2Client::getParameters() const {
1649 ATRACE_CALL();
1650 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1651 Mutex::Autolock icl(mBinderSerializationLock);
1652     // The camera service itself can always get the parameters
1653 if (getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
1654
1655 SharedParameters::ReadLock l(mParameters);
1656
1657 return l.mParameters.get();
1658 }
1659
1660 status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
1661 ATRACE_CALL();
1662 Mutex::Autolock icl(mBinderSerializationLock);
1663 status_t res;
1664 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1665
1666 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId,
1667 cmd, arg1, arg2);
1668
1669 switch (cmd) {
1670 case CAMERA_CMD_START_SMOOTH_ZOOM:
1671 return commandStartSmoothZoomL();
1672 case CAMERA_CMD_STOP_SMOOTH_ZOOM:
1673 return commandStopSmoothZoomL();
1674 case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
1675 return commandSetDisplayOrientationL(arg1);
1676 case CAMERA_CMD_ENABLE_SHUTTER_SOUND:
1677 return commandEnableShutterSoundL(arg1 == 1);
1678 case CAMERA_CMD_PLAY_RECORDING_SOUND:
1679 return commandPlayRecordingSoundL();
1680 case CAMERA_CMD_START_FACE_DETECTION:
1681 return commandStartFaceDetectionL(arg1);
1682 case CAMERA_CMD_STOP_FACE_DETECTION: {
1683 SharedParameters::Lock l(mParameters);
1684 return commandStopFaceDetectionL(l.mParameters);
1685 }
1686 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
1687 return commandEnableFocusMoveMsgL(arg1 == 1);
1688 case CAMERA_CMD_PING:
1689 return commandPingL();
1690 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
1691 case CAMERA_CMD_SET_VIDEO_FORMAT:
1692 ALOGE("%s: command %d (arguments %d, %d) is not supported.",
1693 __FUNCTION__, cmd, arg1, arg2);
1694 return BAD_VALUE;
1695 default:
1696 ALOGE("%s: Unknown command %d (arguments %d, %d)",
1697 __FUNCTION__, cmd, arg1, arg2);
1698 return BAD_VALUE;
1699 }
1700 }
1701
1702 status_t Camera2Client::commandStartSmoothZoomL() {
1703 ALOGE("%s: Unimplemented!", __FUNCTION__);
1704 return OK;
1705 }
1706
1707 status_t Camera2Client::commandStopSmoothZoomL() {
1708 ALOGE("%s: Unimplemented!", __FUNCTION__);
1709 return OK;
1710 }
1711
1712 status_t Camera2Client::commandSetDisplayOrientationL(int degrees) {
1713 int transform = Parameters::degToTransform(degrees,
1714 mCameraFacing == CAMERA_FACING_FRONT);
1715 if (transform == -1) {
1716 ALOGE("%s: Camera %d: Error setting %d as display orientation value",
1717 __FUNCTION__, mCameraId, degrees);
1718 return BAD_VALUE;
1719 }
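    // An active rotate-and-crop override takes precedence over the display
    // orientation requested by the app; the preview transform computed for
    // rotate-and-crop is used instead.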
1720 {
1721 Mutex::Autolock icl(mRotateAndCropLock);
1722 if (mRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_NONE) {
1723 ALOGI("%s: Rotate and crop set to: %d, skipping display orientation!", __FUNCTION__,
1724 mRotateAndCropMode);
1725 transform = mRotateAndCropPreviewTransform;
1726 }
1727 }
1728 SharedParameters::Lock l(mParameters);
1729 if (transform != l.mParameters.previewTransform &&
1730 getPreviewStreamId() != NO_STREAM) {
1731 mDevice->setStreamTransform(getPreviewStreamId(), transform);
1732 }
1733 l.mParameters.previewTransform = transform;
1734 return OK;
1735 }
1736
1737 status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
1738 SharedParameters::Lock l(mParameters);
1739 if (enable) {
1740 l.mParameters.playShutterSound = true;
1741 return OK;
1742 }
1743
1744 l.mParameters.playShutterSound = false;
1745 return OK;
1746 }
1747
1748 status_t Camera2Client::commandPlayRecordingSoundL() {
1749 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1750 return OK;
1751 }
1752
1753 status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
1754 ALOGV("%s: Camera %d: Starting face detection",
1755 __FUNCTION__, mCameraId);
1756 status_t res;
1757 SharedParameters::Lock l(mParameters);
1758 switch (l.mParameters.state) {
1759 case Parameters::DISCONNECTED:
1760 case Parameters::STOPPED:
1761 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1762 case Parameters::STILL_CAPTURE:
1763 ALOGE("%s: Camera %d: Cannot start face detection without preview active",
1764 __FUNCTION__, mCameraId);
1765 return INVALID_OPERATION;
1766 case Parameters::PREVIEW:
1767 case Parameters::RECORD:
1768 case Parameters::VIDEO_SNAPSHOT:
1769 // Good to go for starting face detect
1770 break;
1771 }
1772     // The requested face detection type argument is ignored
1773 if (l.mParameters.fastInfo.bestFaceDetectMode ==
1774 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
1775 ALOGE("%s: Camera %d: Face detection not supported",
1776 __FUNCTION__, mCameraId);
1777 return BAD_VALUE;
1778 }
1779 if (l.mParameters.enableFaceDetect) return OK;
1780
1781 l.mParameters.enableFaceDetect = true;
1782
1783 res = updateRequests(l.mParameters);
1784
1785 return res;
1786 }
1787
1788 status_t Camera2Client::commandStopFaceDetectionL(Parameters &params) {
1789 status_t res = OK;
1790 ALOGV("%s: Camera %d: Stopping face detection",
1791 __FUNCTION__, mCameraId);
1792
1793 if (!params.enableFaceDetect) return OK;
1794
1795 params.enableFaceDetect = false;
1796
1797 if (params.state == Parameters::PREVIEW
1798 || params.state == Parameters::RECORD
1799 || params.state == Parameters::VIDEO_SNAPSHOT) {
1800 res = updateRequests(params);
1801 }
1802
1803 return res;
1804 }
1805
1806 status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) {
1807 SharedParameters::Lock l(mParameters);
1808 l.mParameters.enableFocusMoveMessages = enable;
1809
1810 return OK;
1811 }
1812
1813 status_t Camera2Client::commandPingL() {
1814 // Always ping back if access is proper and device is alive
1815 SharedParameters::Lock l(mParameters);
1816 if (l.mParameters.state != Parameters::DISCONNECTED) {
1817 return OK;
1818 } else {
1819 return NO_INIT;
1820 }
1821 }
1822
1823 void Camera2Client::notifyError(int32_t errorCode,
1824 const CaptureResultExtras& resultExtras) {
1825 int32_t err = CAMERA_ERROR_UNKNOWN;
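    // Fatal errors are translated to the closest legacy CAMERA_ERROR_* code and
    // reported to the app below; recoverable per-request errors are only logged,
    // recorded as failed request IDs (for REQUEST/RESULT), and forwarded to the
    // capture sequencer.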
1826 switch(errorCode) {
1827 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
1828 err = CAMERA_ERROR_RELEASED;
1829 break;
1830 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
1831 err = CAMERA_ERROR_UNKNOWN;
1832 break;
1833 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
1834 err = CAMERA_ERROR_SERVER_DIED;
1835 break;
1836 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
1837 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
1838 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1839 ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
1840 __FUNCTION__, errorCode, resultExtras.requestId);
1841
1842 if ((hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST == errorCode) ||
1843 (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
1844 Mutex::Autolock al(mLatestRequestMutex);
1845
1846 mLatestFailedRequestIds.add(resultExtras.requestId);
1847 mLatestRequestSignal.signal();
1848 }
1849 mCaptureSequencer->notifyError(errorCode, resultExtras);
1850 return;
1851 default:
1852 err = CAMERA_ERROR_UNKNOWN;
1853 break;
1854 }
1855
1856 ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
1857 resultExtras.requestId);
1858
1859 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1860 if (l.mRemoteCallback != nullptr) {
1861 l.mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, err, 0);
1862 }
1863 }
1864
1865
1866 /** Device-related methods */
1867 void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
1868 ALOGV("%s: Autofocus state now %d, last trigger %d",
1869 __FUNCTION__, newState, triggerId);
1870 bool sendCompletedMessage = false;
1871 bool sendMovingMessage = false;
1872
1873 bool success = false;
1874 bool afInMotion = false;
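    // The camera2 AF state machine is folded down to the two API1 callbacks:
    // CAMERA_MSG_FOCUS (AF completed, with a success flag) and, when focus-move
    // messages are enabled, CAMERA_MSG_FOCUS_MOVE (passive scan started/stopped).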
1875 {
1876 SharedParameters::Lock l(mParameters);
1877 // Trace end of AF state
1878 char tmp[32];
1879 if (l.mParameters.afStateCounter > 0) {
1880 camera_metadata_enum_snprint(
1881 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1882 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
1883 }
1884
1885 // Update state
1886 l.mParameters.focusState = newState;
1887 l.mParameters.afStateCounter++;
1888
1889 // Trace start of AF state
1890
1891 camera_metadata_enum_snprint(
1892 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1893 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
1894
1895 switch (l.mParameters.focusMode) {
1896 case Parameters::FOCUS_MODE_AUTO:
1897 case Parameters::FOCUS_MODE_MACRO:
1898 // Don't send notifications upstream if they're not for the current AF
1899 // trigger. For example, if cancel was called in between, or if we
1900 // already sent a notification about this AF call.
1901 if (triggerId != l.mParameters.currentAfTriggerId) break;
1902 switch (newState) {
1903 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1904 success = true;
1905 FALLTHROUGH_INTENDED;
1906 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1907 sendCompletedMessage = true;
1908 l.mParameters.currentAfTriggerId = -1;
1909 break;
1910 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1911 // Just starting focusing, ignore
1912 break;
1913 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1914 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1915 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1916 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1917 default:
1918 // Unexpected in AUTO/MACRO mode
1919 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
1920 __FUNCTION__, newState);
1921 break;
1922 }
1923 break;
1924 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
1925 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
1926 switch (newState) {
1927 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1928 success = true;
1929 FALLTHROUGH_INTENDED;
1930 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1931 // Don't send notifications upstream if they're not for
1932 // the current AF trigger. For example, if cancel was
1933 // called in between, or if we already sent a
1934 // notification about this AF call.
1935                     // Send both an 'AF done' callback and an 'AF move' callback
1936 if (triggerId != l.mParameters.currentAfTriggerId) break;
1937 sendCompletedMessage = true;
1938 afInMotion = false;
1939 if (l.mParameters.enableFocusMoveMessages &&
1940 l.mParameters.afInMotion) {
1941 sendMovingMessage = true;
1942 }
1943 l.mParameters.currentAfTriggerId = -1;
1944 break;
1945 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1946                     // Cancel was called, or we switched state; send a focus-move
1947                     // update only if we were currently moving
1948 afInMotion = false;
1949 if (l.mParameters.enableFocusMoveMessages &&
1950 l.mParameters.afInMotion) {
1951 sendMovingMessage = true;
1952 }
1953 break;
1954 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1955 // Start passive scan, inform upstream
1956 afInMotion = true;
1957 FALLTHROUGH_INTENDED;
1958 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1959 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1960 // Stop passive scan, inform upstream
1961 if (l.mParameters.enableFocusMoveMessages) {
1962 sendMovingMessage = true;
1963 }
1964 break;
1965 }
1966 l.mParameters.afInMotion = afInMotion;
1967 break;
1968 case Parameters::FOCUS_MODE_EDOF:
1969 case Parameters::FOCUS_MODE_INFINITY:
1970 case Parameters::FOCUS_MODE_FIXED:
1971 default:
1972 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
1973 ALOGE("%s: Unexpected AF state change %d "
1974 "(ID %d) in focus mode %d",
1975 __FUNCTION__, newState, triggerId,
1976 l.mParameters.focusMode);
1977 }
1978 }
1979 }
1980 if (sendMovingMessage) {
1981 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1982 if (l.mRemoteCallback != 0) {
1983 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
1984 afInMotion ? 1 : 0, 0);
1985 }
1986 }
1987 if (sendCompletedMessage) {
1988 ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
1989 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1990 if (l.mRemoteCallback != 0) {
1991 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1992 success ? 1 : 0, 0);
1993 }
1994 }
1995 }
1996
1997 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
1998 ALOGV("%s: Autoexposure state now %d, last trigger %d",
1999 __FUNCTION__, newState, triggerId);
2000 {
2001 SharedParameters::Lock l(mParameters);
2002 // Update state
2003 l.mParameters.aeState = newState;
2004 }
2005 mCaptureSequencer->notifyAutoExposure(newState, triggerId);
2006 }
2007
2008 void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
2009 nsecs_t timestamp) {
2010 ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
2011 __FUNCTION__, resultExtras.requestId, timestamp);
2012 mCaptureSequencer->notifyShutter(resultExtras, timestamp);
2013
2014 Camera2ClientBase::notifyShutter(resultExtras, timestamp);
2015 }
2016
2017 camera2::SharedParameters& Camera2Client::getParameters() {
2018 return mParameters;
2019 }
2020
2021 int Camera2Client::getPreviewStreamId() const {
2022 return mStreamingProcessor->getPreviewStreamId();
2023 }
2024
2025 int Camera2Client::getCaptureStreamId() const {
2026 return mJpegProcessor->getStreamId();
2027 }
2028
2029 int Camera2Client::getCallbackStreamId() const {
2030 return mCallbackProcessor->getStreamId();
2031 }
2032
2033 int Camera2Client::getRecordingStreamId() const {
2034 return mStreamingProcessor->getRecordingStreamId();
2035 }
2036
2037 int Camera2Client::getZslStreamId() const {
2038 return mZslProcessor->getStreamId();
2039 }
2040
2041 status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId,
2042 const wp<camera2::FrameProcessor::FilteredListener>& listener, bool sendPartials) {
2043 return mFrameProcessor->registerListener(minId, maxId, listener, sendPartials);
2044 }
2045
2046 status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId,
2047 const wp<camera2::FrameProcessor::FilteredListener>& listener) {
2048 return mFrameProcessor->removeListener(minId, maxId, listener);
2049 }
2050
2051 status_t Camera2Client::stopStream() {
2052 return mStreamingProcessor->stopStream();
2053 }
2054
2055 status_t Camera2Client::createJpegStreamL(Parameters &params) {
2056 status_t res = OK;
2057 int lastJpegStreamId = mJpegProcessor->getStreamId();
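    // Only create a JPEG stream here if none exists yet.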
2058 if (lastJpegStreamId != NO_STREAM) {
2059 return INVALID_OPERATION;
2060 }
2061
2062 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2063 if (res != OK) {
2064 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2065 __FUNCTION__, mCameraId, strerror(-res), res);
2066 return res;
2067 }
2068
2069 res = mDevice->flush();
2070 if (res != OK) {
2071 ALOGE("%s: Camera %d: Unable flush device: %s (%d)",
2072 __FUNCTION__, mCameraId, strerror(-res), res);
2073 return res;
2074 }
2075
2076     // Ideally we wouldn't need this, but the current camera device
2077 // status tracking mechanism demands it.
2078 res = mDevice->waitUntilDrained();
2079 if (res != OK) {
2080 ALOGE("%s: Camera %d: Waiting device drain failed: %s (%d)",
2081 __FUNCTION__, mCameraId, strerror(-res), res);
2082 }
2083
2084 res = updateProcessorStream(mJpegProcessor, params);
2085 return res;
2086 }
2087
2088 const int32_t Camera2Client::kPreviewRequestIdStart;
2089 const int32_t Camera2Client::kPreviewRequestIdEnd;
2090 const int32_t Camera2Client::kRecordingRequestIdStart;
2091 const int32_t Camera2Client::kRecordingRequestIdEnd;
2092 const int32_t Camera2Client::kCaptureRequestIdStart;
2093 const int32_t Camera2Client::kCaptureRequestIdEnd;
2094
2095 /** Utility methods */
2096
2097 status_t Camera2Client::updateRequests(Parameters &params) {
2098 status_t res;
2099
2100 ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state);
2101
2102 res = mStreamingProcessor->incrementStreamingIds();
2103 if (res != OK) {
2104 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)",
2105 __FUNCTION__, mCameraId, strerror(-res), res);
2106 return res;
2107 }
2108
2109 res = mStreamingProcessor->updatePreviewRequest(params);
2110 if (res != OK) {
2111 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)",
2112 __FUNCTION__, mCameraId, strerror(-res), res);
2113 return res;
2114 }
2115 res = mStreamingProcessor->updateRecordingRequest(params);
2116 if (res != OK) {
2117 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
2118 __FUNCTION__, mCameraId, strerror(-res), res);
2119 return res;
2120 }
2121
2122 if (params.state == Parameters::PREVIEW) {
2123 res = startPreviewL(params, true);
2124 if (res != OK) {
2125 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)",
2126 __FUNCTION__, mCameraId, strerror(-res), res);
2127 return res;
2128 }
2129 } else if (params.state == Parameters::RECORD ||
2130 params.state == Parameters::VIDEO_SNAPSHOT) {
2131 res = startRecordingL(params, true);
2132 if (res != OK) {
2133 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
2134 __FUNCTION__, mCameraId, strerror(-res), res);
2135 return res;
2136 }
2137 }
2138 return res;
2139 }
2140
2141
2142 size_t Camera2Client::calculateBufferSize(int width, int height,
2143 int format, int stride) {
2144 switch (format) {
2145 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16
2146 return width * height * 2;
2147 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21
2148 return width * height * 3 / 2;
2149 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2
2150 return width * height * 2;
2151 case HAL_PIXEL_FORMAT_YV12: { // YV12
2152 size_t ySize = stride * height;
2153 size_t uvStride = (stride / 2 + 0xF) & ~0xF;
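            // Round the chroma stride up to the next multiple of 16, matching the
            // 16-byte chroma alignment required by the YV12 layout.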
2154 size_t uvSize = uvStride * height / 2;
2155 return ySize + uvSize * 2;
2156 }
2157 case HAL_PIXEL_FORMAT_RGB_565:
2158 return width * height * 2;
2159 case HAL_PIXEL_FORMAT_RGBA_8888:
2160 return width * height * 4;
2161 case HAL_PIXEL_FORMAT_RAW16:
2162 return width * height * 2;
2163 default:
2164 ALOGE("%s: Unknown preview format: %x",
2165 __FUNCTION__, format);
2166 return 0;
2167 }
2168 }
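// A minimal, illustrative sketch of the arithmetic above; the 1280x720 NV21
// preview size and stride are assumed example values, not taken from this file:
//
//     size_t bufSize = calculateBufferSize(1280, 720,
//             HAL_PIXEL_FORMAT_YCrCb_420_SP, /*stride*/1280);
//     // bufSize == 1280 * 720 * 3 / 2 == 1382400 bytes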
2169
2170 status_t Camera2Client::syncWithDevice() {
2171 ATRACE_CALL();
2172 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms
2173 status_t res;
2174
2175 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
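    // An active request ID of 0 means no repeating request is in flight yet,
    // so there is nothing to synchronize against.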
2176 if (activeRequestId == 0) return OK;
2177
2178 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout);
2179 if (res == TIMED_OUT) {
2180 ALOGE("%s: Camera %d: Timed out waiting sync with HAL",
2181 __FUNCTION__, mCameraId);
2182 } else if (res != OK) {
2183 ALOGE("%s: Camera %d: Error while waiting to sync with HAL",
2184 __FUNCTION__, mCameraId);
2185 }
2186 return res;
2187 }
2188
2189 template <typename ProcessorT>
2190 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2191 camera2::Parameters params) {
2192 // No default template arguments until C++11, so we need this overload
2193 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>(
2194 processor, params);
2195 }
2196
2197 template <typename ProcessorT,
2198 status_t (ProcessorT::*updateStreamF)(const Parameters &)>
2199 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2200 Parameters params) {
2201 status_t res;
2202
2203 // Get raw pointer since sp<T> doesn't have operator->*
2204 ProcessorT *processorPtr = processor.get();
2205 res = (processorPtr->*updateStreamF)(params);
2206
2207 /**
2208 * Can't update the stream if it's busy?
2209 *
2210 * Then we need to stop the device (by temporarily clearing the request
2211 * queue) and then try again. Resume streaming once we're done.
2212 */
2213 if (res == -EBUSY) {
2214 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__,
2215 mCameraId);
2216 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2217 if (res != OK) {
2218 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2219 __FUNCTION__, mCameraId, strerror(-res), res);
2220 }
2221
2222 res = mDevice->waitUntilDrained();
2223 if (res != OK) {
2224 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
2225 __FUNCTION__, mCameraId, strerror(-res), res);
2226 }
2227
2228 res = (processorPtr->*updateStreamF)(params);
2229 if (res != OK) {
2230 ALOGE("%s: Camera %d: Failed to update processing stream "
2231 " despite having halted streaming first: %s (%d)",
2232 __FUNCTION__, mCameraId, strerror(-res), res);
2233 }
2234
2235 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
2236 if (res != OK) {
2237 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
2238 __FUNCTION__, mCameraId, strerror(-res), res);
2239 }
2240 }
2241
2242 return res;
2243 }
2244
2245 status_t Camera2Client::overrideVideoSnapshotSize(Parameters &params) {
2246 ALOGV("%s: Camera %d: configure still size to video size before recording"
2247 , __FUNCTION__, mCameraId);
2248 params.overrideJpegSizeByVideoSize();
2249 status_t res = updateProcessorStream(mJpegProcessor, params);
2250 if (res != OK) {
2251 ALOGE("%s: Camera %d: Can't override video snapshot size to video size: %s (%d)",
2252 __FUNCTION__, mCameraId, strerror(-res), res);
2253 }
2254 return res;
2255 }
2256
2257 status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
2258 ATRACE_CALL();
2259 ALOGV("%s: E", __FUNCTION__);
2260 Mutex::Autolock icl(mBinderSerializationLock);
2261 status_t res;
2262 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
2263
2264 sp<IBinder> binder = IInterface::asBinder(bufferProducer);
2265 if (binder == mVideoSurface) {
2266 ALOGV("%s: Camera %d: New video window is same as old video window",
2267 __FUNCTION__, mCameraId);
2268 return NO_ERROR;
2269 }
2270
2271 sp<Surface> window;
2272 int format;
2273 android_dataspace dataSpace;
2274
2275 if (bufferProducer != nullptr) {
2276 // Using controlledByApp flag to ensure that the buffer queue remains in
2277 // async mode for the old camera API, where many applications depend
2278 // on that behavior.
2279 window = new Surface(bufferProducer, /*controlledByApp*/ true);
2280
2281 ANativeWindow *anw = window.get();
2282
2283 if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
2284 ALOGE("%s: Failed to query Surface format", __FUNCTION__);
2285 return res;
2286 }
2287
2288 if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
2289 reinterpret_cast<int*>(&dataSpace))) != OK) {
2290 ALOGE("%s: Failed to query Surface dataSpace", __FUNCTION__);
2291 return res;
2292 }
2293 }
2294
2295 Parameters::State state;
2296 {
2297 SharedParameters::Lock l(mParameters);
2298 state = l.mParameters.state;
2299 }
2300
2301 switch (state) {
2302 case Parameters::STOPPED:
2303 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
2304 case Parameters::PREVIEW:
2305 // OK
2306 break;
2307 case Parameters::DISCONNECTED:
2308 case Parameters::RECORD:
2309 case Parameters::STILL_CAPTURE:
2310 case Parameters::VIDEO_SNAPSHOT:
2311 default:
2312 ALOGE("%s: Camera %d: Cannot set video target while in state %s",
2313 __FUNCTION__, mCameraId,
2314 Parameters::getStateName(state));
2315 return INVALID_OPERATION;
2316 }
2317
2318 mVideoSurface = binder;
2319 res = mStreamingProcessor->setRecordingWindow(window);
2320 if (res != OK) {
2321 ALOGE("%s: Unable to set new recording window: %s (%d)",
2322 __FUNCTION__, strerror(-res), res);
2323 return res;
2324 }
2325
2326 {
2327 SharedParameters::Lock l(mParameters);
2328 l.mParameters.videoFormat = format;
2329 l.mParameters.videoDataSpace = dataSpace;
2330 }
2331
2332 return OK;
2333 }
2334
2335 status_t Camera2Client::setAudioRestriction(int /*mode*/) {
2336     // Empty implementation. setAudioRestriction is a hidden interface and is not
2337     // supported by the android.hardware.Camera API
2338 return INVALID_OPERATION;
2339 }
2340
2341 int32_t Camera2Client::getGlobalAudioRestriction() {
2342     // Empty implementation. getAudioRestriction is a hidden interface and is not
2343     // supported by the android.hardware.Camera API
2344 return INVALID_OPERATION;
2345 }
2346
2347 status_t Camera2Client::setCameraServiceWatchdog(bool enabled) {
2348 return mDevice->setCameraServiceWatchdog(enabled);
2349 }
2350
2351 status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal) {
2352 if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
2353
2354 {
2355 Mutex::Autolock icl(mRotateAndCropLock);
2356 if (mRotateAndCropIsSupported) {
2357 mRotateAndCropMode = rotateAndCrop;
2358 } else {
2359 mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2360 return OK;
2361 }
2362 }
2363
2364 return mDevice->setRotateAndCropAutoBehavior(
2365 static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop), fromHal);
2366 }
2367
2368 status_t Camera2Client::setAutoframingOverride(uint8_t autoframingValue) {
2369 if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
2370
2371 return mDevice->setAutoframingAutoBehavior(
2372 static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
2373 }
2374
2375 bool Camera2Client::supportsCameraMute() {
2376 return mDevice->supportsCameraMute();
2377 }
2378
2379 status_t Camera2Client::setCameraMute(bool enabled) {
2380 return mDevice->setCameraMute(enabled);
2381 }
2382
2383 void Camera2Client::setStreamUseCaseOverrides(
2384 const std::vector<int64_t>& useCaseOverrides) {
2385 mDevice->setStreamUseCaseOverrides(useCaseOverrides);
2386 }
2387
2388 void Camera2Client::clearStreamUseCaseOverrides() {
2389 mDevice->clearStreamUseCaseOverrides();
2390 }
2391
2392 bool Camera2Client::supportsZoomOverride() {
2393 return mDevice->supportsZoomOverride();
2394 }
2395
2396 status_t Camera2Client::setZoomOverride(int zoomOverride) {
2397 return mDevice->setZoomOverride(zoomOverride);
2398 }
2399
2400 status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
2401 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2402 if (activeRequestId != 0) {
2403 auto res = waitUntilRequestIdApplied(activeRequestId,
2404 mDevice->getExpectedInFlightDuration());
2405 if (res == TIMED_OUT) {
2406 ALOGE("%s: Camera %d: Timed out waiting for current request id to return in results!",
2407 __FUNCTION__, mCameraId);
2408 return res;
2409 } else if (res != OK) {
2410 ALOGE("%s: Camera %d: Error while waiting for current request id to return in results!",
2411 __FUNCTION__, mCameraId);
2412 return res;
2413 }
2414 }
2415
2416 return OK;
2417 }
2418
2419 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
2420 Mutex::Autolock l(mLatestRequestMutex);
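    // Block until requestId shows up in either the applied or the failed request
    // ID list, shrinking the remaining timeout after every wakeup so spurious
    // signals cannot extend the total wait; DEAD_OBJECT is returned below if the
    // request ultimately failed.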
2421 while ((std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) ==
2422 mLatestRequestIds.end()) &&
2423 (std::find(mLatestFailedRequestIds.begin(), mLatestFailedRequestIds.end(), requestId) ==
2424 mLatestFailedRequestIds.end())) {
2425 nsecs_t startTime = systemTime();
2426
2427 auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
2428 if (res != OK) return res;
2429
2430 timeout -= (systemTime() - startTime);
2431 }
2432
2433 return (std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) !=
2434 mLatestRequestIds.end()) ? OK : DEAD_OBJECT;
2435 }
2436
2437 void Camera2Client::notifyRequestId(int32_t requestId) {
2438 Mutex::Autolock al(mLatestRequestMutex);
2439
2440 mLatestRequestIds.add(requestId);
2441 mLatestRequestSignal.signal();
2442 }
2443
2444 const char* Camera2Client::kAutofocusLabel = "autofocus";
2445 const char* Camera2Client::kTakepictureLabel = "take_picture";
2446
2447 } // namespace android
2448